diff --git a/.flake8 b/.flake8 index ed9316381..29227d4cf 100644 --- a/.flake8 +++ b/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. + **/.nox/** __pycache__, .git, *.pyc, diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6ad83346e..a9024b15d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.3.0 + rev: v3.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer diff --git a/CHANGELOG.md b/CHANGELOG.md index 658443a46..9cab925d8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.1.0](https://www.github.com/googleapis/python-logging/compare/v2.0.2...v2.1.0) (2021-01-12) + + +### Features + +* allow modifying LogEntry data using extra argument ([#129](https://www.github.com/googleapis/python-logging/issues/129)) ([92b287f](https://www.github.com/googleapis/python-logging/commit/92b287f424418fde137cc81f370dcab07f84023b)) +* support http_request field ([#120](https://www.github.com/googleapis/python-logging/issues/120)) ([ba94afb](https://www.github.com/googleapis/python-logging/commit/ba94afb7d0a5371f2d2de4232de56df34e8a1f99)) + + +### Bug Fixes + +* add InternalServerError to list of expected errors ([#151](https://www.github.com/googleapis/python-logging/issues/151)) ([9bf49f5](https://www.github.com/googleapis/python-logging/commit/9bf49f51df5321e8b9c39018dff7d767347256d6)) + + +### Documentation + +* fix usage guide ([#140](https://www.github.com/googleapis/python-logging/issues/140)) 
([1ca3981](https://www.github.com/googleapis/python-logging/commit/1ca398103fdfefb5576d6ef2ba20cfa4bd4ab252)) + ### [2.0.2](https://www.github.com/googleapis/python-logging/compare/v2.0.1...v2.0.2) (2020-12-14) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index ab6c09b8f..cd48664d8 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -202,25 +202,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-logging/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. 
Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/docs/usage.rst b/docs/usage.rst index 4714144f9..1ea9440fc 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -8,21 +8,21 @@ To write log entries, first create a :class:`~google.cloud.logging.logger.Logger`, passing the "log name" with which to associate the entries: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_create] :end-before: [END logger_create] :dedent: 4 Write a simple text entry to the logger. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_text] :end-before: [END logger_log_text] :dedent: 4 Write a dictionary entry to the logger. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_struct] :end-before: [END logger_log_struct] :dedent: 4 @@ -34,7 +34,7 @@ Supported Resource values are listed at `Monitored Resource Types`_ .. _Monitored Resource Types: https://cloud.google.com/logging/docs/api/v2/resource-list -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_log_resource_text] :end-before: [END logger_log_resource_text] :dedent: 4 @@ -44,7 +44,7 @@ Retrieving log entries Fetch entries for the default project. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_entries_default] :end-before: [END client_list_entries_default] :dedent: 4 @@ -59,41 +59,27 @@ will be instances of one of the following classes: - :class:`~google.cloud.logging.entries.StructEntry` - :class:`~google.cloud.logging.entries.ProtobufEntry` -Fetch entries across multiple projects. - -.. 
literalinclude:: snippets.py - :start-after: [START client_list_entries_multi_project] - :end-before: [END client_list_entries_multi_project] - :dedent: 4 - Filter entries retrieved using the `Advanced Logs Filters`_ syntax .. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters Fetch entries for the default project. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_entries_filter] :end-before: [END client_list_entries_filter] :dedent: 4 Sort entries in descending timestamp order. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_entries_order_by] :end-before: [END client_list_entries_order_by] :dedent: 4 -Retrieve entries in batches of 10, iterating until done. - -.. literalinclude:: snippets.py - :start-after: [START client_list_entries_paged] - :end-before: [END client_list_entries_paged] - :dedent: 4 - Retrieve entries for a single logger, sorting in descending timestamp order: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_list_entries] :end-before: [END logger_list_entries] :dedent: 4 @@ -102,7 +88,7 @@ Retrieve entries for a single logger, sorting in descending timestamp order: Delete all entries for a logger ------------------------------- -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logger_delete] :end-before: [END logger_delete] :dedent: 8 @@ -116,35 +102,35 @@ used within Cloud Monitoring to create charts and alerts. List all metrics for a project: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_metrics] :end-before: [END client_list_metrics] :dedent: 4 Create a metric: -.. literalinclude:: snippets.py +.. 
literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_create] :end-before: [END metric_create] :dedent: 4 Refresh local information about a metric: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_reload] :end-before: [END metric_reload] :dedent: 4 Update a metric: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_update] :end-before: [END metric_update] :dedent: 4 Delete a metric: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START metric_delete] :end-before: [END metric_delete] :dedent: 4 @@ -166,14 +152,14 @@ Make sure that the storage bucket you want to export logs too has Add ``cloud-logs@google.com`` as the owner of the bucket: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_bucket_permissions] :end-before: [END sink_bucket_permissions] :dedent: 4 Create a Cloud Storage sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_storage_create] :end-before: [END sink_storage_create] :dedent: 4 @@ -189,14 +175,14 @@ See: `Setting permissions for BigQuery`_ .. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_dataset_permissions] :end-before: [END sink_dataset_permissions] :dedent: 4 Create a BigQuery sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_bigquery_create] :end-before: [END sink_bigquery_create] :dedent: 4 @@ -212,14 +198,14 @@ See: `Setting permissions for Pub/Sub`_ .. 
_Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_topic_permissions] :end-before: [END sink_topic_permissions] :dedent: 4 Create a Cloud Pub/Sub sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_pubsub_create] :end-before: [END sink_pubsub_create] :dedent: 4 @@ -229,28 +215,28 @@ Manage Sinks List all sinks for a project: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START client_list_sinks] :end-before: [END client_list_sinks] :dedent: 4 Refresh local information about a sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_reload] :end-before: [END sink_reload] :dedent: 4 Update a sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_update] :end-before: [END sink_update] :dedent: 4 Delete a sink: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START sink_delete] :end-before: [END sink_delete] :dedent: 4 @@ -263,7 +249,7 @@ Cloud Logging. There are different handler options to accomplish this. To automatically pick the default for your current environment, use :meth:`~google.cloud.logging.client.Client.get_default_handler`. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START create_default_handler] :end-before: [END create_default_handler] :dedent: 4 @@ -274,7 +260,7 @@ as well as any other loggers created. A helper method :meth:`~google.cloud.logging.client.Client.setup_logging` is provided to configure this automatically. -.. literalinclude:: snippets.py +.. 
literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START setup_logging] :end-before: [END setup_logging] :dedent: 4 @@ -286,7 +272,7 @@ to configure this automatically. You can also exclude certain loggers: -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START setup_logging_excludes] :end-before: [END setup_logging_excludes] :dedent: 4 @@ -300,7 +286,7 @@ directly create a :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` instance which will write directly to the API. -.. literalinclude:: snippets.py +.. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START create_cloud_handler] :end-before: [END create_cloud_handler] :dedent: 4 @@ -316,7 +302,7 @@ All logs will go to a single custom log, which defaults to "python". The name of the Python logger will be included in the structured log entry under the "python_logger" field. You can change it by providing a name to the handler: -.. literalinclude:: snippets.py +.. 
literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START create_named_handler] :end-before: [END create_named_handler] :dedent: 4 diff --git a/google/cloud/logging_v2/handlers/_helpers.py b/google/cloud/logging_v2/handlers/_helpers.py index 3150e46c3..9821e95af 100644 --- a/google/cloud/logging_v2/handlers/_helpers.py +++ b/google/cloud/logging_v2/handlers/_helpers.py @@ -23,9 +23,14 @@ flask = None from google.cloud.logging_v2.handlers.middleware.request import _get_django_request +from google.logging.type.http_request_pb2 import HttpRequest _DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" +_DJANGO_USERAGENT_HEADER = "HTTP_USER_AGENT" +_DJANGO_REMOTE_ADDR_HEADER = "REMOTE_ADDR" +_DJANGO_REFERER_HEADER = "HTTP_REFERER" _FLASK_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT" +_PROTOCOL_HEADER = "SERVER_PROTOCOL" def format_stackdriver_json(record, message): @@ -46,59 +51,86 @@ def format_stackdriver_json(record, message): return json.dumps(payload) -def get_trace_id_from_flask(): - """Get trace_id from flask request headers. +def get_request_data_from_flask(): + """Get http_request and trace data from flask request headers. Returns: - str: TraceID in HTTP request headers. + Tuple[Optional[google.logging.type.http_request_pb2.HttpRequest], Optional[str]]: + Data related to the current http request and the trace_id for the + request. Both fields will be None if a flask request isn't found. 
""" if flask is None or not flask.request: - return None + return None, None + + # build http_request + http_request = HttpRequest( + request_method=flask.request.method, + request_url=flask.request.url, + request_size=flask.request.content_length, + user_agent=flask.request.user_agent.string, + remote_ip=flask.request.remote_addr, + referer=flask.request.referrer, + protocol=flask.request.environ.get(_PROTOCOL_HEADER), + ) + # find trace id + trace_id = None header = flask.request.headers.get(_FLASK_TRACE_HEADER) + if header: + trace_id = header.split("/", 1)[0] - if header is None: - return None - - trace_id = header.split("/", 1)[0] - - return trace_id + return http_request, trace_id -def get_trace_id_from_django(): - """Get trace_id from django request headers. +def get_request_data_from_django(): + """Get http_request and trace data from django request headers. Returns: - str: TraceID in HTTP request headers. + Tuple[Optional[google.logging.type.http_request_pb2.HttpRequest], Optional[str]]: + Data related to the current http request and the trace_id for the + request. Both fields will be None if a django request isn't found. """ request = _get_django_request() if request is None: - return None + return None, None + # build http_request + http_request = HttpRequest( + request_method=request.method, + request_url=request.build_absolute_uri(), + request_size=len(request.body), + user_agent=request.META.get(_DJANGO_USERAGENT_HEADER), + remote_ip=request.META.get(_DJANGO_REMOTE_ADDR_HEADER), + referer=request.META.get(_DJANGO_REFERER_HEADER), + protocol=request.META.get(_PROTOCOL_HEADER), + ) + # find trace id + trace_id = None header = request.META.get(_DJANGO_TRACE_HEADER) - if header is None: - return None - - trace_id = header.split("/", 1)[0] + if header: + trace_id = header.split("/", 1)[0] - return trace_id + return http_request, trace_id -def get_trace_id(): - """Helper to get trace_id from web application request header. 
+def get_request_data(): + """Helper to get http_request and trace data from supported web + frameworks (currently supported: Flask and Django). Returns: - str: TraceID in HTTP request headers. + Tuple[Optional[google.logging.type.http_request_pb2.HttpRequest], Optional[str]]: + Data related to the current http request and the trace_id for the + request. Both fields will be None if a supported web request isn't found. """ checkers = ( - get_trace_id_from_django, - get_trace_id_from_flask, + get_request_data_from_django, + get_request_data_from_flask, ) for checker in checkers: - trace_id = checker() - if trace_id is not None: - return trace_id + http_request, trace_id = checker() + if http_request is not None: + return http_request, trace_id - return None + return None, None diff --git a/google/cloud/logging_v2/handlers/app_engine.py b/google/cloud/logging_v2/handlers/app_engine.py index fed9bd205..a5d57c53e 100644 --- a/google/cloud/logging_v2/handlers/app_engine.py +++ b/google/cloud/logging_v2/handlers/app_engine.py @@ -21,7 +21,7 @@ import logging import os -from google.cloud.logging_v2.handlers._helpers import get_trace_id +from google.cloud.logging_v2.handlers._helpers import get_request_data from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport from google.cloud.logging_v2.resource import Resource @@ -96,7 +96,7 @@ def get_gae_labels(self): """ gae_labels = {} - trace_id = get_trace_id() + _, trace_id = get_request_data() if trace_id is not None: gae_labels[_TRACE_ID_LABEL] = trace_id @@ -113,12 +113,25 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. 
""" message = super(AppEngineHandler, self).format(record) + inferred_http, inferred_trace = get_request_data() + if inferred_trace is not None: + inferred_trace = f"projects/{self.project_id}/traces/{inferred_trace}" + # allow user overrides + trace = getattr(record, "trace", inferred_trace) + span_id = getattr(record, "span_id", None) + http_request = getattr(record, "http_request", inferred_http) + resource = getattr(record, "resource", self.resource) + user_labels = getattr(record, "labels", {}) + # merge labels gae_labels = self.get_gae_labels() - trace_id = ( - "projects/%s/traces/%s" % (self.project_id, gae_labels[_TRACE_ID_LABEL]) - if _TRACE_ID_LABEL in gae_labels - else None - ) + gae_labels.update(user_labels) + # send off request self.transport.send( - record, message, resource=self.resource, labels=gae_labels, trace=trace_id + record, + message, + resource=resource, + labels=gae_labels, + trace=trace, + span_id=span_id, + http_request=http_request, ) diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index d45c7b61b..fd99f7adc 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -87,6 +87,7 @@ def __init__( self.name = name self.client = client self.transport = transport(client, name) + self.project_id = client.project self.resource = resource self.labels = labels @@ -101,7 +102,26 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. 
""" message = super(CloudLoggingHandler, self).format(record) - self.transport.send(record, message, resource=self.resource, labels=self.labels) + trace_id = getattr(record, "trace", None) + span_id = getattr(record, "span_id", None) + http_request = getattr(record, "http_request", None) + resource = getattr(record, "resource", self.resource) + user_labels = getattr(record, "labels", {}) + # merge labels + total_labels = self.labels if self.labels is not None else {} + total_labels.update(user_labels) + if len(total_labels) == 0: + total_labels = None + # send off request + self.transport.send( + record, + message, + resource=resource, + labels=(total_labels if total_labels else None), + trace=trace_id, + span_id=span_id, + http_request=http_request, + ) def setup_logging( diff --git a/google/cloud/logging_v2/handlers/transports/background_thread.py b/google/cloud/logging_v2/handlers/transports/background_thread.py index 873fa452d..3d654dbd8 100644 --- a/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -222,31 +222,21 @@ def _main_thread_terminated(self): file=sys.stderr, ) - def enqueue( - self, record, message, *, resource=None, labels=None, trace=None, span_id=None - ): + def enqueue(self, record, message, **kwargs): """Queues a log entry to be written by the background thread. Args: record (logging.LogRecord): Python log record that the handler was called with. message (str): The message from the ``LogRecord`` after being formatted by the associated log formatters. - resource (Optional[google.cloud.logging_v2.resource.Resource]): - Monitored resource of the entry - labels (Optional[dict]): Mapping of labels for the entry. - trace (Optional[str]): TraceID to apply to the logging entry. - span_id (Optional[str]): Span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. 
+ kwargs: Additional optional arguments for the logger """ queue_entry = { "info": {"message": message, "python_logger": record.name}, "severity": _helpers._normalize_severity(record.levelno), - "resource": resource, - "labels": labels, - "trace": trace, - "span_id": span_id, "timestamp": datetime.datetime.utcfromtimestamp(record.created), } + queue_entry.update(kwargs) self._queue.put_nowait(queue_entry) def flush(self): @@ -291,30 +281,16 @@ def __init__( ) self.worker.start() - def send( - self, record, message, resource=None, labels=None, trace=None, span_id=None - ): + def send(self, record, message, **kwargs): """Overrides Transport.send(). Args: record (logging.LogRecord): Python log record that the handler was called with. message (str): The message from the ``LogRecord`` after being formatted by the associated log formatters. - resource (Optional[google.cloud.logging_v2.resource.Resource]): - Monitored resource of the entry. - labels (Optional[dict]): Mapping of labels for the entry. - trace (Optional[str]): TraceID to apply to the logging entry. - span_id (Optional[str]): span_id within the trace for the log entry. - Specify the trace parameter if span_id is set. + kwargs: Additional optional arguments for the logger """ - self.worker.enqueue( - record, - message, - resource=resource, - labels=labels, - trace=trace, - span_id=span_id, - ) + self.worker.enqueue(record, message, **kwargs) def flush(self): """Submit any pending log records.""" diff --git a/google/cloud/logging_v2/handlers/transports/base.py b/google/cloud/logging_v2/handlers/transports/base.py index c94c7ad70..d60a5a070 100644 --- a/google/cloud/logging_v2/handlers/transports/base.py +++ b/google/cloud/logging_v2/handlers/transports/base.py @@ -22,18 +22,14 @@ class Transport(object): client and name object, and must override :meth:`send`. 
""" - def send( - self, record, message, *, resource=None, labels=None, trace=None, span_id=None - ): + def send(self, record, message, **kwargs): """Transport send to be implemented by subclasses. Args: record (logging.LogRecord): Python log record that the handler was called with. message (str): The message from the ``LogRecord`` after being formatted by the associated log formatters. - resource (Optional[google.cloud.logging_v2.resource.Resource]): - Monitored resource of the entry. - labels (Optional[dict]): Mapping of labels for the entry. + kwargs: Additional optional arguments for the logger """ raise NotImplementedError diff --git a/google/cloud/logging_v2/handlers/transports/sync.py b/google/cloud/logging_v2/handlers/transports/sync.py index 550c29391..35ee73daa 100644 --- a/google/cloud/logging_v2/handlers/transports/sync.py +++ b/google/cloud/logging_v2/handlers/transports/sync.py @@ -30,9 +30,7 @@ class SyncTransport(Transport): def __init__(self, client, name): self.logger = client.logger(name) - def send( - self, record, message, *, resource=None, labels=None, trace=None, span_id=None - ): + def send(self, record, message, **kwargs): """Overrides transport.send(). Args: @@ -40,16 +38,9 @@ def send( Python log record that the handler was called with. message (str): The message from the ``LogRecord`` after being formatted by the associated log formatters. - resource (Optional[~logging_v2.resource.Resource]): - Monitored resource of the entry. - labels (Optional[dict]): Mapping of labels for the entry. 
+ kwargs: Additional optional arguments for the logger """ info = {"message": message, "python_logger": record.name} self.logger.log_struct( - info, - severity=_helpers._normalize_severity(record.levelno), - resource=resource, - labels=labels, - trace=trace, - span_id=span_id, + info, severity=_helpers._normalize_severity(record.levelno), **kwargs, ) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index bdb659d04..b071a67f3 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1 +1,4 @@ -google-cloud-logging==2.0.1 +google-cloud-logging==2.0.2 +google-cloud-storage==1.35.0 +google-cloud-pubsub==2.2.0 +google-cloud-bigquery==2.6.1 diff --git a/docs/snippets.py b/samples/snippets/usage_guide.py similarity index 65% rename from docs/snippets.py rename to samples/snippets/usage_guide.py index da9ba9b2d..b28d10980 100644 --- a/docs/snippets.py +++ b/samples/snippets/usage_guide.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Testable usage examples for Cloud Logging API wrapper +"""Samples embedded in the Usage Guide (docs/usage.rst) Each example function takes a ``client`` argument (which must be an instance of :class:`google.cloud.logging.client.Client`) and uses it to perform a task @@ -23,9 +23,10 @@ need to be deleted during teardown. 
""" +import os import time -from google.cloud.logging.client import Client +from google.cloud.logging import Client def snippet(func): @@ -42,25 +43,6 @@ def do_something_with(item): # pylint: disable=unused-argument pass -# pylint: disable=reimported,unused-variable,unused-argument -@snippet -def instantiate_client(_unused_client, _unused_to_delete): - """Instantiate client.""" - - # [START client_create_default] - from google.cloud import logging - - client = logging.Client() - # [END client_create_default] - - credentials = object() - # [START client_create_explicit] - from google.cloud import logging - - client = logging.Client(project="my-project", credentials=credentials) - # [END client_create_explicit] - - # pylint: enable=reimported,unused-variable,unused-argument @@ -71,55 +53,32 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument # [START client_list_entries_default] for entry in client.list_entries(): # API call(s) do_something_with(entry) - # [END client_list_entries_default] + # [END client_list_entries_default] + break # [START client_list_entries_filter] - FILTER = "logName:log_name AND textPayload:simple" - for entry in client.list_entries(filter_=FILTER): # API call(s) + filter_str = "logName:log_name AND textPayload:simple" + for entry in client.list_entries(filter_=filter_str): # API call(s) do_something_with(entry) - # [END client_list_entries_filter] + # [END client_list_entries_filter] + break # [START client_list_entries_order_by] from google.cloud.logging import DESCENDING for entry in client.list_entries(order_by=DESCENDING): # API call(s) do_something_with(entry) - # [END client_list_entries_order_by] - - # [START client_list_entries_paged] - iterator = client.list_entries() - pages = iterator.pages - - page1 = next(pages) # API call - for entry in page1: - do_something_with(entry) - - page2 = next(pages) # API call - for entry in page2: - do_something_with(entry) - # [END client_list_entries_paged] - - -# 
@snippet Commented because we need real project IDs to test -def client_list_entries_multi_project( - client, to_delete -): # pylint: disable=unused-argument - """List entries via client across multiple projects.""" - - # [START client_list_entries_multi_project] - resource_names = ["projects/one-project", "projects/another-project"] - for entry in client.list_entries(resource_names=resource_names): # API call(s) - do_something_with(entry) - # [END client_list_entries_multi_project] + # [END client_list_entries_order_by] + break @snippet def logger_usage(client, to_delete): """Logger usage.""" - LOG_NAME = "logger_usage_%d" % (_millis()) + log_name = "logger_usage_%d" % (_millis()) # [START logger_create] - logger = client.logger(LOG_NAME) + logger = client.logger(log_name) # [END logger_create] to_delete.append(logger) @@ -134,7 +93,7 @@ def logger_usage(client, to_delete): # [END logger_log_struct] # [START logger_log_resource_text] - from google.cloud.logging.resource import Resource + from google.cloud.logging import Resource res = Resource( type="generic_node", @@ -168,11 +127,11 @@ def _logger_delete(): @snippet def metric_crud(client, to_delete): """Metric CRUD.""" - METRIC_NAME = "robots-%d" % (_millis(),) - DESCRIPTION = "Robots all up in your server" - FILTER = "logName:apache-access AND textPayload:robot" - UPDATED_FILTER = "textPayload:robot" - UPDATED_DESCRIPTION = "Danger, Will Robinson!" + metric_name = "robots-%d" % (_millis(),) + description = "Robots all up in your server" + filter = "logName:apache-access AND textPayload:robot" + updated_filter = "textPayload:robot" + updated_description = "Danger, Will Robinson!" 
# [START client_list_metrics] for metric in client.list_metrics(): # API call(s) @@ -180,7 +139,7 @@ def metric_crud(client, to_delete): # [END client_list_metrics] # [START metric_create] - metric = client.metric(METRIC_NAME, filter_=FILTER, description=DESCRIPTION) + metric = client.metric(metric_name, filter_=filter, description=description) assert not metric.exists() # API call metric.create() # API call assert metric.exists() # API call @@ -188,20 +147,20 @@ def metric_crud(client, to_delete): to_delete.append(metric) # [START metric_reload] - existing_metric = client.metric(METRIC_NAME) + existing_metric = client.metric(metric_name) existing_metric.reload() # API call # [END metric_reload] - assert existing_metric.filter_ == FILTER - assert existing_metric.description == DESCRIPTION + assert existing_metric.filter_ == filter + assert existing_metric.description == description # [START metric_update] - existing_metric.filter_ = UPDATED_FILTER - existing_metric.description = UPDATED_DESCRIPTION + existing_metric.filter_ = updated_filter + existing_metric.description = updated_description existing_metric.update() # API call # [END metric_update] existing_metric.reload() - assert existing_metric.filter_ == UPDATED_FILTER - assert existing_metric.description == UPDATED_DESCRIPTION + assert existing_metric.filter_ == updated_filter + assert existing_metric.description == updated_description def _metric_delete(): # [START metric_delete] @@ -215,9 +174,9 @@ def _metric_delete(): def _sink_storage_setup(client): from google.cloud import storage - BUCKET_NAME = "sink-storage-%d" % (_millis(),) + bucket_name = "sink-storage-%d" % (_millis(),) client = storage.Client() - bucket = client.bucket(BUCKET_NAME) + bucket = client.bucket(bucket_name) bucket.create() # [START sink_bucket_permissions] @@ -236,12 +195,12 @@ def sink_storage(client, to_delete): """Sink log entries to storage.""" bucket = _sink_storage_setup(client) to_delete.append(bucket) - SINK_NAME = 
"robots-storage-%d" % (_millis(),) - FILTER = "textPayload:robot" + sink_name = "robots-storage-%d" % (_millis(),) + filter = "textPayload:robot" # [START sink_storage_create] - DESTINATION = "storage.googleapis.com/%s" % (bucket.name,) - sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) + destination = "storage.googleapis.com/%s" % (bucket.name,) + sink = client.sink(sink_name, filter_=filter, destination=destination) assert not sink.exists() # API call sink.create() # API call assert sink.exists() # API call @@ -252,19 +211,17 @@ def sink_storage(client, to_delete): def _sink_bigquery_setup(client): from google.cloud import bigquery - DATASET_NAME = "sink_bigquery_%d" % (_millis(),) + dataset_name = "sink_bigquery_%d" % (_millis(),) client = bigquery.Client() - dataset = client.dataset(DATASET_NAME) - dataset.create() - dataset.reload() + dataset = client.create_dataset(dataset_name) # [START sink_dataset_permissions] - from google.cloud.bigquery.dataset import AccessGrant + from google.cloud.bigquery.dataset import AccessEntry - grants = dataset.access_grants - grants.append(AccessGrant("WRITER", "groupByEmail", "cloud-logs@google.com")) - dataset.access_grants = grants - dataset.update() # API call + entry_list = dataset.access_entries + entry_list.append(AccessEntry("WRITER", "groupByEmail", "cloud-logs@google.com")) + dataset.access_entries = entry_list + client.update_dataset(dataset, ["access_entries"]) # API call # [END sink_dataset_permissions] return dataset @@ -274,13 +231,12 @@ def _sink_bigquery_setup(client): def sink_bigquery(client, to_delete): """Sink log entries to bigquery.""" dataset = _sink_bigquery_setup(client) - to_delete.append(dataset) - SINK_NAME = "robots-bigquery-%d" % (_millis(),) - FILTER = "textPayload:robot" + sink_name = "robots-bigquery-%d" % (_millis(),) + filter_str = "textPayload:robot" # [START sink_bigquery_create] - DESTINATION = "bigquery.googleapis.com%s" % (dataset.path,) - sink = 
client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) + destination = "bigquery.googleapis.com%s" % (dataset.path,) + sink = client.sink(sink_name, filter_=filter_str, destination=destination) assert not sink.exists() # API call sink.create() # API call assert sink.exists() # API call @@ -291,15 +247,21 @@ def sink_bigquery(client, to_delete): def _sink_pubsub_setup(client): from google.cloud import pubsub - TOPIC_NAME = "sink-pubsub-%d" % (_millis(),) - client = pubsub.Client() - topic = client.topic(TOPIC_NAME) - topic.create() + client = pubsub.PublisherClient() + + project_id = os.environ["GOOGLE_CLOUD_PROJECT"] + topic_id = "sink-pubsub-%d" % (_millis(),) # [START sink_topic_permissions] - policy = topic.get_iam_policy() # API call - policy.owners.add(policy.group("cloud-logs@google.com")) - topic.set_iam_policy(policy) # API call + topic_path = client.topic_path(project_id, topic_id) + topic = client.create_topic(request={"name": topic_path}) + + policy = client.get_iam_policy(request={"resource": topic_path}) # API call + policy.bindings.add(role="roles/owner", members=["group:cloud-logs@google.com"]) + + client.set_iam_policy( + request={"resource": topic_path, "policy": policy} + ) # API call # [END sink_topic_permissions] return topic @@ -309,19 +271,18 @@ def _sink_pubsub_setup(client): def sink_pubsub(client, to_delete): """Sink log entries to pubsub.""" topic = _sink_pubsub_setup(client) - to_delete.append(topic) - SINK_NAME = "robots-pubsub-%d" % (_millis(),) - FILTER = "logName:apache-access AND textPayload:robot" - UPDATED_FILTER = "textPayload:robot" + sink_name = "robots-pubsub-%d" % (_millis(),) + filter_str = "logName:apache-access AND textPayload:robot" + updated_filter = "textPayload:robot" # [START sink_pubsub_create] - DESTINATION = "pubsub.googleapis.com/%s" % (topic.full_name,) - sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION) + destination = "pubsub.googleapis.com/%s" % (topic.name,) + sink = 
client.sink(sink_name, filter_=filter_str, destination=destination) assert not sink.exists() # API call sink.create() # API call assert sink.exists() # API call # [END sink_pubsub_create] - to_delete.insert(0, sink) # delete sink before topic + created_sink = sink # [START client_list_sinks] for sink in client.list_sinks(): # API call(s) @@ -329,23 +290,23 @@ def sink_pubsub(client, to_delete): # [END client_list_sinks] # [START sink_reload] - existing_sink = client.sink(SINK_NAME) + existing_sink = client.sink(sink_name) existing_sink.reload() # [END sink_reload] - assert existing_sink.filter_ == FILTER - assert existing_sink.destination == DESTINATION + assert existing_sink.filter_ == filter_str + assert existing_sink.destination == destination # [START sink_update] - existing_sink.filter_ = UPDATED_FILTER + existing_sink.filter_ = updated_filter existing_sink.update() # [END sink_update] existing_sink.reload() - assert existing_sink.filter_ == UPDATED_FILTER + assert existing_sink.filter_ == updated_filter + sink = created_sink # [START sink_delete] sink.delete() # [END sink_delete] - to_delete.pop(0) @snippet diff --git a/samples/snippets/usage_guide_test.py b/samples/snippets/usage_guide_test.py new file mode 100644 index 000000000..f02d82fbd --- /dev/null +++ b/samples/snippets/usage_guide_test.py @@ -0,0 +1,90 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from google.cloud.logging import Client + +import usage_guide + + +def test_logger_usage(): + client = Client() + + to_delete = [] + usage_guide.logger_usage(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_metric_crud(): + client = Client() + + to_delete = [] + usage_guide.metric_crud(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_sink_storage(): + client = Client() + + to_delete = [] + usage_guide.sink_storage(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_sink_bigquery(): + client = Client() + + to_delete = [] + usage_guide.sink_bigquery(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_sink_pubsub(): + client = Client() + + to_delete = [] + usage_guide.sink_pubsub(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) + + +def test_logging_handler(): + client = Client() + + usage_guide.logging_handler(client) + + +def test_setup_logging(): + client = Client() + + usage_guide.setup_logging(client) + + +def test_client_list_entries(): + client = Client() + + to_delete = [] + usage_guide.client_list_entries(client, to_delete) + + for item in to_delete: + usage_guide._backoff_not_found(item.delete) diff --git a/setup.py b/setup.py index d50ed511d..96df33d25 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.0.2" +version = "2.1.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/synth.metadata b/synth.metadata index 6ab2fc041..d3cd0a5cc 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": 
"3a25c8cd9bd06e5a8f488945c9bc94380e2bf0d1" + "sha": "f81e7a694d24f3ba2ad4380bbf500b8bc463e314" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" + "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" + "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" } } ], diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt new file mode 100644 index 000000000..e69de29bb diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt new file mode 100644 index 000000000..e69de29bb diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt new file mode 100644 index 000000000..0e0bdeb0b --- /dev/null +++ b/testing/constraints-3.6.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.22.0 +google-cloud-core==1.4.1 +proto-plus==1.11.0 \ No newline at end of file diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 000000000..e69de29bb diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 000000000..e69de29bb diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 000000000..e69de29bb diff --git a/tests/system/test_system.py b/tests/system/test_system.py index f9cb96e18..45126f5e5 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -20,6 +20,7 @@ from google.api_core.exceptions import BadGateway from google.api_core.exceptions import Conflict +from google.api_core.exceptions import InternalServerError from google.api_core.exceptions import NotFound from google.api_core.exceptions import TooManyRequests from google.api_core.exceptions import ResourceExhausted @@ -27,7 +28,8 @@ from google.api_core.exceptions import ServiceUnavailable import google.cloud.logging from google.cloud._helpers import UTC -from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers import AppEngineHandler +from google.cloud.logging_v2.handlers import CloudLoggingHandler from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource @@ -67,7 +69,9 @@ def _list_entries(logger): :returns: List of all entries consumed. 
""" inner = RetryResult(_has_entries, max_tries=9)(_consume_entries) - outer = RetryErrors((ServiceUnavailable, ResourceExhausted), max_tries=9)(inner) + outer = RetryErrors( + (ServiceUnavailable, ResourceExhausted, InternalServerError), max_tries=9 + )(inner) return outer(logger) @@ -308,6 +312,39 @@ def test_log_handler_sync(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) + def test_handlers_w_extras(self): + LOG_MESSAGE = "Testing with injected extras." + + for cls in [CloudLoggingHandler, AppEngineHandler]: + LOGGER_NAME = f"{cls.__name__}-handler_extras" + handler_name = self._logger_name(LOGGER_NAME) + + handler = cls(Config.CLIENT, name=handler_name, transport=SyncTransport) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + expected_request = {"requestUrl": "localhost"} + extra = { + "trace": "123", + "span_id": "456", + "http_request": expected_request, + "resource": Resource(type="cloudiot_device", labels={}), + "labels": {"test-label": "manual"}, + } + cloud_logger.warn(LOG_MESSAGE, extra=extra) + + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].trace, extra["trace"]) + self.assertEqual(entries[0].span_id, extra["span_id"]) + self.assertEqual(entries[0].http_request, expected_request) + self.assertEqual(entries[0].labels, extra["labels"]) + self.assertEqual(entries[0].resource.type, extra["resource"].type) + def test_log_root_handler(self): LOG_MESSAGE = "It was the best of times." 
diff --git a/tests/unit/handlers/test__helpers.py b/tests/unit/handlers/test__helpers.py index 1fbf6c860..8fb37305b 100644 --- a/tests/unit/handlers/test__helpers.py +++ b/tests/unit/handlers/test__helpers.py @@ -16,13 +16,18 @@ import mock +_FLASK_TRACE_ID = "flask-id" +_FLASK_HTTP_REQUEST = {"request_url": "https://flask.palletsprojects.com/en/1.1.x/"} +_DJANGO_TRACE_ID = "django-id" +_DJANGO_HTTP_REQUEST = {"request_url": "https://www.djangoproject.com/"} -class Test_get_trace_id_from_flask(unittest.TestCase): + +class Test_get_request_data_from_flask(unittest.TestCase): @staticmethod def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_trace_id_from_flask() + return _helpers.get_request_data_from_flask() @staticmethod def create_app(): @@ -39,13 +44,14 @@ def index(): def test_no_context_header(self): app = self.create_app() with app.test_request_context(path="/", headers={}): - trace_id = self._call_fut() + http_request, trace_id = self._call_fut() self.assertIsNone(trace_id) + self.assertEqual(http_request.request_method, "GET") def test_valid_context_header(self): flask_trace_header = "X_CLOUD_TRACE_CONTEXT" - expected_trace_id = "testtraceidflask" + expected_trace_id = _FLASK_TRACE_ID flask_trace_id = expected_trace_id + "/testspanid" app = self.create_app() @@ -54,17 +60,57 @@ def test_valid_context_header(self): ) with context: - trace_id = self._call_fut() + http_request, trace_id = self._call_fut() self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(http_request.request_method, "GET") + + def test_http_request_populated(self): + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_referrer = "self" + expected_ip = "10.1.2.3" + body_content = "test" + headers = { + "User-Agent": expected_agent, + "Referer": expected_referrer, + } + app = self.create_app() + with app.test_client() as c: + 
c.put( + path=expected_path, + data=body_content, + environ_base={"REMOTE_ADDR": expected_ip}, + headers=headers, + ) + http_request, trace_id = self._call_fut() + + self.assertEqual(http_request.request_method, "PUT") + self.assertEqual(http_request.request_url, expected_path) + self.assertEqual(http_request.user_agent, expected_agent) + self.assertEqual(http_request.referer, expected_referrer) + self.assertEqual(http_request.remote_ip, expected_ip) + self.assertEqual(http_request.request_size, len(body_content)) + self.assertEqual(http_request.protocol, "HTTP/1.1") + + def test_http_request_sparse(self): + expected_path = "http://testserver/123" + app = self.create_app() + with app.test_client() as c: + c.put(path=expected_path) + http_request, trace_id = self._call_fut() + self.assertEqual(http_request.request_method, "PUT") + self.assertEqual(http_request.request_url, expected_path) + self.assertEqual(http_request.protocol, "HTTP/1.1") -class Test_get_trace_id_from_django(unittest.TestCase): + +class Test_get_request_data_from_django(unittest.TestCase): @staticmethod def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_trace_id_from_django() + return _helpers.get_request_data_from_django() def setUp(self): from django.conf import settings @@ -89,7 +135,8 @@ def test_no_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - trace_id = self._call_fut() + http_request, trace_id = self._call_fut() + self.assertEqual(http_request.request_method, "GET") self.assertIsNone(trace_id) def test_valid_context_header(self): @@ -106,61 +153,137 @@ def test_valid_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - trace_id = self._call_fut() + http_request, trace_id = self._call_fut() self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(http_request.request_method, "GET") + + def 
test_http_request_populated(self): + from django.test import RequestFactory + from google.cloud.logging_v2.handlers.middleware import request + + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_referrer = "self" + body_content = "test" + django_request = RequestFactory().put( + expected_path, + data=body_content, + HTTP_USER_AGENT=expected_agent, + HTTP_REFERER=expected_referrer, + ) + + middleware = request.RequestMiddleware(None) + middleware.process_request(django_request) + http_request, trace_id = self._call_fut() + self.assertEqual(http_request.request_method, "PUT") + self.assertEqual(http_request.request_url, expected_path) + self.assertEqual(http_request.user_agent, expected_agent) + self.assertEqual(http_request.referer, expected_referrer) + self.assertEqual(http_request.remote_ip, "127.0.0.1") + self.assertEqual(http_request.request_size, len(body_content)) + self.assertEqual(http_request.protocol, "HTTP/1.1") + + def test_http_request_sparse(self): + from django.test import RequestFactory + from google.cloud.logging_v2.handlers.middleware import request + + expected_path = "http://testserver/123" + django_request = RequestFactory().put(expected_path) + middleware = request.RequestMiddleware(None) + middleware.process_request(django_request) + http_request, trace_id = self._call_fut() + self.assertEqual(http_request.request_method, "PUT") + self.assertEqual(http_request.request_url, expected_path) + self.assertEqual(http_request.remote_ip, "127.0.0.1") + self.assertEqual(http_request.protocol, "HTTP/1.1") -class Test_get_trace_id(unittest.TestCase): +class Test_get_request_data(unittest.TestCase): @staticmethod def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_trace_id() + return _helpers.get_request_data() def _helper(self, django_return, flask_return): django_patch = mock.patch( - 
"google.cloud.logging_v2.handlers._helpers.get_trace_id_from_django", + "google.cloud.logging_v2.handlers._helpers.get_request_data_from_django", return_value=django_return, ) flask_patch = mock.patch( - "google.cloud.logging_v2.handlers._helpers.get_trace_id_from_flask", + "google.cloud.logging_v2.handlers._helpers.get_request_data_from_flask", return_value=flask_return, ) with django_patch as django_mock: with flask_patch as flask_mock: - trace_id = self._call_fut() + result = self._call_fut() - return django_mock, flask_mock, trace_id + return django_mock, flask_mock, result def test_from_django(self): - django_mock, flask_mock, trace_id = self._helper("test-django-trace-id", None) - self.assertEqual(trace_id, django_mock.return_value) + django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID) + flask_expected = (None, None) + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, django_expected) django_mock.assert_called_once_with() flask_mock.assert_not_called() def test_from_flask(self): - django_mock, flask_mock, trace_id = self._helper(None, "test-flask-trace-id") - self.assertEqual(trace_id, flask_mock.return_value) + django_expected = (None, None) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID) + + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, flask_expected) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() def test_from_django_and_flask(self): - django_mock, flask_mock, trace_id = self._helper( - "test-django-trace-id", "test-flask-trace-id" - ) + django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID) + + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + # Django wins. 
- self.assertEqual(trace_id, django_mock.return_value) + self.assertEqual(output, django_expected) django_mock.assert_called_once_with() flask_mock.assert_not_called() - def test_missing(self): - django_mock, flask_mock, trace_id = self._helper(None, None) - self.assertIsNone(trace_id) + def test_missing_http_request(self): + flask_expected = (None, _FLASK_TRACE_ID) + django_expected = (None, _DJANGO_TRACE_ID) + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + + # function only returns trace if http_request data is present + self.assertEqual(output, (None, None)) + + django_mock.assert_called_once_with() + flask_mock.assert_called_once_with() + + def test_missing_trace_id(self): + flask_expected = (_FLASK_HTTP_REQUEST, None) + django_expected = (None, _DJANGO_TRACE_ID) + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + + # trace_id is optional + self.assertEqual(output, flask_expected) + + django_mock.assert_called_once_with() + flask_mock.assert_called_once_with() + + def test_missing_both(self): + flask_expected = (None, None) + django_expected = (None, None) + django_mock, flask_mock, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, (None, None)) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() + + def test_wo_libraries(self): + output = self._call_fut() + self.assertEqual(output, (None, None)) diff --git a/tests/unit/handlers/test_app_engine.py b/tests/unit/handlers/test_app_engine.py index ea16e3c85..1ac9c5dd5 100644 --- a/tests/unit/handlers/test_app_engine.py +++ b/tests/unit/handlers/test_app_engine.py @@ -87,36 +87,104 @@ def test_constructor_w_gae_flex_env(self): self.assertIs(handler.stream, stream) def test_emit(self): - client = mock.Mock(project=self.PROJECT, spec=["project"]) - handler = self._make_one(client, transport=_Transport) - gae_resource = handler.get_gae_resource() - gae_labels = handler.get_gae_labels() - trace = 
None - logname = "app" - message = "hello world" - record = logging.LogRecord(logname, logging, None, None, message, None, None) - handler.emit(record) - - self.assertIs(handler.transport.client, client) - self.assertEqual(handler.transport.name, logname) - self.assertEqual( - handler.transport.send_called_with, - (record, message, gae_resource, gae_labels, trace), + expected_http_request = {"request_url": "test"} + trace_id = "trace-test" + expected_trace_id = f"projects/{self.PROJECT}/traces/{trace_id}" + get_request_patch = mock.patch( + "google.cloud.logging_v2.handlers.app_engine.get_request_data", + return_value=(expected_http_request, trace_id), ) + with get_request_patch: + # library integrations mocked to return test data + client = mock.Mock(project=self.PROJECT, spec=["project"]) + handler = self._make_one(client, transport=_Transport) + gae_resource = handler.get_gae_resource() + gae_labels = handler.get_gae_labels() + logname = "app" + message = "hello world" + record = logging.LogRecord( + logname, logging, None, None, message, None, None + ) + handler.project_id = self.PROJECT + handler.emit(record) + + self.assertIs(handler.transport.client, client) + self.assertEqual(handler.transport.name, logname) + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + gae_resource, + gae_labels, + expected_trace_id, + None, + expected_http_request, + ), + ) + + def test_emit_manual_field_override(self): + from google.cloud.logging_v2.resource import Resource + + inferred_http_request = {"request_url": "test"} + inferred_trace_id = "trace-test" + get_request_patch = mock.patch( + "google.cloud.logging_v2.handlers.app_engine.get_request_data", + return_value=(inferred_http_request, inferred_trace_id), + ) + with get_request_patch: + # library integrations mocked to return test data + client = mock.Mock(project=self.PROJECT, spec=["project"]) + handler = self._make_one(client, transport=_Transport) + gae_labels = 
handler.get_gae_labels()
+            logname = "app"
+            message = "hello world"
+            record = logging.LogRecord(
+                logname, logging, None, None, message, None, None
+            )
+            handler.project_id = self.PROJECT
+            # set attributes manually
+            expected_trace = "123"
+            setattr(record, "trace", expected_trace)
+            expected_span = "456"
+            setattr(record, "span_id", expected_span)
+            expected_http = {"request_url": "manual"}
+            setattr(record, "http_request", expected_http)
+            expected_resource = Resource(type="test", labels={})
+            setattr(record, "resource", expected_resource)
+            additional_labels = {"test-label": "manual"}
+            expected_labels = dict(gae_labels)
+            expected_labels.update(additional_labels)
+            setattr(record, "labels", additional_labels)
+            handler.emit(record)
+            self.assertIs(handler.transport.client, client)
+            self.assertEqual(handler.transport.name, logname)
+            self.assertEqual(
+                handler.transport.send_called_with,
+                (
+                    record,
+                    message,
+                    expected_resource,
+                    expected_labels,
+                    expected_trace,
+                    expected_span,
+                    expected_http,
+                ),
+            )
 
     def _get_gae_labels_helper(self, trace_id):
-        get_trace_patch = mock.patch(
-            "google.cloud.logging_v2.handlers.app_engine.get_trace_id",
-            return_value=trace_id,
+        get_request_patch = mock.patch(
+            "google.cloud.logging_v2.handlers.app_engine.get_request_data",
+            return_value=(None, trace_id),
         )
         client = mock.Mock(project=self.PROJECT, spec=["project"])
 
         # The handler actually calls ``get_gae_labels()``.
-        with get_trace_patch as mock_get_trace:
+        with get_request_patch as mock_get_request:
             handler = self._make_one(client, transport=_Transport)
             gae_labels = handler.get_gae_labels()
 
-        self.assertEqual(mock_get_trace.mock_calls, [mock.call()])
+        self.assertEqual(mock_get_request.mock_calls, [mock.call()])
 
         return gae_labels
 
@@ -138,5 +206,13 @@ def __init__(self, client, name):
         self.client = client
         self.name = name
 
-    def send(self, record, message, resource, labels, trace):
-        self.send_called_with = (record, message, resource, labels, trace)
+    def send(self, record, message, resource, labels, trace, span_id, http_request):
+        self.send_called_with = (
+            record,
+            message,
+            resource,
+            labels,
+            trace,
+            span_id,
+            http_request,
+        )
diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py
index e967b2015..d84c19635 100644
--- a/tests/unit/handlers/test_handlers.py
+++ b/tests/unit/handlers/test_handlers.py
@@ -85,7 +85,44 @@ def test_emit(self):
 
         self.assertEqual(
             handler.transport.send_called_with,
-            (record, message, _GLOBAL_RESOURCE, None),
+            (record, message, _GLOBAL_RESOURCE, None, None, None, None),
+        )
+
+    def test_emit_manual_field_override(self):
+        from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
+        from google.cloud.logging_v2.resource import Resource
+
+        client = _Client(self.PROJECT)
+        handler = self._make_one(
+            client, transport=_Transport, resource=_GLOBAL_RESOURCE
+        )
+        logname = "loggername"
+        message = "hello world"
+        record = logging.LogRecord(logname, logging, None, None, message, None, None)
+        # set attributes manually
+        expected_trace = "123"
+        setattr(record, "trace", expected_trace)
+        expected_span = "456"
+        setattr(record, "span_id", expected_span)
+        expected_http = {"request_url": "manual"}
+        setattr(record, "http_request", expected_http)
+        expected_resource = Resource(type="test", labels={})
+        setattr(record, "resource", expected_resource)
+        expected_labels = {"test-label": "manual"}
+        setattr(record, 
"labels", expected_labels) + handler.emit(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + expected_resource, + expected_labels, + expected_trace, + expected_span, + expected_http, + ), ) @@ -148,5 +185,22 @@ def __init__(self, client, name): self.client = client self.name = name - def send(self, record, message, resource, labels=None): - self.send_called_with = (record, message, resource, labels) + def send( + self, + record, + message, + resource, + labels=None, + trace=None, + span_id=None, + http_request=None, + ): + self.send_called_with = ( + record, + message, + resource, + labels, + trace, + span_id, + http_request, + ) diff --git a/tests/unit/handlers/transports/test_background_thread.py b/tests/unit/handlers/transports/test_background_thread.py index e9626a759..5410c5f10 100644 --- a/tests/unit/handlers/transports/test_background_thread.py +++ b/tests/unit/handlers/transports/test_background_thread.py @@ -64,12 +64,7 @@ def test_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE) transport.worker.enqueue.assert_called_once_with( - record, - message, - resource=_GLOBAL_RESOURCE, - labels=None, - trace=None, - span_id=None, + record, message, resource=_GLOBAL_RESOURCE, ) def test_trace_send(self): @@ -91,12 +86,7 @@ def test_trace_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE, trace=trace) transport.worker.enqueue.assert_called_once_with( - record, - message, - resource=_GLOBAL_RESOURCE, - labels=None, - trace=trace, - span_id=None, + record, message, resource=_GLOBAL_RESOURCE, trace=trace, ) def test_span_send(self): @@ -118,12 +108,7 @@ def test_span_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE, span_id=span_id) transport.worker.enqueue.assert_called_once_with( - record, - message, - resource=_GLOBAL_RESOURCE, - labels=None, - trace=None, - span_id=span_id, + record, message, resource=_GLOBAL_RESOURCE, span_id=span_id, ) def test_flush(self): 
@@ -297,11 +282,12 @@ def test_enqueue_defaults(self): expected_info = {"message": message, "python_logger": "testing"} self.assertEqual(entry["info"], expected_info) self.assertEqual(entry["severity"], LogSeverity.INFO) - self.assertIsNone(entry["resource"]) - self.assertIsNone(entry["labels"]) - self.assertIsNone(entry["trace"]) - self.assertIsNone(entry["span_id"]) self.assertIsInstance(entry["timestamp"], datetime.datetime) + self.assertNotIn("resource", entry.keys()) + self.assertNotIn("labels", entry.keys()) + self.assertNotIn("trace", entry.keys()) + self.assertNotIn("span_id", entry.keys()) + self.assertNotIn("http_request", entry.keys()) def test_enqueue_explicit(self): import datetime @@ -503,6 +489,7 @@ def log_struct( trace=None, span_id=None, timestamp=None, + http_request=None, ): from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE diff --git a/tests/unit/handlers/transports/test_sync.py b/tests/unit/handlers/transports/test_sync.py index 0ee6db229..9f0642757 100644 --- a/tests/unit/handlers/transports/test_sync.py +++ b/tests/unit/handlers/transports/test_sync.py @@ -58,6 +58,7 @@ def test_send(self): None, None, None, + None, ) self.assertEqual(transport.logger.log_struct_called_with, EXPECTED_SENT) @@ -76,6 +77,7 @@ def log_struct( labels=None, trace=None, span_id=None, + http_request=None, ): self.log_struct_called_with = ( message, @@ -84,6 +86,7 @@ def log_struct( labels, trace, span_id, + http_request, )