From fda97a53e00ec1cbcd97f57502e30eb9bffe0bce Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 4 Dec 2020 12:07:26 -0800 Subject: [PATCH 1/9] chore: Re-generated to pick up changes from googleapis. (#97) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * feat: Makes remaining LogBucket and LogViews methods public PiperOrigin-RevId: 342353190 Source-Author: Google APIs Source-Date: Fri Nov 13 15:44:35 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: be0bdf86cd31aa7c1a7b30a9a2e9f2fd53ee3d91 Source-Link: https://github.com/googleapis/googleapis/commit/be0bdf86cd31aa7c1a7b30a9a2e9f2fd53ee3d91 * fixed lint issue Co-authored-by: Daniel Sanche --- .../logging_v2/proto/logging_config.proto | 435 ++++- .../logging_v2/proto/logging_metrics.proto | 4 +- .../config_service_v2/async_client.py | 444 ++++- .../services/config_service_v2/client.py | 466 +++++- .../services/config_service_v2/pagers.py | 128 ++ .../config_service_v2/transports/base.py | 101 ++ .../config_service_v2/transports/grpc.py | 220 ++- .../transports/grpc_asyncio.py | 230 ++- .../metrics_service_v2/async_client.py | 12 +- .../services/metrics_service_v2/client.py | 12 +- google/cloud/logging_v2/types/__init__.py | 20 + .../cloud/logging_v2/types/logging_config.py | 298 +++- .../cloud/logging_v2/types/logging_metrics.py | 4 +- samples/snippets/noxfile.py | 2 + synth.metadata | 4 +- .../logging_v2/test_config_service_v2.py | 1451 ++++++++++++++++- 16 files changed, 3658 insertions(+), 173 deletions(-) diff --git a/google/cloud/logging_v2/proto/logging_config.proto b/google/cloud/logging_v2/proto/logging_config.proto index 9486f4a9a..9b10932d6 100644 --- a/google/cloud/logging_v2/proto/logging_config.proto +++ b/google/cloud/logging_v2/proto/logging_config.proto @@ -55,7 +55,7 @@ service ConfigServiceV2 { "https://www.googleapis.com/auth/logging.admin," "https://www.googleapis.com/auth/logging.read"; - // Lists buckets (Beta). + // Lists buckets. rpc ListBuckets(ListBucketsRequest) returns (ListBucketsResponse) { option (google.api.http) = { get: "/v2/{parent=*/*/locations/*}/buckets" @@ -75,7 +75,7 @@ service ConfigServiceV2 { option (google.api.method_signature) = "parent"; } - // Gets a bucket (Beta). + // Gets a bucket. rpc GetBucket(GetBucketRequest) returns (LogBucket) { option (google.api.http) = { get: "/v2/{name=*/*/locations/*/buckets/*}" @@ -94,6 +94,31 @@ service ConfigServiceV2 { }; } + // Creates a bucket that can be used to store log entries. Once a bucket has + // been created, the region cannot be changed. + rpc CreateBucket(CreateBucketRequest) returns (LogBucket) { + option (google.api.http) = { + post: "/v2/{parent=*/*/locations/*}/buckets" + body: "bucket" + additional_bindings { + post: "/v2/{parent=projects/*/locations/*}/buckets" + body: "bucket" + } + additional_bindings { + post: "/v2/{parent=organizations/*/locations/*}/buckets" + body: "bucket" + } + additional_bindings { + post: "/v2/{parent=folders/*/locations/*}/buckets" + body: "bucket" + } + additional_bindings { + post: "/v2/{parent=billingAccounts/*/locations/*}/buckets" + body: "bucket" + } + }; + } + // Updates a bucket. This method replaces the following fields in the // existing bucket with values from the new bucket: `retention_period` // @@ -104,7 +129,6 @@ service ConfigServiceV2 { // will be returned. 
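A minimal sketch of the bucket lifecycle these RPCs enable, using the request messages added in this patch; the project, location, and bucket ids are placeholders, and only fields named in the update mask are replaced:

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config
    from google.protobuf import field_mask_pb2

    client = ConfigServiceV2Client()

    # The region is fixed at creation time; "global" is a placeholder location.
    bucket = client.create_bucket(
        logging_config.CreateBucketRequest(
            parent="projects/my-project/locations/global",
            bucket_id="my-bucket",
            bucket=logging_config.LogBucket(retention_days=30),
        )
    )

    # UpdateBucket replaces only the masked fields (here `retention_days`) and
    # is documented to fail with FAILED_PRECONDITION while the bucket is in
    # the DELETE_REQUESTED state.
    client.update_bucket(
        logging_config.UpdateBucketRequest(
            name=bucket.name,
            bucket=logging_config.LogBucket(retention_days=90),
            update_mask=field_mask_pb2.FieldMask(paths=["retention_days"]),
        )
    )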
// // A buckets region may not be modified after it is created. - // This method is in Beta. rpc UpdateBucket(UpdateBucketRequest) returns (LogBucket) { option (google.api.http) = { patch: "/v2/{name=*/*/locations/*/buckets/*}" @@ -128,6 +152,161 @@ service ConfigServiceV2 { }; } + // Deletes a bucket. + // Moves the bucket to the DELETE_REQUESTED state. After 7 days, the + // bucket will be purged and all logs in the bucket will be permanently + // deleted. + rpc DeleteBucket(DeleteBucketRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=*/*/locations/*/buckets/*}" + additional_bindings { + delete: "/v2/{name=projects/*/locations/*/buckets/*}" + } + additional_bindings { + delete: "/v2/{name=organizations/*/locations/*/buckets/*}" + } + additional_bindings { + delete: "/v2/{name=folders/*/locations/*/buckets/*}" + } + additional_bindings { + delete: "/v2/{name=billingAccounts/*/locations/*/buckets/*}" + } + }; + } + + // Undeletes a bucket. A bucket that has been deleted may be undeleted within + // the grace period of 7 days. + rpc UndeleteBucket(UndeleteBucketRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/{name=*/*/locations/*/buckets/*}:undelete" + body: "*" + additional_bindings { + post: "/v2/{name=projects/*/locations/*/buckets/*}:undelete" + body: "*" + } + additional_bindings { + post: "/v2/{name=organizations/*/locations/*/buckets/*}:undelete" + body: "*" + } + additional_bindings { + post: "/v2/{name=folders/*/locations/*/buckets/*}:undelete" + body: "*" + } + additional_bindings { + post: "/v2/{name=billingAccounts/*/locations/*/buckets/*}:undelete" + body: "*" + } + }; + } + + // Lists views on a bucket. + rpc ListViews(ListViewsRequest) returns (ListViewsResponse) { + option (google.api.http) = { + get: "/v2/{parent=*/*/locations/*/buckets/*}/views" + additional_bindings { + get: "/v2/{parent=projects/*/locations/*/buckets/*}/views" + } + additional_bindings { + get: "/v2/{parent=organizations/*/locations/*/buckets/*}/views" + } + additional_bindings { + get: "/v2/{parent=folders/*/locations/*/buckets/*}/views" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views" + } + }; + option (google.api.method_signature) = "parent"; + } + + // Gets a view. + rpc GetView(GetViewRequest) returns (LogView) { + option (google.api.http) = { + get: "/v2/{name=*/*/locations/*/buckets/*/views/*}" + additional_bindings { + get: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + get: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + get: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + get: "/v2/{name=billingAccounts/*/buckets/*/views/*}" + } + }; + } + + // Creates a view over logs in a bucket. A bucket may contain a maximum of + // 50 views. + rpc CreateView(CreateViewRequest) returns (LogView) { + option (google.api.http) = { + post: "/v2/{parent=*/*/locations/*/buckets/*}/views" + body: "view" + additional_bindings { + post: "/v2/{parent=projects/*/locations/*/buckets/*}/views" + body: "view" + } + additional_bindings { + post: "/v2/{parent=organizations/*/locations/*/buckets/*}/views" + body: "view" + } + additional_bindings { + post: "/v2/{parent=folders/*/locations/*/buckets/*}/views" + body: "view" + } + additional_bindings { + post: "/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views" + body: "view" + } + }; + } + + // Updates a view. 
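The DeleteBucket and UndeleteBucket bindings above pair into a soft-delete cycle; a sketch with a placeholder bucket name:

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()
    name = "projects/my-project/locations/global/buckets/my-bucket"  # placeholder

    # DeleteBucket only moves the bucket to DELETE_REQUESTED; its contents are
    # purged 7 days later.
    client.delete_bucket(logging_config.DeleteBucketRequest(name=name))

    # Within that 7-day grace period the same bucket can still be restored.
    client.undelete_bucket(logging_config.UndeleteBucketRequest(name=name))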
This method replaces the following fields in the existing + // view with values from the new view: `filter`. + rpc UpdateView(UpdateViewRequest) returns (LogView) { + option (google.api.http) = { + patch: "/v2/{name=*/*/locations/*/buckets/*/views/*}" + body: "view" + additional_bindings { + patch: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" + body: "view" + } + additional_bindings { + patch: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" + body: "view" + } + additional_bindings { + patch: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" + body: "view" + } + additional_bindings { + patch: "/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}" + body: "view" + } + }; + } + + // Deletes a view from a bucket. + rpc DeleteView(DeleteViewRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=*/*/locations/*/buckets/*/views/*}" + additional_bindings { + delete: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + delete: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + delete: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" + } + additional_bindings { + delete: "/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}" + } + }; + } + // Lists sinks. rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { option (google.api.http) = { @@ -420,7 +599,7 @@ service ConfigServiceV2 { } } -// Describes a repository of logs (Beta). +// Describes a repository of logs. message LogBucket { option (google.api.resource) = { type: "logging.googleapis.com/LogBucket" @@ -435,7 +614,6 @@ message LogBucket { // "projects/my-project-id/locations/my-location/buckets/my-bucket-id The // supported locations are: // "global" - // "us-central1" // // For the location of `global` it is unspecified where logs are actually // stored. @@ -458,10 +636,63 @@ message LogBucket { // 30 days will be used. int32 retention_days = 11; + // Whether the bucket has been locked. + // The retention period on a locked bucket may not be changed. + // Locked buckets may only be deleted if they are empty. + bool locked = 9; + // Output only. The bucket lifecycle state. LifecycleState lifecycle_state = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; } +// LogBucket lifecycle states. +enum LifecycleState { + // Unspecified state. This is only used/useful for distinguishing + // unset values. + LIFECYCLE_STATE_UNSPECIFIED = 0; + + // The normal and active state. + ACTIVE = 1; + + // The bucket has been marked for deletion by the user. + DELETE_REQUESTED = 2; +} + +// Describes a view over logs in a bucket. +message LogView { + option (google.api.resource) = { + type: "logging.googleapis.com/LogView" + pattern: "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + pattern: "organizations/{organization}/locations/{location}/buckets/{bucket}/views/{view}" + pattern: "folders/{folder}/locations/{location}/buckets/{bucket}/views/{view}" + pattern: "billingAccounts/{billing_account}/locations/{location}/buckets/{bucket}/views/{view}" + }; + + // The resource name of the view. + // For example + // "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view + string name = 1; + + // Describes this view. + string description = 3; + + // Output only. The creation timestamp of the view. + google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The last update timestamp of the view. 
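The `locked` and `lifecycle_state` fields introduced on LogBucket above determine whether a bucket will accept mutations; a small guard sketch (the bucket name is a placeholder):

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()
    bucket = client.get_bucket(
        logging_config.GetBucketRequest(
            name="projects/my-project/locations/global/buckets/my-bucket"
        )
    )

    # A locked bucket's retention period may not change, and a bucket marked
    # DELETE_REQUESTED rejects updates until it is undeleted.
    if bucket.locked:
        print("bucket is locked; retention cannot be changed")
    elif bucket.lifecycle_state == logging_config.LifecycleState.DELETE_REQUESTED:
        print("bucket is pending deletion; undelete it before updating")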
+ google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Filter that restricts which log entries in a bucket are visible in this + // view. Filters are restricted to be a logical AND of ==/!= of any of the + // following: + // originating project/folder/organization/billing account. + // resource type + // log id + // Example: SOURCE("projects/myproject") AND resource.type = "gce_instance" + // AND LOG_ID("stdout") + string filter = 7; +} + // Describes a sink used to export log entries to one of the following // destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a // Cloud Pub/Sub topic. A logs filter controls which log entries are exported. @@ -529,10 +760,15 @@ message LogSink { // export any log entries. bool disabled = 19 [(google.api.field_behavior) = OPTIONAL]; + // Optional. Log entries that match any of the exclusion filters will not be exported. + // If a log entry is matched by both `filter` and one of `exclusion_filters` + // it will not be exported. + repeated LogExclusion exclusions = 16 [(google.api.field_behavior) = OPTIONAL]; + // Deprecated. This field is unused. VersionFormat output_version_format = 6 [deprecated = true]; - // Output only. An IAM identity–a service account or group—under which Logging + // Output only. An IAM identity—a service account or group—under which Logging // writes the exported log entries to the sink's destination. This field is // set by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and // [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] based on the @@ -599,20 +835,7 @@ message BigQueryOptions { bool uses_timestamp_column_partitioning = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } -// LogBucket lifecycle states (Beta). -enum LifecycleState { - // Unspecified state. This is only used/useful for distinguishing - // unset values. - LIFECYCLE_STATE_UNSPECIFIED = 0; - - // The normal and active state. - ACTIVE = 1; - - // The bucket has been marked for deletion by the user. - DELETE_REQUESTED = 2; -} - -// The parameters to `ListBuckets` (Beta). +// The parameters to `ListBuckets`. message ListBucketsRequest { // Required. The parent resource whose buckets are to be listed: // @@ -643,7 +866,7 @@ message ListBucketsRequest { int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; } -// The response from ListBuckets (Beta). +// The response from ListBuckets. message ListBucketsResponse { // A list of buckets. repeated LogBucket buckets = 1; @@ -654,7 +877,32 @@ message ListBucketsResponse { string next_page_token = 2; } -// The parameters to `UpdateBucket` (Beta). +// The parameters to `CreateBucket`. +message CreateBucketRequest { + // Required. The resource in which to create the bucket: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + // + // Example: `"projects/my-logging-project/locations/global"` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "logging.googleapis.com/LogBucket" + } + ]; + + // Required. A client-assigned identifier such as `"my-bucket"`. Identifiers are + // limited to 100 characters and can include only letters, digits, + // underscores, hyphens, and periods. + string bucket_id = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The new bucket. The region specified in the new bucket must be compliant + // with any Location Restriction Org Policy. The name field in the bucket is + // ignored. 
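The new `exclusions` field on LogSink above composes with `filter`: an entry is exported only if it matches `filter` and matches no exclusion. A sketch, with placeholder sink name and destination:

    from google.cloud.logging_v2.types import logging_config

    sink = logging_config.LogSink(
        name="my-sink",
        destination="storage.googleapis.com/my-export-bucket",
        filter='resource.type = "gce_instance"',
        # Entries matching `filter` are still dropped if any exclusion matches.
        exclusions=[
            logging_config.LogExclusion(
                name="drop-debug",
                filter="severity = DEBUG",
            )
        ],
    )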
+ LogBucket bucket = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// The parameters to `UpdateBucket`. message UpdateBucketRequest { // Required. The full resource name of the bucket to update. // @@ -688,7 +936,7 @@ message UpdateBucketRequest { google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; } -// The parameters to `GetBucket` (Beta). +// The parameters to `GetBucket`. message GetBucketRequest { // Required. The resource name of the bucket: // @@ -707,6 +955,147 @@ message GetBucketRequest { ]; } +// The parameters to `DeleteBucket`. +message DeleteBucketRequest { + // Required. The full resource name of the bucket to delete. + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogBucket" + } + ]; +} + +// The parameters to `UndeleteBucket`. +message UndeleteBucketRequest { + // Required. The full resource name of the bucket to undelete. + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogBucket" + } + ]; +} + +// The parameters to `ListViews`. +message ListViewsRequest { + // Required. The bucket whose views are to be listed: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + string parent = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. + string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// The response from ListViews. +message ListViewsResponse { + // A list of views. + repeated LogView views = 1; + + // If there might be more results than appear in this response, then + // `nextPageToken` is included. To get the next set of results, call the same + // method again using the value of `nextPageToken` as `pageToken`. + string next_page_token = 2; +} + +// The parameters to `CreateView`. +message CreateViewRequest { + // Required. 
The bucket in which to create the view + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + // + // Example: + // `"projects/my-logging-project/locations/my-location/buckets/my-bucket"` + string parent = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The id to use for this view. + string view_id = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The new view. + LogView view = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// The parameters to `UpdateView`. +message UpdateViewRequest { + // Required. The full resource name of the view to update + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The updated view. + LogView view = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Field mask that specifies the fields in `view` that need + // an update. A field will be overwritten if, and only if, it is + // in the update mask. `name` and output only fields cannot be updated. + // + // For a detailed `FieldMask` definition, see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + // + // Example: `updateMask=filter`. + google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// The parameters to `GetView`. +message GetViewRequest { + // Required. The resource name of the policy: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogView" + } + ]; +} + +// The parameters to `DeleteView`. +message DeleteViewRequest { + // Required. The full resource name of the view to delete: + // + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // + // Example: + // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "logging.googleapis.com/LogView" + } + ]; +} + // The parameters to `ListSinks`. message ListSinksRequest { // Required. The parent resource whose sinks are to be listed: diff --git a/google/cloud/logging_v2/proto/logging_metrics.proto b/google/cloud/logging_v2/proto/logging_metrics.proto index eb9f73ffa..09d629648 100644 --- a/google/cloud/logging_v2/proto/logging_metrics.proto +++ b/google/cloud/logging_v2/proto/logging_metrics.proto @@ -92,8 +92,8 @@ service MetricsServiceV2 { // Describes a logs-based metric. The value of the metric is the number of log // entries that match a logs filter in a given time interval. // -// Logs-based metric can also be used to extract values from logs and create a -// a distribution of the values. The distribution records the statistics of the +// Logs-based metrics can also be used to extract values from logs and create a +// distribution of the values. The distribution records the statistics of the // extracted values along with an optional histogram of the values as specified // by the bucket options. 
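A sketch of such a distribution metric; `value_extractor` and `bucket_options` are standard LogMetric fields that are not part of this diff, and the extractor expression is illustrative:

    from google.api import distribution_pb2
    from google.cloud.logging_v2.types import logging_metrics

    metric = logging_metrics.LogMetric(
        name="response_size",
        filter='resource.type = "gce_instance"',
        # Pull a numeric value out of each matching entry.
        value_extractor="EXTRACT(jsonPayload.response_size)",
        # Record the extracted values in an exponential histogram.
        bucket_options=distribution_pb2.Distribution.BucketOptions(
            exponential_buckets=distribution_pb2.Distribution.BucketOptions.Exponential(
                num_finite_buckets=16, growth_factor=2.0, scale=1.0,
            )
        ),
    )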
message LogMetric { diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index d025f5916..73737c1d8 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -58,6 +58,8 @@ class ConfigServiceV2AsyncClient: ) log_sink_path = staticmethod(ConfigServiceV2Client.log_sink_path) parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) + log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) + parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) common_billing_account_path = staticmethod( ConfigServiceV2Client.common_billing_account_path @@ -161,12 +163,11 @@ async def list_buckets( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: - r"""Lists buckets (Beta). + r"""Lists buckets. Args: request (:class:`~.logging_config.ListBucketsRequest`): - The request object. The parameters to `ListBuckets` - (Beta). + The request object. The parameters to `ListBuckets`. parent (:class:`str`): Required. The parent resource whose buckets are to be listed: @@ -193,7 +194,7 @@ async def list_buckets( Returns: ~.pagers.ListBucketsAsyncPager: - The response from ListBuckets (Beta). + The response from ListBuckets. Iterating over this object will yield results and resolve additional pages automatically. @@ -251,12 +252,11 @@ async def get_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket (Beta). + r"""Gets a bucket. Args: request (:class:`~.logging_config.GetBucketRequest`): - The request object. The parameters to `GetBucket` - (Beta). + The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -266,9 +266,7 @@ async def get_bucket( Returns: ~.logging_config.LogBucket: - Describes a repository of logs - (Beta). - + Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -294,6 +292,56 @@ async def get_bucket( # Done; return the response. return response + async def create_bucket( + self, + request: logging_config.CreateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Args: + request (:class:`~.logging_config.CreateBucketRequest`): + The request object. The parameters to `CreateBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
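The async surface mirrors the sync one; a runnable sketch of the new create_bucket coroutine, assuming ConfigServiceV2AsyncClient is exported alongside the sync client and using a placeholder project id:

    import asyncio

    from google.cloud.logging_v2.services.config_service_v2 import (
        ConfigServiceV2AsyncClient,
    )
    from google.cloud.logging_v2.types import logging_config

    async def main():
        client = ConfigServiceV2AsyncClient()
        bucket = await client.create_bucket(
            logging_config.CreateBucketRequest(
                parent="projects/my-project/locations/global",
                bucket_id="my-bucket",
                bucket=logging_config.LogBucket(retention_days=30),
            )
        )
        print(bucket.name)

    asyncio.run(main())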
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + async def update_bucket( self, request: logging_config.UpdateBucketRequest = None, @@ -312,13 +360,11 @@ async def update_bucket( If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION will be returned. - A buckets region may not be modified after it is created. This - method is in Beta. + A buckets region may not be modified after it is created. Args: request (:class:`~.logging_config.UpdateBucketRequest`): - The request object. The parameters to `UpdateBucket` - (Beta). + The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -328,9 +374,7 @@ async def update_bucket( Returns: ~.logging_config.LogBucket: - Describes a repository of logs - (Beta). - + Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -356,6 +400,372 @@ async def update_bucket( # Done; return the response. return response + async def delete_bucket( + self, + request: logging_config.DeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Args: + request (:class:`~.logging_config.DeleteBucketRequest`): + The request object. The parameters to `DeleteBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def undelete_bucket( + self, + request: logging_config.UndeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Args: + request (:class:`~.logging_config.UndeleteBucketRequest`): + The request object. The parameters to `UndeleteBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.undelete_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def list_views( + self, + request: logging_config.ListViewsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViewsAsyncPager: + r"""Lists views on a bucket. + + Args: + request (:class:`~.logging_config.ListViewsRequest`): + The request object. The parameters to `ListViews`. + parent (:class:`str`): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListViewsAsyncPager: + The response from ListViews. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.ListViewsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_views, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListViewsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_view( + self, + request: logging_config.GetViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Gets a view. + + Args: + request (:class:`~.logging_config.GetViewRequest`): + The request object. The parameters to `GetView`. 
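The ListViewsAsyncPager returned above resolves additional pages transparently; a usage sketch assuming an existing async client and a placeholder bucket resource name as `parent`:

    async def print_views(client, parent):
        # `parent` looks like
        # "projects/my-project/locations/global/buckets/my-bucket".
        pager = await client.list_views(parent=parent)
        async for view in pager:
            print(view.name, view.filter)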
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_view( + self, + request: logging_config.CreateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Args: + request (:class:`~.logging_config.CreateViewRequest`): + The request object. The parameters to `CreateView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_view( + self, + request: logging_config.UpdateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Args: + request (:class:`~.logging_config.UpdateViewRequest`): + The request object. The parameters to `UpdateView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. 
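Since UpdateView replaces only `filter`, the documented `updateMask=filter` pattern translates directly; a sketch assuming `client` is a ConfigServiceV2AsyncClient and `view_name` a full view resource name:

    from google.cloud.logging_v2.types import logging_config
    from google.protobuf import field_mask_pb2

    async def set_view_filter(client, view_name):
        # Only the masked `filter` field is replaced on the existing view.
        return await client.update_view(
            logging_config.UpdateViewRequest(
                name=view_name,
                view=logging_config.LogView(
                    filter='SOURCE("projects/my-project") AND LOG_ID("stdout")'
                ),
                update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
            )
        )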
+ + request = logging_config.UpdateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_view( + self, + request: logging_config.DeleteViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a view from a bucket. + + Args: + request (:class:`~.logging_config.DeleteViewRequest`): + The request object. The parameters to `DeleteView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + async def list_sinks( self, request: logging_config.ListSinksRequest = None, diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index ea9ee605a..a16f5f20b 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -192,6 +192,22 @@ def parse_log_sink_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def log_view_path(project: str, location: str, bucket: str, view: str,) -> str: + """Return a fully-qualified log_view string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, location=location, bucket=bucket, view=view, + ) + + @staticmethod + def parse_log_view_path(path: str) -> Dict[str, str]: + """Parse a log_view path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path(billing_account: str,) -> str: """Return a fully-qualified billing_account string.""" @@ -377,12 +393,11 @@ def list_buckets( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: - r"""Lists buckets (Beta). + r"""Lists buckets. Args: request (:class:`~.logging_config.ListBucketsRequest`): - The request object. The parameters to `ListBuckets` - (Beta). + The request object. 
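The new path helpers above avoid hand-assembled resource names; all ids below are placeholders:

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    # log_view_path formats the canonical pattern...
    name = ConfigServiceV2Client.log_view_path(
        "my-project", "global", "my-bucket", "my-view"
    )
    # name == "projects/my-project/locations/global/buckets/my-bucket/views/my-view"

    # ...and parse_log_view_path inverts it.
    parts = ConfigServiceV2Client.parse_log_view_path(name)
    # parts == {"project": "my-project", "location": "global",
    #           "bucket": "my-bucket", "view": "my-view"}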
The parameters to `ListBuckets`. parent (:class:`str`): Required. The parent resource whose buckets are to be listed: @@ -409,7 +424,7 @@ def list_buckets( Returns: ~.pagers.ListBucketsPager: - The response from ListBuckets (Beta). + The response from ListBuckets. Iterating over this object will yield results and resolve additional pages automatically. @@ -468,12 +483,11 @@ def get_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket (Beta). + r"""Gets a bucket. Args: request (:class:`~.logging_config.GetBucketRequest`): - The request object. The parameters to `GetBucket` - (Beta). + The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -483,9 +497,7 @@ def get_bucket( Returns: ~.logging_config.LogBucket: - Describes a repository of logs - (Beta). - + Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -512,6 +524,57 @@ def get_bucket( # Done; return the response. return response + def create_bucket( + self, + request: logging_config.CreateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Args: + request (:class:`~.logging_config.CreateBucketRequest`): + The request object. The parameters to `CreateBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + def update_bucket( self, request: logging_config.UpdateBucketRequest = None, @@ -530,13 +593,11 @@ def update_bucket( If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION will be returned. - A buckets region may not be modified after it is created. This - method is in Beta. + A buckets region may not be modified after it is created. Args: request (:class:`~.logging_config.UpdateBucketRequest`): - The request object. The parameters to `UpdateBucket` - (Beta). + The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
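Because of the request coercion shown above, a plain mapping works anywhere a request message does; the two calls below are equivalent (ids are placeholders):

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client()

    # Dict form: coerced into a CreateBucketRequest by the client.
    client.create_bucket(
        {
            "parent": "projects/my-project/locations/global",
            "bucket_id": "my-bucket",
            "bucket": {"retention_days": 30},
        }
    )

    # Message form: passed through without copying.
    client.create_bucket(
        logging_config.CreateBucketRequest(
            parent="projects/my-project/locations/global",
            bucket_id="my-bucket",
            bucket=logging_config.LogBucket(retention_days=30),
        )
    )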
@@ -546,9 +607,7 @@ def update_bucket( Returns: ~.logging_config.LogBucket: - Describes a repository of logs - (Beta). - + Describes a repository of logs. """ # Create or coerce a protobuf request object. @@ -575,6 +634,379 @@ def update_bucket( # Done; return the response. return response + def delete_bucket( + self, + request: logging_config.DeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Args: + request (:class:`~.logging_config.DeleteBucketRequest`): + The request object. The parameters to `DeleteBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def undelete_bucket( + self, + request: logging_config.UndeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Args: + request (:class:`~.logging_config.UndeleteBucketRequest`): + The request object. The parameters to `UndeleteBucket`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UndeleteBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def list_views( + self, + request: logging_config.ListViewsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViewsPager: + r"""Lists views on a bucket. + + Args: + request (:class:`~.logging_config.ListViewsRequest`): + The request object. The parameters to `ListViews`. + parent (:class:`str`): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListViewsPager: + The response from ListViews. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListViewsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListViewsRequest): + request = logging_config.ListViewsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_views] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListViewsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_view( + self, + request: logging_config.GetViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Gets a view. + + Args: + request (:class:`~.logging_config.GetViewRequest`): + The request object. The parameters to `GetView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. 
+ + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_view( + self, + request: logging_config.CreateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Args: + request (:class:`~.logging_config.CreateViewRequest`): + The request object. The parameters to `CreateView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_view( + self, + request: logging_config.UpdateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Args: + request (:class:`~.logging_config.UpdateViewRequest`): + The request object. The parameters to `UpdateView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateViewRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateViewRequest): + request = logging_config.UpdateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_view( + self, + request: logging_config.DeleteViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a view from a bucket. + + Args: + request (:class:`~.logging_config.DeleteViewRequest`): + The request object. The parameters to `DeleteView`. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + def list_sinks( self, request: logging_config.ListSinksRequest = None, diff --git a/google/cloud/logging_v2/services/config_service_v2/pagers.py b/google/cloud/logging_v2/services/config_service_v2/pagers.py index 173780b5e..8e1c4ee0d 100644 --- a/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -148,6 +148,134 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListViewsPager: + """A pager for iterating through ``list_views`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListViewsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``views`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListViews`` requests and continue to iterate + through the ``views`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListViewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
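In practice the sync pager is consumed either item-wise or page-wise; a sketch with a placeholder parent:

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    client = ConfigServiceV2Client()
    parent = "projects/my-project/locations/global/buckets/my-bucket"

    # Item-wise: __iter__ walks `views` across pages, fetching lazily.
    for view in client.list_views(parent=parent):
        print(view.name)

    # Page-wise: `pages` yields each ListViewsResponse, e.g. for batching.
    for page in client.list_views(parent=parent).pages:
        print(len(page.views), page.next_page_token)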
+ """ + + def __init__( + self, + method: Callable[..., logging_config.ListViewsResponse], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListViewsRequest`): + The initial request object. + response (:class:`~.logging_config.ListViewsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogView]: + for page in self.pages: + yield from page.views + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListViewsAsyncPager: + """A pager for iterating through ``list_views`` requests. + + This class thinly wraps an initial + :class:`~.logging_config.ListViewsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``views`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListViews`` requests and continue to iterate + through the ``views`` field on the + corresponding responses. + + All the usual :class:`~.logging_config.ListViewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListViewsResponse]], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.logging_config.ListViewsRequest`): + The initial request object. + response (:class:`~.logging_config.ListViewsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogView]: + async def async_generator(): + async for page in self.pages: + for response in page.views: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListSinksPager: """A pager for iterating through ``list_sinks`` requests. diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index a0393aa98..3e17598fe 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -116,9 +116,33 @@ def _prep_wrapped_messages(self, client_info): self.get_bucket: gapic_v1.method.wrap_method( self.get_bucket, default_timeout=None, client_info=client_info, ), + self.create_bucket: gapic_v1.method.wrap_method( + self.create_bucket, default_timeout=None, client_info=client_info, + ), self.update_bucket: gapic_v1.method.wrap_method( self.update_bucket, default_timeout=None, client_info=client_info, ), + self.delete_bucket: gapic_v1.method.wrap_method( + self.delete_bucket, default_timeout=None, client_info=client_info, + ), + self.undelete_bucket: gapic_v1.method.wrap_method( + self.undelete_bucket, default_timeout=None, client_info=client_info, + ), + self.list_views: gapic_v1.method.wrap_method( + self.list_views, default_timeout=None, client_info=client_info, + ), + self.get_view: gapic_v1.method.wrap_method( + self.get_view, default_timeout=None, client_info=client_info, + ), + self.create_view: gapic_v1.method.wrap_method( + self.create_view, default_timeout=None, client_info=client_info, + ), + self.update_view: gapic_v1.method.wrap_method( + self.update_view, default_timeout=None, client_info=client_info, + ), + self.delete_view: gapic_v1.method.wrap_method( + self.delete_view, default_timeout=None, client_info=client_info, + ), self.list_sinks: gapic_v1.method.wrap_method( self.list_sinks, default_retry=retries.Retry( @@ -266,6 +290,17 @@ def get_bucket( ]: raise NotImplementedError() + @property + def create_bucket( + self, + ) -> typing.Callable[ + [logging_config.CreateBucketRequest], + typing.Union[ + logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] + ], + ]: + raise NotImplementedError() + @property def update_bucket( self, @@ -277,6 +312,72 @@ def update_bucket( ]: raise NotImplementedError() + @property + def delete_bucket( + self, + ) -> typing.Callable[ + [logging_config.DeleteBucketRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def undelete_bucket( + self, + ) -> typing.Callable[ + [logging_config.UndeleteBucketRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def list_views( + self, + ) -> typing.Callable[ + 
[logging_config.ListViewsRequest], + typing.Union[ + logging_config.ListViewsResponse, + typing.Awaitable[logging_config.ListViewsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_view( + self, + ) -> typing.Callable[ + [logging_config.GetViewRequest], + typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + ]: + raise NotImplementedError() + + @property + def create_view( + self, + ) -> typing.Callable[ + [logging_config.CreateViewRequest], + typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + ]: + raise NotImplementedError() + + @property + def update_view( + self, + ) -> typing.Callable[ + [logging_config.UpdateViewRequest], + typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + ]: + raise NotImplementedError() + + @property + def delete_view( + self, + ) -> typing.Callable[ + [logging_config.DeleteViewRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + @property def list_sinks( self, diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 5603beeb5..f083373b1 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -238,7 +238,7 @@ def list_buckets( ]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets (Beta). + Lists buckets. Returns: Callable[[~.ListBucketsRequest], @@ -264,7 +264,7 @@ def get_bucket( ) -> Callable[[logging_config.GetBucketRequest], logging_config.LogBucket]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket (Beta). + Gets a bucket. Returns: Callable[[~.GetBucketRequest], @@ -284,6 +284,34 @@ def get_bucket( ) return self._stubs["get_bucket"] + @property + def create_bucket( + self, + ) -> Callable[[logging_config.CreateBucketRequest], logging_config.LogBucket]: + r"""Return a callable for the create bucket method over gRPC. + + Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + ~.LogBucket]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_bucket" not in self._stubs: + self._stubs["create_bucket"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucket", + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs["create_bucket"] + @property def update_bucket( self, @@ -300,8 +328,7 @@ def update_bucket( If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION will be returned. - A buckets region may not be modified after it is created. This - method is in Beta. + A buckets region may not be modified after it is created. Returns: Callable[[~.UpdateBucketRequest], @@ -321,6 +348,191 @@ def update_bucket( ) return self._stubs["update_bucket"] + @property + def delete_bucket( + self, + ) -> Callable[[logging_config.DeleteBucketRequest], empty.Empty]: + r"""Return a callable for the delete bucket method over gRPC. + + Deletes a bucket. 
Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Returns: + Callable[[~.DeleteBucketRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_bucket" not in self._stubs: + self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteBucket", + request_serializer=logging_config.DeleteBucketRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_bucket"] + + @property + def undelete_bucket( + self, + ) -> Callable[[logging_config.UndeleteBucketRequest], empty.Empty]: + r"""Return a callable for the undelete bucket method over gRPC. + + Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Returns: + Callable[[~.UndeleteBucketRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "undelete_bucket" not in self._stubs: + self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UndeleteBucket", + request_serializer=logging_config.UndeleteBucketRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["undelete_bucket"] + + @property + def list_views( + self, + ) -> Callable[[logging_config.ListViewsRequest], logging_config.ListViewsResponse]: + r"""Return a callable for the list views method over gRPC. + + Lists views on a bucket. + + Returns: + Callable[[~.ListViewsRequest], + ~.ListViewsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_views" not in self._stubs: + self._stubs["list_views"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", + request_serializer=logging_config.ListViewsRequest.serialize, + response_deserializer=logging_config.ListViewsResponse.deserialize, + ) + return self._stubs["list_views"] + + @property + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], logging_config.LogView]: + r"""Return a callable for the get view method over gRPC. + + Gets a view. + + Returns: + Callable[[~.GetViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
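# [Editorial sketch] Every transport property in this file repeats the same
# lazy-initialization idiom commented on above. Distilled to its core, with a
# hypothetical helper that is not part of this patch:
def _cached_stub(self, name, path, request_serializer, response_deserializer):
    # Build the gRPC callable once, cache it in self._stubs, reuse afterwards.
    if name not in self._stubs:
        self._stubs[name] = self.grpc_channel.unary_unary(
            path,
            request_serializer=request_serializer,
            response_deserializer=response_deserializer,
        )
    return self._stubs[name]
    # With such a helper, e.g. get_view would reduce to:
    # return self._cached_stub(
    #     "get_view",
    #     "/google.logging.v2.ConfigServiceV2/GetView",
    #     logging_config.GetViewRequest.serialize,
    #     logging_config.LogView.deserialize,
    # )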
+ if "get_view" not in self._stubs: + self._stubs["get_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", + request_serializer=logging_config.GetViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["get_view"] + + @property + def create_view( + self, + ) -> Callable[[logging_config.CreateViewRequest], logging_config.LogView]: + r"""Return a callable for the create view method over gRPC. + + Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Returns: + Callable[[~.CreateViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_view" not in self._stubs: + self._stubs["create_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", + request_serializer=logging_config.CreateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["create_view"] + + @property + def update_view( + self, + ) -> Callable[[logging_config.UpdateViewRequest], logging_config.LogView]: + r"""Return a callable for the update view method over gRPC. + + Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Returns: + Callable[[~.UpdateViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_view" not in self._stubs: + self._stubs["update_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", + request_serializer=logging_config.UpdateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["update_view"] + + @property + def delete_view(self) -> Callable[[logging_config.DeleteViewRequest], empty.Empty]: + r"""Return a callable for the delete view method over gRPC. + + Deletes a view from a bucket. + + Returns: + Callable[[~.DeleteViewRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", + request_serializer=logging_config.DeleteViewRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_view"] + @property def list_sinks( self, diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index a4c94db22..7376164e4 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -243,7 +243,7 @@ def list_buckets( ]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets (Beta). 
+ Lists buckets. Returns: Callable[[~.ListBucketsRequest], @@ -271,7 +271,7 @@ def get_bucket( ]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket (Beta). + Gets a bucket. Returns: Callable[[~.GetBucketRequest], @@ -291,6 +291,36 @@ def get_bucket( ) return self._stubs["get_bucket"] + @property + def create_bucket( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], Awaitable[logging_config.LogBucket] + ]: + r"""Return a callable for the create bucket method over gRPC. + + Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + Awaitable[~.LogBucket]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_bucket" not in self._stubs: + self._stubs["create_bucket"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucket", + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs["create_bucket"] + @property def update_bucket( self, @@ -309,8 +339,7 @@ def update_bucket( If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION will be returned. - A buckets region may not be modified after it is created. This - method is in Beta. + A buckets region may not be modified after it is created. Returns: Callable[[~.UpdateBucketRequest], @@ -330,6 +359,199 @@ def update_bucket( ) return self._stubs["update_bucket"] + @property + def delete_bucket( + self, + ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete bucket method over gRPC. + + Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Returns: + Callable[[~.DeleteBucketRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_bucket" not in self._stubs: + self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteBucket", + request_serializer=logging_config.DeleteBucketRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_bucket"] + + @property + def undelete_bucket( + self, + ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the undelete bucket method over gRPC. + + Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Returns: + Callable[[~.UndeleteBucketRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
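[Editorial sketch] Taken together, the new bucket RPCs form a small lifecycle: create, delete (which parks the bucket in DELETE_REQUESTED for 7 days), and undelete within that grace period. A sketch against the synchronous client; identifiers are illustrative:

from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config

client = ConfigServiceV2Client()

bucket = client.create_bucket(
    request=logging_config.CreateBucketRequest(
        parent="projects/my-project/locations/global",
        bucket_id="my-bucket",
        bucket=logging_config.LogBucket(retention_days=30),
    )
)

# DeleteBucket moves the bucket to DELETE_REQUESTED; it is purged after 7 days.
client.delete_bucket(request=logging_config.DeleteBucketRequest(name=bucket.name))

# UndeleteBucket restores the bucket while still inside the grace period.
client.undelete_bucket(request=logging_config.UndeleteBucketRequest(name=bucket.name))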
+ if "undelete_bucket" not in self._stubs: + self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UndeleteBucket", + request_serializer=logging_config.UndeleteBucketRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["undelete_bucket"] + + @property + def list_views( + self, + ) -> Callable[ + [logging_config.ListViewsRequest], Awaitable[logging_config.ListViewsResponse] + ]: + r"""Return a callable for the list views method over gRPC. + + Lists views on a bucket. + + Returns: + Callable[[~.ListViewsRequest], + Awaitable[~.ListViewsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_views" not in self._stubs: + self._stubs["list_views"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", + request_serializer=logging_config.ListViewsRequest.serialize, + response_deserializer=logging_config.ListViewsResponse.deserialize, + ) + return self._stubs["list_views"] + + @property + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], Awaitable[logging_config.LogView]]: + r"""Return a callable for the get view method over gRPC. + + Gets a view. + + Returns: + Callable[[~.GetViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_view" not in self._stubs: + self._stubs["get_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", + request_serializer=logging_config.GetViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["get_view"] + + @property + def create_view( + self, + ) -> Callable[ + [logging_config.CreateViewRequest], Awaitable[logging_config.LogView] + ]: + r"""Return a callable for the create view method over gRPC. + + Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Returns: + Callable[[~.CreateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_view" not in self._stubs: + self._stubs["create_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", + request_serializer=logging_config.CreateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["create_view"] + + @property + def update_view( + self, + ) -> Callable[ + [logging_config.UpdateViewRequest], Awaitable[logging_config.LogView] + ]: + r"""Return a callable for the update view method over gRPC. + + Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Returns: + Callable[[~.UpdateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_view" not in self._stubs: + self._stubs["update_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", + request_serializer=logging_config.UpdateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs["update_view"] + + @property + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete view method over gRPC. + + Deletes a view from a bucket. + + Returns: + Callable[[~.DeleteViewRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", + request_serializer=logging_config.DeleteViewRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_view"] + @property def list_sinks( self, diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 93dfbd71b..bd3c759a1 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -273,8 +273,8 @@ async def get_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an @@ -379,8 +379,8 @@ async def create_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an @@ -475,8 +475,8 @@ async def update_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index f4bca3926..d03ce86cd 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -452,8 +452,8 @@ def get_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. 
- Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an @@ -549,8 +549,8 @@ def create_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an @@ -646,8 +646,8 @@ def update_log_metric( value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to - extract values from logs and create a a + Logs-based metrics can also be used to + extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an diff --git a/google/cloud/logging_v2/types/__init__.py b/google/cloud/logging_v2/types/__init__.py index ab5f9c8c1..b24bf3b8c 100644 --- a/google/cloud/logging_v2/types/__init__.py +++ b/google/cloud/logging_v2/types/__init__.py @@ -22,12 +22,22 @@ ) from .logging_config import ( LogBucket, + LogView, LogSink, BigQueryOptions, ListBucketsRequest, ListBucketsResponse, + CreateBucketRequest, UpdateBucketRequest, GetBucketRequest, + DeleteBucketRequest, + UndeleteBucketRequest, + ListViewsRequest, + ListViewsResponse, + CreateViewRequest, + UpdateViewRequest, + GetViewRequest, + DeleteViewRequest, ListSinksRequest, ListSinksResponse, GetSinkRequest, @@ -73,12 +83,22 @@ "LogEntryOperation", "LogEntrySourceLocation", "LogBucket", + "LogView", "LogSink", "BigQueryOptions", "ListBucketsRequest", "ListBucketsResponse", + "CreateBucketRequest", "UpdateBucketRequest", "GetBucketRequest", + "DeleteBucketRequest", + "UndeleteBucketRequest", + "ListViewsRequest", + "ListViewsResponse", + "CreateViewRequest", + "UpdateViewRequest", + "GetViewRequest", + "DeleteViewRequest", "ListSinksRequest", "ListSinksResponse", "GetSinkRequest", diff --git a/google/cloud/logging_v2/types/logging_config.py b/google/cloud/logging_v2/types/logging_config.py index 2161d6872..aaf057acf 100644 --- a/google/cloud/logging_v2/types/logging_config.py +++ b/google/cloud/logging_v2/types/logging_config.py @@ -27,12 +27,22 @@ manifest={ "LifecycleState", "LogBucket", + "LogView", "LogSink", "BigQueryOptions", "ListBucketsRequest", "ListBucketsResponse", + "CreateBucketRequest", "UpdateBucketRequest", "GetBucketRequest", + "DeleteBucketRequest", + "UndeleteBucketRequest", + "ListViewsRequest", + "ListViewsResponse", + "CreateViewRequest", + "UpdateViewRequest", + "GetViewRequest", + "DeleteViewRequest", "ListSinksRequest", "ListSinksResponse", "GetSinkRequest", @@ -54,20 +64,20 @@ class LifecycleState(proto.Enum): - r"""LogBucket lifecycle states (Beta).""" + r"""LogBucket lifecycle states.""" LIFECYCLE_STATE_UNSPECIFIED = 0 ACTIVE = 1 DELETE_REQUESTED = 2 class LogBucket(proto.Message): - r"""Describes a repository of logs (Beta). + r"""Describes a repository of logs. Attributes: name (str): The resource name of the bucket. 
For example: "projects/my-project-id/locations/my-location/buckets/my-bucket-id - The supported locations are: "global" "us-central1" + The supported locations are: "global" For the location of ``global`` it is unspecified where logs are actually stored. Once a bucket has been created, the @@ -88,6 +98,11 @@ class LogBucket(proto.Message): period is 1 day. If this value is set to zero at bucket creation time, the default time of 30 days will be used. + locked (bool): + Whether the bucket has been locked. + The retention period on a locked bucket may not + be changed. Locked buckets may only be deleted + if they are empty. lifecycle_state (~.logging_config.LifecycleState): Output only. The bucket lifecycle state. """ @@ -102,9 +117,48 @@ class LogBucket(proto.Message): retention_days = proto.Field(proto.INT32, number=11) + locked = proto.Field(proto.BOOL, number=9) + lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",) +class LogView(proto.Message): + r"""Describes a view over logs in a bucket. + + Attributes: + name (str): + The resource name of the view. + For example + "projects/my-project-id/locations/my- + location/buckets/my-bucket-id/views/my-view + description (str): + Describes this view. + create_time (~.timestamp.Timestamp): + Output only. The creation timestamp of the + view. + update_time (~.timestamp.Timestamp): + Output only. The last update timestamp of the + view. + filter (str): + Filter that restricts which log entries in a bucket are + visible in this view. Filters are restricted to be a logical + AND of ==/!= of any of the following: originating + project/folder/organization/billing account. resource type + log id Example: SOURCE("projects/myproject") AND + resource.type = "gce_instance" AND LOG_ID("stdout") + """ + + name = proto.Field(proto.STRING, number=1) + + description = proto.Field(proto.STRING, number=3) + + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + filter = proto.Field(proto.STRING, number=7) + + class LogSink(proto.Message): r"""Describes a sink used to export log entries to one of the following destinations in any project: a Cloud Storage bucket, a @@ -152,10 +206,15 @@ class LogSink(proto.Message): disabled (bool): Optional. If set to True, then this sink is disabled and it does not export any log entries. + exclusions (Sequence[~.logging_config.LogExclusion]): + Optional. Log entries that match any of the exclusion + filters will not be exported. If a log entry is matched by + both ``filter`` and one of ``exclusion_filters`` it will not + be exported. output_version_format (~.logging_config.LogSink.VersionFormat): Deprecated. This field is unused. writer_identity (str): - Output only. An IAM identity–a service account or + Output only. An IAM identity—a service account or group—under which Logging writes the exported log entries to the sink's destination. This field is set by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] @@ -218,6 +277,8 @@ class VersionFormat(proto.Enum): disabled = proto.Field(proto.BOOL, number=19) + exclusions = proto.RepeatedField(proto.MESSAGE, number=16, message="LogExclusion",) + output_version_format = proto.Field(proto.ENUM, number=6, enum=VersionFormat,) writer_identity = proto.Field(proto.STRING, number=8) @@ -264,7 +325,7 @@ class BigQueryOptions(proto.Message): class ListBucketsRequest(proto.Message): - r"""The parameters to ``ListBuckets`` (Beta). 
+ r"""The parameters to ``ListBuckets``. Attributes: parent (str): @@ -302,7 +363,7 @@ class ListBucketsRequest(proto.Message): class ListBucketsResponse(proto.Message): - r"""The response from ListBuckets (Beta). + r"""The response from ListBuckets. Attributes: buckets (Sequence[~.logging_config.LogBucket]): @@ -323,8 +384,39 @@ def raw_page(self): next_page_token = proto.Field(proto.STRING, number=2) +class CreateBucketRequest(proto.Message): + r"""The parameters to ``CreateBucket``. + + Attributes: + parent (str): + Required. The resource in which to create the bucket: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + + Example: ``"projects/my-logging-project/locations/global"`` + bucket_id (str): + Required. A client-assigned identifier such as + ``"my-bucket"``. Identifiers are limited to 100 characters + and can include only letters, digits, underscores, hyphens, + and periods. + bucket (~.logging_config.LogBucket): + Required. The new bucket. The region + specified in the new bucket must be compliant + with any Location Restriction Org Policy. The + name field in the bucket is ignored. + """ + + parent = proto.Field(proto.STRING, number=1) + + bucket_id = proto.Field(proto.STRING, number=2) + + bucket = proto.Field(proto.MESSAGE, number=3, message="LogBucket",) + + class UpdateBucketRequest(proto.Message): - r"""The parameters to ``UpdateBucket`` (Beta). + r"""The parameters to ``UpdateBucket``. Attributes: name (str): @@ -364,7 +456,7 @@ class UpdateBucketRequest(proto.Message): class GetBucketRequest(proto.Message): - r"""The parameters to ``GetBucket`` (Beta). + r"""The parameters to ``GetBucket``. Attributes: name (str): @@ -384,6 +476,196 @@ class GetBucketRequest(proto.Message): name = proto.Field(proto.STRING, number=1) +class DeleteBucketRequest(proto.Message): + r"""The parameters to ``DeleteBucket``. + + Attributes: + name (str): + Required. The full resource name of the bucket to delete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class UndeleteBucketRequest(proto.Message): + r"""The parameters to ``UndeleteBucket``. + + Attributes: + name (str): + Required. The full resource name of the bucket to undelete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListViewsRequest(proto.Message): + r"""The parameters to ``ListViews``. + + Attributes: + parent (str): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. 
The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_token = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + +class ListViewsResponse(proto.Message): + r"""The response from ListViews. + + Attributes: + views (Sequence[~.logging_config.LogView]): + A list of views. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + views = proto.RepeatedField(proto.MESSAGE, number=1, message="LogView",) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class CreateViewRequest(proto.Message): + r"""The parameters to ``CreateView``. + + Attributes: + parent (str): + Required. The bucket in which to create the view + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"`` + view_id (str): + Required. The id to use for this view. + view (~.logging_config.LogView): + Required. The new view. + """ + + parent = proto.Field(proto.STRING, number=1) + + view_id = proto.Field(proto.STRING, number=2) + + view = proto.Field(proto.MESSAGE, number=3, message="LogView",) + + +class UpdateViewRequest(proto.Message): + r"""The parameters to ``UpdateView``. + + Attributes: + name (str): + Required. The full resource name of the view to update + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + view (~.logging_config.LogView): + Required. The updated view. + update_mask (~.field_mask.FieldMask): + Optional. Field mask that specifies the fields in ``view`` + that need an update. A field will be overwritten if, and + only if, it is in the update mask. ``name`` and output only + fields cannot be updated. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + """ + + name = proto.Field(proto.STRING, number=1) + + view = proto.Field(proto.MESSAGE, number=2, message="LogView",) + + update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + + +class GetViewRequest(proto.Message): + r"""The parameters to ``GetView``. + + Attributes: + name (str): + Required. The resource name of the policy: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class DeleteViewRequest(proto.Message): + r"""The parameters to ``DeleteView``. + + Attributes: + name (str): + Required. 
The full resource name of the view to delete: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + """ + + name = proto.Field(proto.STRING, number=1) + + class ListSinksRequest(proto.Message): r"""The parameters to ``ListSinks``. diff --git a/google/cloud/logging_v2/types/logging_metrics.py b/google/cloud/logging_v2/types/logging_metrics.py index 2f7c5b472..a9642d13b 100644 --- a/google/cloud/logging_v2/types/logging_metrics.py +++ b/google/cloud/logging_v2/types/logging_metrics.py @@ -41,8 +41,8 @@ class LogMetric(proto.Message): r"""Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a given time interval. - Logs-based metric can also be used to extract values from logs - and create a a distribution of the values. The distribution + Logs-based metrics can also be used to extract values from logs + and create a distribution of the values. The distribution records the statistics of the extracted values along with an optional histogram of the values as specified by the bucket options. diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index b90eef00f..2a0629078 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -148,6 +148,8 @@ def lint(session): "." ] session.run("flake8", *args) + + # # Black # diff --git a/synth.metadata b/synth.metadata index 05600e4ce..4e26d4877 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "4b0ad15b0ff483486ae90d73092e7be00f8c1848", - "internalRef": "341842584" + "sha": "be0bdf86cd31aa7c1a7b30a9a2e9f2fd53ee3d91", + "internalRef": "342353190" } }, { diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index 469684436..25e35e5c6 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -800,6 +800,7 @@ def test_get_bucket( name="name_value", description="description_value", retention_days=1512, + locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) @@ -821,16 +822,1163 @@ def test_get_bucket( assert response.retention_days == 1512 + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_get_bucket_from_dict(): + test_get_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + + response = await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_get_bucket_async_from_dict(): + await test_get_bucket_async(request_type=dict) + + +def test_get_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_bucket( + transport: str = "grpc", request_type=logging_config.CreateBucketRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + + response = client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateBucketRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_create_bucket_from_dict(): + test_create_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + + response = await client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_get_bucket_from_dict(): - test_get_bucket(request_type=dict) +@pytest.mark.asyncio +async def test_create_bucket_async_from_dict(): + await test_create_bucket_async(request_type=dict) + + +def test_create_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateBucketRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + + client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateBucketRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + + await client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_update_bucket( + transport: str = "grpc", request_type=logging_config.UpdateBucketRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + + response = client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_update_bucket_from_dict(): + test_update_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + + response = await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.retention_days == 1512 + + assert response.locked is True + + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_update_bucket_async_from_dict(): + await test_update_bucket_async(request_type=dict) + + +def test_update_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + + client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + + await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_bucket( + transport: str = "grpc", request_type=logging_config.DeleteBucketRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_bucket_from_dict(): + test_delete_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_bucket_async_from_dict(): + await test_delete_bucket_async(request_type=dict) + + +def test_delete_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = None + + client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_undelete_bucket( + transport: str = "grpc", request_type=logging_config.UndeleteBucketRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UndeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_undelete_bucket_from_dict(): + test_undelete_bucket(request_type=dict) + + +@pytest.mark.asyncio +async def test_undelete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.UndeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_undelete_bucket_async_from_dict(): + await test_undelete_bucket_async(request_type=dict) + + +def test_undelete_bucket_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UndeleteBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = None + + client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_undelete_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.UndeleteBucketRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_list_views( + transport: str = "grpc", request_type=logging_config.ListViewsRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListViewsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListViewsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_views_from_dict(): + test_list_views(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_views_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.ListViewsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_views_async_from_dict(): + await test_list_views_async(request_type=dict) + + +def test_list_views_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = logging_config.ListViewsRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+        call.return_value = logging_config.ListViewsResponse()
+
+        client.list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
[email protected]
+async def test_list_views_field_headers_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListViewsRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_config.ListViewsResponse()
+        )
+
+        await client.list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_views_flattened():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.ListViewsResponse()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_views(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+def test_list_views_flattened_error():
+    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_views(
+            logging_config.ListViewsRequest(), parent="parent_value",
+        )
+
+
[email protected]
+async def test_list_views_flattened_async():
+    client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            logging_config.ListViewsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_views(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
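The flattened-argument tests pin down two behaviours: a keyword argument is sugar for populating the request object, and mixing it with an explicit request object is rejected. A condensed, runnable sketch of both (anonymous credentials and a patched transport, as in the tests above):

    from unittest import mock

    import pytest
    from google.auth import credentials
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials())

    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
        call.return_value = logging_config.ListViewsResponse()
        client.list_views(parent="parent_value")
        _, args, _ = call.mock_calls[0]
        # The keyword was folded into a full request object.
        assert args[0] == logging_config.ListViewsRequest(parent="parent_value")

    # Supplying both a request object and flattened fields is ambiguous,
    # so the generated client raises rather than guessing.
    with pytest.raises(ValueError):
        client.list_views(logging_config.ListViewsRequest(), parent="parent_value")
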
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_views_flattened_error_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_views( + logging_config.ListViewsRequest(), parent="parent_value", + ) + + +def test_list_views_pager(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse(views=[], next_page_token="def",), + logging_config.ListViewsResponse( + views=[logging_config.LogView(),], next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[logging_config.LogView(), logging_config.LogView(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_views(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogView) for i in results) + + +def test_list_views_pages(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse(views=[], next_page_token="def",), + logging_config.ListViewsResponse( + views=[logging_config.LogView(),], next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[logging_config.LogView(), logging_config.LogView(),], + ), + RuntimeError, + ) + pages = list(client.list_views(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_views_async_pager(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse(views=[], next_page_token="def",), + logging_config.ListViewsResponse( + views=[logging_config.LogView(),], next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[logging_config.LogView(), logging_config.LogView(),], + ), + RuntimeError, + ) + async_pager = await client.list_views(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogView) for i in responses) + + +@pytest.mark.asyncio +async def test_list_views_async_pages(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse(views=[], next_page_token="def",), + logging_config.ListViewsResponse( + views=[logging_config.LogView(),], next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[logging_config.LogView(), logging_config.LogView(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_views(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRequest): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name="name_value", description="description_value", filter="filter_value", + ) + + response = client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetViewRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogView) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + +def test_get_view_from_dict(): + test_get_view(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
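The pager fixtures above stitch four fake pages together; at the call site the pager hides those page boundaries entirely. Reusing the patched-client sketch from earlier, a smaller two-page version of the same idea (illustrative page contents):

    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
        call.side_effect = (
            logging_config.ListViewsResponse(
                views=[logging_config.LogView(), logging_config.LogView()],
                next_page_token="abc",
            ),
            logging_config.ListViewsResponse(views=[logging_config.LogView()]),
        )
        pager = client.list_views(request={})

        # Iterating yields LogView items across pages; the follow-up request
        # for page two is only issued once page one is exhausted.
        assert len(list(pager)) == 3
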
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + + response = await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.GetViewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + +@pytest.mark.asyncio +async def test_get_view_async_from_dict(): + await test_get_view_async(request_type=dict) + + +def test_get_view_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = logging_config.LogView() + + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio -async def test_get_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest +async def test_get_view_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) + + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_view( + transport: str = "grpc", request_type=logging_config.CreateViewRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.LogView( + name="name_value", description="description_value", filter="filter_value", + ) + + response = client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.CreateViewRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, logging_config.LogView) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.filter == "filter_value" + + +def test_create_view_from_dict(): + test_create_view(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2AsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -841,55 +1989,52 @@ async def test_get_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket( + logging_config.LogView( name="name_value", description="description_value", - retention_days=1512, - lifecycle_state=logging_config.LifecycleState.ACTIVE, + filter="filter_value", ) ) - response = await client.get_bucket(request) + response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) + assert isinstance(response, logging_config.LogView) assert response.name == "name_value" assert response.description == "description_value" - assert response.retention_days == 1512 - - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.filter == "filter_value" @pytest.mark.asyncio -async def test_get_bucket_async_from_dict(): - await test_get_bucket_async(request_type=dict) +async def test_create_view_async_from_dict(): + await test_create_view_async(request_type=dict) -def test_get_bucket_field_headers(): +def test_create_view_field_headers(): client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetBucketRequest() - request.name = "name/value" + request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: - call.return_value = logging_config.LogBucket() + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = logging_config.LogView() - client.get_bucket(request) + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -898,25 +2043,25 @@ def test_get_bucket_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio -async def test_get_bucket_field_headers_async(): +async def test_create_view_field_headers_async(): client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetBucketRequest() - request.name = "name/value" + request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket() + logging_config.LogView() ) - await client.get_bucket(request) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -925,11 +2070,11 @@ async def test_get_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] -def test_update_bucket( - transport: str = "grpc", request_type=logging_config.UpdateBucketRequest +def test_update_view( + transport: str = "grpc", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2Client( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -940,43 +2085,38 @@ def test_update_bucket( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - lifecycle_state=logging_config.LifecycleState.ACTIVE, + call.return_value = logging_config.LogView( + name="name_value", description="description_value", filter="filter_value", ) - response = client.update_bucket(request) + response = client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) + assert isinstance(response, logging_config.LogView) assert response.name == "name_value" assert response.description == "description_value" - assert response.retention_days == 1512 - - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.filter == "filter_value" -def test_update_bucket_from_dict(): - test_update_bucket(request_type=dict) +def test_update_view_from_dict(): + test_update_view(request_type=dict) @pytest.mark.asyncio -async def test_update_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +async def test_update_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2AsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -987,55 +2127,52 @@ async def test_update_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket( + logging_config.LogView( name="name_value", description="description_value", - retention_days=1512, - lifecycle_state=logging_config.LifecycleState.ACTIVE, + filter="filter_value", ) ) - response = await client.update_bucket(request) + response = await client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) + assert isinstance(response, logging_config.LogView) assert response.name == "name_value" assert response.description == "description_value" - assert response.retention_days == 1512 - - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.filter == "filter_value" @pytest.mark.asyncio -async def test_update_bucket_async_from_dict(): - await test_update_bucket_async(request_type=dict) +async def test_update_view_async_from_dict(): + await test_update_view_async(request_type=dict) -def test_update_bucket_field_headers(): +def test_update_view_field_headers(): client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateBucketRequest() + request = logging_config.UpdateViewRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: - call.return_value = logging_config.LogBucket() + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = logging_config.LogView() - client.update_bucket(request) + client.update_view(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -1048,21 +2185,136 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio -async def test_update_bucket_field_headers_async(): +async def test_update_view_field_headers_async(): client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateBucketRequest() + request = logging_config.UpdateViewRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket() + logging_config.LogView() ) - await client.update_bucket(request) + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_view( + transport: str = "grpc", request_type=logging_config.DeleteViewRequest +): + client = ConfigServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteViewRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_view_from_dict(): + test_delete_view(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest +): + client = ConfigServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == logging_config.DeleteViewRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_view_async_from_dict(): + await test_delete_view_async(request_type=dict) + + +def test_delete_view_field_headers(): + client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = None + + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_view_field_headers_async(): + client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3941,7 +5193,15 @@ def test_config_service_v2_base_transport(): methods = ( "list_buckets", "get_bucket", + "create_bucket", "update_bucket", + "delete_bucket", + "undelete_bucket", + "list_views", + "get_view", + "create_view", + "update_view", + "delete_view", "list_sinks", "get_sink", "create_sink", @@ -4260,8 +5520,35 @@ def test_parse_log_sink_path(): assert expected == actual +def test_log_view_path(): + project = "oyster" + location = "nudibranch" + bucket = "cuttlefish" + view = "mussel" + + expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, location=location, bucket=bucket, view=view, + ) + actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) + assert expected == actual + + +def test_parse_log_view_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "bucket": "scallop", + "view": "abalone", + } + path = ConfigServiceV2Client.log_view_path(**expected) + + # Check that the path construction is reversible. 
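The round trip asserted here is easy to see in isolation, since both helpers are plain static methods on the client class. A sketch with illustrative values (the names are hypothetical):

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    path = ConfigServiceV2Client.log_view_path("my-proj", "global", "my-bucket", "my-view")
    assert path == "projects/my-proj/locations/global/buckets/my-bucket/views/my-view"

    # parse_log_view_path inverts log_view_path, recovering each component.
    assert ConfigServiceV2Client.parse_log_view_path(path) == {
        "project": "my-proj",
        "location": "global",
        "bucket": "my-bucket",
        "view": "my-view",
    }
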
+    actual = ConfigServiceV2Client.parse_log_view_path(path)
+    assert expected == actual
+
+
 def test_common_billing_account_path():
-    billing_account = "oyster"
+    billing_account = "squid"
 
     expected = "billingAccounts/{billing_account}".format(
         billing_account=billing_account,
@@ -4272,7 +5559,7 @@ def test_common_billing_account_path():
 
 def test_parse_common_billing_account_path():
     expected = {
-        "billing_account": "nudibranch",
+        "billing_account": "clam",
     }
     path = ConfigServiceV2Client.common_billing_account_path(**expected)
 
@@ -4282,7 +5569,7 @@ def test_parse_common_billing_account_path():
 
 
 def test_common_folder_path():
-    folder = "cuttlefish"
+    folder = "whelk"
 
     expected = "folders/{folder}".format(folder=folder,)
     actual = ConfigServiceV2Client.common_folder_path(folder)
@@ -4291,7 +5578,7 @@ def test_common_folder_path():
 
 def test_parse_common_folder_path():
     expected = {
-        "folder": "mussel",
+        "folder": "octopus",
     }
     path = ConfigServiceV2Client.common_folder_path(**expected)
 
@@ -4301,7 +5588,7 @@ def test_parse_common_folder_path():
 
 
 def test_common_organization_path():
-    organization = "winkle"
+    organization = "oyster"
 
     expected = "organizations/{organization}".format(organization=organization,)
     actual = ConfigServiceV2Client.common_organization_path(organization)
@@ -4310,7 +5597,7 @@ def test_common_organization_path():
 
 def test_parse_common_organization_path():
     expected = {
-        "organization": "nautilus",
+        "organization": "nudibranch",
     }
     path = ConfigServiceV2Client.common_organization_path(**expected)
 
@@ -4320,7 +5607,7 @@ def test_parse_common_organization_path():
 
 
 def test_common_project_path():
-    project = "scallop"
+    project = "cuttlefish"
 
     expected = "projects/{project}".format(project=project,)
     actual = ConfigServiceV2Client.common_project_path(project)
@@ -4329,7 +5616,7 @@ def test_common_project_path():
 
 def test_parse_common_project_path():
     expected = {
-        "project": "abalone",
+        "project": "mussel",
     }
     path = ConfigServiceV2Client.common_project_path(**expected)
 
@@ -4339,8 +5626,8 @@ def test_parse_common_project_path():
 
 
 def test_common_location_path():
-    project = "squid"
-    location = "clam"
+    project = "winkle"
+    location = "nautilus"
 
     expected = "projects/{project}/locations/{location}".format(
         project=project, location=location,
@@ -4351,8 +5638,8 @@ def test_common_location_path():
 
 def test_parse_common_location_path():
     expected = {
-        "project": "whelk",
-        "location": "octopus",
+        "project": "scallop",
+        "location": "abalone",
     }
     path = ConfigServiceV2Client.common_location_path(**expected)
 

From 021cfbb83ca00a14eba8fb770905208e060cdb0f Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Fri, 4 Dec 2020 21:07:49 +0100
Subject: [PATCH 2/9] chore(deps): update dependency google-cloud-logging to
 v2.0.1 (#109)

---
 samples/snippets/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index 379ac65b1..bdb659d04 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1 +1 @@
-google-cloud-logging==2.0.0
+google-cloud-logging==2.0.1

From e704f287a40db38d0da42fa5e21e7a9ef73922ec Mon Sep 17 00:00:00 2001
From: Brady Kieffer
Date: Mon, 7 Dec 2020 18:18:33 -0500
Subject: [PATCH 3/9] fix: Remove keyword-only argument for RequestMiddleware
 (#113)

* fix: Remove keyword-only argument for RequestMiddleware

Remove the keyword-only argument from the request middleware; a
keyword-only `get_response` causes Django to fail when attempting to
load the middleware.
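Django builds its middleware chain by calling each middleware class with the handler as a single positional argument, so a keyword-only `get_response` blows up at startup. A self-contained illustration of the failure mode (not code from this repo):

    class KeywordOnlyMiddleware:
        def __init__(self, *, get_response=None):
            self.get_response = get_response

    def handler(request):
        return None

    # Django instantiates middleware roughly as middleware_class(handler):
    try:
        KeywordOnlyMiddleware(handler)
    except TypeError as exc:
        # __init__() takes 1 positional argument but 2 were given
        print(exc)
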
Django currently only supports handlers being passed in as args. * Test that we can instantiate middleware with or without kwargs * Make get_response a required parameter in RequestMiddleware --- .../cloud/logging_v2/handlers/middleware/request.py | 2 +- tests/unit/handlers/middleware/test_request.py | 13 +++++++++++++ tests/unit/handlers/test__helpers.py | 4 ++-- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/google/cloud/logging_v2/handlers/middleware/request.py b/google/cloud/logging_v2/handlers/middleware/request.py index da361b967..1804947ec 100644 --- a/google/cloud/logging_v2/handlers/middleware/request.py +++ b/google/cloud/logging_v2/handlers/middleware/request.py @@ -42,7 +42,7 @@ def _get_django_request(): class RequestMiddleware(MiddlewareMixin): """Saves the request in thread local""" - def __init__(self, *, get_response=None): + def __init__(self, get_response): self.get_response = get_response def process_request(self, request): diff --git a/tests/unit/handlers/middleware/test_request.py b/tests/unit/handlers/middleware/test_request.py index 16d3f9ba2..d0e3daf24 100644 --- a/tests/unit/handlers/middleware/test_request.py +++ b/tests/unit/handlers/middleware/test_request.py @@ -41,6 +41,9 @@ def _get_target_class(self): return request.RequestMiddleware def _make_one(self, *args, **kw): + if not args and "get_response" not in kw: + kw["get_response"] = None + return self._get_target_class()(*args, **kw) def test_process_request(self): @@ -54,6 +57,16 @@ def test_process_request(self): django_request = request._get_django_request() self.assertEqual(django_request, mock_request) + def test_can_instantiate_middleware_without_kwargs(self): + handler = mock.Mock() + middleware = self._make_one(handler) + self.assertEqual(middleware.get_response, handler) + + def test_can_instantiate_middleware_with_kwargs(self): + handler = mock.Mock() + middleware = self._make_one(get_response=handler) + self.assertEqual(middleware.get_response, handler) + class Test__get_django_request(DjangoBase): @staticmethod diff --git a/tests/unit/handlers/test__helpers.py b/tests/unit/handlers/test__helpers.py index 0cd3b30d8..1fbf6c860 100644 --- a/tests/unit/handlers/test__helpers.py +++ b/tests/unit/handlers/test__helpers.py @@ -87,7 +87,7 @@ def test_no_context_header(self): django_request = RequestFactory().get("/") - middleware = request.RequestMiddleware() + middleware = request.RequestMiddleware(None) middleware.process_request(django_request) trace_id = self._call_fut() self.assertIsNone(trace_id) @@ -104,7 +104,7 @@ def test_valid_context_header(self): "/", **{django_trace_header: django_trace_id} ) - middleware = request.RequestMiddleware() + middleware = request.RequestMiddleware(None) middleware.process_request(django_request) trace_id = self._call_fut() From 6843a3aee3c0908ddbc493e7a9ecdddd01df34ef Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 8 Dec 2020 11:36:43 -0800 Subject: [PATCH 4/9] fix: Add submodule imports for handlers to logging alias (#117) --- google/cloud/logging/handlers/__init__.py | 27 ++++++++++++ .../logging/handlers/middleware/__init__.py | 17 ++++++++ .../logging/handlers/transports/__init__.py | 28 +++++++++++++ tests/unit/test_logging_shim.py | 41 ++++++++++++++++++- 4 files changed, 112 insertions(+), 1 deletion(-) create mode 100644 google/cloud/logging/handlers/__init__.py create mode 100644 google/cloud/logging/handlers/middleware/__init__.py create mode 100644 google/cloud/logging/handlers/transports/__init__.py diff --git 
a/google/cloud/logging/handlers/__init__.py b/google/cloud/logging/handlers/__init__.py
new file mode 100644
index 000000000..29ed8f0d1
--- /dev/null
+++ b/google/cloud/logging/handlers/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python :mod:`logging` handlers for Google Cloud Logging."""
+
+from google.cloud.logging_v2.handlers.app_engine import AppEngineHandler
+from google.cloud.logging_v2.handlers.container_engine import ContainerEngineHandler
+from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler
+from google.cloud.logging_v2.handlers.handlers import setup_logging
+
+__all__ = [
+    "AppEngineHandler",
+    "CloudLoggingHandler",
+    "ContainerEngineHandler",
+    "setup_logging",
+]
diff --git a/google/cloud/logging/handlers/middleware/__init__.py b/google/cloud/logging/handlers/middleware/__init__.py
new file mode 100644
index 000000000..bd32e4a90
--- /dev/null
+++ b/google/cloud/logging/handlers/middleware/__init__.py
@@ -0,0 +1,17 @@
+# Copyright 2017 Google LLC All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud.logging_v2.handlers.middleware.request import RequestMiddleware
+
+__all__ = ["RequestMiddleware"]
diff --git a/google/cloud/logging/handlers/transports/__init__.py b/google/cloud/logging/handlers/transports/__init__.py
new file mode 100644
index 000000000..5a64caa07
--- /dev/null
+++ b/google/cloud/logging/handlers/transports/__init__.py
@@ -0,0 +1,28 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport classes for Python logging integration.
+Currently two options are provided: a synchronous transport that makes
+an API call for each log statement, and an asynchronous transport that
+batches log statements and sends them via the API in the background,
+using a :class:`~google.cloud.logging.logger.Batch` object.
+""" + +from google.cloud.logging_v2.handlers.transports.base import Transport +from google.cloud.logging_v2.handlers.transports.sync import SyncTransport +from google.cloud.logging_v2.handlers.transports.background_thread import ( + BackgroundThreadTransport, +) + +__all__ = ["BackgroundThreadTransport", "SyncTransport", "Transport"] diff --git a/tests/unit/test_logging_shim.py b/tests/unit/test_logging_shim.py index 507b7c635..ae09c37fa 100644 --- a/tests/unit/test_logging_shim.py +++ b/tests/unit/test_logging_shim.py @@ -17,7 +17,7 @@ class TestLoggingShim(unittest.TestCase): - def test_shim_matches_logging_v2(self): + def test_root_shim_matches_logging_v2(self): from google.cloud import logging from google.cloud import logging_v2 @@ -26,4 +26,43 @@ def test_shim_matches_logging_v2(self): for name in logging.__all__: found = getattr(logging, name) expected = getattr(logging_v2, name) + if name == "handlers": + # handler has separate shim + self.assertTrue(found) + self.assertIs(type(found), type(expected)) + else: + # other attributes should be identical + self.assertIs(found, expected) + + def test_handler_shim_matches_logging_v2(self): + from google.cloud.logging import handlers + from google.cloud.logging_v2 import handlers as handlers_2 + + self.assertEqual(handlers.__all__, handlers_2.__all__) + + for name in handlers.__all__: + found = getattr(handlers, name) + expected = getattr(handlers_2, name) + self.assertIs(found, expected) + + def test_middleware_shim_matches_logging_v2(self): + from google.cloud.logging.handlers import middleware + from google.cloud.logging_v2.handlers import middleware as middleware_2 + + self.assertEqual(middleware.__all__, middleware_2.__all__) + + for name in middleware.__all__: + found = getattr(middleware, name) + expected = getattr(middleware_2, name) + self.assertIs(found, expected) + + def test_transports_shim_matches_logging_v2(self): + from google.cloud.logging.handlers import transports + from google.cloud.logging_v2.handlers import transports as transports_2 + + self.assertEqual(transports.__all__, transports_2.__all__) + + for name in transports.__all__: + found = getattr(transports, name) + expected = getattr(transports_2, name) self.assertIs(found, expected) From 7eaa5853f3a45e3db015a09841b98aeab461e6f3 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 8 Dec 2020 13:02:15 -0800 Subject: [PATCH 5/9] test: use logging API in unit tests when possible (#118) --- tests/unit/handlers/test_app_engine.py | 2 +- tests/unit/handlers/test_container_engine.py | 4 +- tests/unit/handlers/test_handlers.py | 6 +- .../transports/test_background_thread.py | 4 +- tests/unit/handlers/transports/test_base.py | 2 +- tests/unit/handlers/transports/test_sync.py | 2 +- tests/unit/test__gapic.py | 10 +-- tests/unit/test__http.py | 24 +++---- tests/unit/test_client.py | 52 +++++++-------- tests/unit/test_entries.py | 20 +++--- tests/unit/test_logger.py | 64 +++++++++---------- tests/unit/test_metric.py | 2 +- tests/unit/test_sink.py | 2 +- 13 files changed, 95 insertions(+), 99 deletions(-) diff --git a/tests/unit/handlers/test_app_engine.py b/tests/unit/handlers/test_app_engine.py index 2a80e79b1..ea16e3c85 100644 --- a/tests/unit/handlers/test_app_engine.py +++ b/tests/unit/handlers/test_app_engine.py @@ -22,7 +22,7 @@ class TestAppEngineHandler(unittest.TestCase): PROJECT = "PROJECT" def _get_target_class(self): - from google.cloud.logging_v2.handlers.app_engine import AppEngineHandler + from google.cloud.logging.handlers import AppEngineHandler return 
AppEngineHandler diff --git a/tests/unit/handlers/test_container_engine.py b/tests/unit/handlers/test_container_engine.py index c5d6df65f..d2ae83881 100644 --- a/tests/unit/handlers/test_container_engine.py +++ b/tests/unit/handlers/test_container_engine.py @@ -19,9 +19,7 @@ class TestContainerEngineHandler(unittest.TestCase): PROJECT = "PROJECT" def _get_target_class(self): - from google.cloud.logging_v2.handlers.container_engine import ( - ContainerEngineHandler, - ) + from google.cloud.logging.handlers import ContainerEngineHandler return ContainerEngineHandler diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index 1c5492e1a..e967b2015 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -22,7 +22,7 @@ class TestCloudLoggingHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler + from google.cloud.logging.handlers import CloudLoggingHandler return CloudLoggingHandler @@ -47,7 +47,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): import io - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource resource = Resource("resource_type", {"resource_label": "value"}) labels = {"handler_lable": "value"} @@ -91,7 +91,7 @@ def test_emit(self): class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): - from google.cloud.logging_v2.handlers.handlers import setup_logging + from google.cloud.logging.handlers import setup_logging if excludes: return setup_logging(handler, excluded_loggers=excludes) diff --git a/tests/unit/handlers/transports/test_background_thread.py b/tests/unit/handlers/transports/test_background_thread.py index 71d868d86..e9626a759 100644 --- a/tests/unit/handlers/transports/test_background_thread.py +++ b/tests/unit/handlers/transports/test_background_thread.py @@ -25,9 +25,7 @@ class TestBackgroundThreadHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.handlers.transports import ( - BackgroundThreadTransport, - ) + from google.cloud.logging.handlers.transports import BackgroundThreadTransport return BackgroundThreadTransport diff --git a/tests/unit/handlers/transports/test_base.py b/tests/unit/handlers/transports/test_base.py index bff253f94..4cbfab02e 100644 --- a/tests/unit/handlers/transports/test_base.py +++ b/tests/unit/handlers/transports/test_base.py @@ -21,7 +21,7 @@ class TestBaseHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.handlers.transports import Transport + from google.cloud.logging.handlers.transports import Transport return Transport diff --git a/tests/unit/handlers/transports/test_sync.py b/tests/unit/handlers/transports/test_sync.py index 7bc2cd46f..0ee6db229 100644 --- a/tests/unit/handlers/transports/test_sync.py +++ b/tests/unit/handlers/transports/test_sync.py @@ -22,7 +22,7 @@ class TestSyncHandler(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.handlers.transports import SyncTransport + from google.cloud.logging.handlers.transports import SyncTransport return SyncTransport diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py index 75aa20d46..5da1c7122 100644 --- a/tests/unit/test__gapic.py +++ b/tests/unit/test__gapic.py @@ -17,7 +17,7 @@ import google.auth.credentials import mock -import google.cloud.logging_v2 +import google.cloud.logging from 
google.cloud import logging_v2 from google.cloud.logging_v2 import _gapic from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client @@ -91,7 +91,7 @@ def test_list_entries_with_options(self): result = client.list_entries( [PROJECT_PATH], filter_=FILTER, - order_by=google.cloud.logging_v2.ASCENDING, + order_by=google.cloud.logging.ASCENDING, page_size=42, page_token="token", ) @@ -103,7 +103,7 @@ def test_list_entries_with_options(self): request = call.call_args.args[0] assert request.resource_names == [PROJECT_PATH] assert request.filter == FILTER - assert request.order_by == google.cloud.logging_v2.ASCENDING + assert request.order_by == google.cloud.logging.ASCENDING assert request.page_size == 42 assert request.page_token == "token" @@ -179,7 +179,7 @@ def test_list_sinks(self): # Check the response assert len(sinks) == 1 sink = sinks[0] - assert isinstance(sink, google.cloud.logging_v2.sink.Sink) + assert isinstance(sink, google.cloud.logging.Sink) assert sink.name == self.SINK_NAME assert sink.destination == self.DESTINATION_URI assert sink.filter_ == FILTER @@ -351,7 +351,7 @@ def test_list_metrics(self): # Check the response assert len(metrics) == 1 metric = metrics[0] - assert isinstance(metric, google.cloud.logging_v2.metric.Metric) + assert isinstance(metric, google.cloud.logging.Metric) assert metric.name == self.METRIC_PATH assert metric.description == self.DESCRIPTION assert metric.filter_ == FILTER diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index 0cf8dcfdd..e927f6c15 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -130,9 +130,9 @@ def _make_timestamp(): return NOW, _datetime_to_rfc3339_w_nanos(NOW) def test_list_entries_no_paging(self): - from google.cloud.logging_v2.client import Client - from google.cloud.logging_v2.entries import TextEntry - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Client + from google.cloud.logging import TextEntry + from google.cloud.logging import Logger NOW, TIMESTAMP = self._make_timestamp() IID = "IID" @@ -184,11 +184,11 @@ def test_list_entries_no_paging(self): ) def test_list_entries_w_paging(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.client import Client - from google.cloud.logging_v2.logger import Logger - from google.cloud.logging_v2.entries import ProtobufEntry - from google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging import DESCENDING + from google.cloud.logging import Client + from google.cloud.logging import Logger + from google.cloud.logging import ProtobufEntry + from google.cloud.logging import StructEntry PROJECT1 = "PROJECT1" PROJECT1_PATH = f"projects/{PROJECT1}" @@ -362,7 +362,7 @@ def test_ctor(self): self.assertEqual(api.api_request, connection.api_request) def test_list_sinks_no_paging(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink TOKEN = "TOKEN" RETURNED = { @@ -402,7 +402,7 @@ def test_list_sinks_no_paging(self): ) def test_list_sinks_w_paging(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink TOKEN = "TOKEN" PAGE_SIZE = 42 @@ -633,7 +633,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_list_metrics_no_paging(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric TOKEN = "TOKEN" RETURNED = { @@ -667,7 +667,7 @@ def test_list_metrics_no_paging(self): ) def 
test_list_metrics_w_paging(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric TOKEN = "TOKEN" PAGE_SIZE = 42 diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 29934c389..8083e3c56 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -43,7 +43,7 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.client import Client + from google.cloud.logging import Client return Client @@ -238,7 +238,7 @@ def make_api(client_obj): self.assertIs(again, api) def test_logger(self): - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Logger creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -249,7 +249,7 @@ def test_logger(self): self.assertEqual(logger.project, self.PROJECT) def test_list_entries_defaults(self): - from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging import TextEntry IID = "IID" TEXT = "TEXT" @@ -308,10 +308,10 @@ def test_list_entries_defaults(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.entries import ProtobufEntry - from google.cloud.logging_v2.entries import StructEntry - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import DESCENDING + from google.cloud.logging import ProtobufEntry + from google.cloud.logging import StructEntry + from google.cloud.logging import Logger PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -404,10 +404,10 @@ def test_list_entries_explicit(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit_timestamp(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.entries import ProtobufEntry - from google.cloud.logging_v2.entries import StructEntry - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import DESCENDING + from google.cloud.logging import ProtobufEntry + from google.cloud.logging import StructEntry + from google.cloud.logging import Logger PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -492,7 +492,7 @@ def test_list_entries_explicit_timestamp(self): ) def test_sink_defaults(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -505,7 +505,7 @@ def test_sink_defaults(self): self.assertEqual(sink.parent, self.PROJECT_PATH) def test_sink_explicit(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -520,7 +520,7 @@ def test_sink_explicit(self): self.assertEqual(sink.parent, self.PROJECT_PATH) def test_list_sinks_no_paging(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink PROJECT = "PROJECT" TOKEN = "TOKEN" @@ -559,7 +559,7 @@ def test_list_sinks_no_paging(self): ) def test_list_sinks_with_paging(self): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink PROJECT = "PROJECT" SINK_NAME = "sink_name" @@ -603,7 +603,7 @@ def test_list_sinks_with_paging(self): ) def test_metric_defaults(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric 
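        # Only the import path changes throughout these tests: Metric, like
        # Client, Logger, Sink, Resource, and the entry classes, is
        # re-exported from the stable google.cloud.logging surface, so the
        # versioned google.cloud.logging_v2 module paths are no longer needed
        # here.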
creds = _make_credentials() @@ -617,7 +617,7 @@ def test_metric_defaults(self): self.assertEqual(metric.project, self.PROJECT) def test_metric_explicit(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric creds = _make_credentials() @@ -633,7 +633,7 @@ def test_metric_explicit(self): self.assertEqual(metric.project, self.PROJECT) def test_list_metrics_no_paging(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric metrics = [ { @@ -669,7 +669,7 @@ def test_list_metrics_no_paging(self): ) def test_list_metrics_with_paging(self): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric token = "TOKEN" next_token = "T00KEN" @@ -719,7 +719,7 @@ def test_get_default_handler_app_engine(self): import os from google.cloud._testing import _Monkey from google.cloud.logging_v2.client import _APPENGINE_FLEXIBLE_ENV_VM - from google.cloud.logging_v2.handlers import AppEngineHandler + from google.cloud.logging.handlers import AppEngineHandler credentials = _make_credentials() client = self._make_one( @@ -734,7 +734,7 @@ def test_get_default_handler_app_engine(self): self.assertIsInstance(handler, AppEngineHandler) def test_get_default_handler_container_engine(self): - from google.cloud.logging_v2.handlers import ContainerEngineHandler + from google.cloud.logging.handlers import ContainerEngineHandler credentials = _make_credentials() client = self._make_one( @@ -753,8 +753,8 @@ def test_get_default_handler_container_engine(self): def test_get_default_handler_general(self): import io - from google.cloud.logging_v2.handlers import CloudLoggingHandler - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging import Resource name = "test-logger" resource = Resource("resource_type", {"resource_label": "value"}) @@ -778,7 +778,7 @@ def test_get_default_handler_general(self): self.assertEqual(handler.labels, labels) def test_setup_logging(self): - from google.cloud.logging_v2.handlers import CloudLoggingHandler + from google.cloud.logging.handlers import CloudLoggingHandler credentials = _make_credentials() client = self._make_one( @@ -804,8 +804,8 @@ def test_setup_logging(self): def test_setup_logging_w_extra_kwargs(self): import io - from google.cloud.logging_v2.handlers import CloudLoggingHandler - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging import Resource name = "test-logger" resource = Resource("resource_type", {"resource_label": "value"}) diff --git a/tests/unit/test_entries.py b/tests/unit/test_entries.py index 5b7763f45..ef90b8159 100644 --- a/tests/unit/test_entries.py +++ b/tests/unit/test_entries.py @@ -61,7 +61,7 @@ class TestLogEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.entries import LogEntry + from google.cloud.logging import LogEntry return LogEntry @@ -90,7 +90,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) IID = "IID" @@ -178,7 +178,7 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime from google.cloud._helpers import UTC - from 
google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource klass = self._get_target_class() client = _Client(self.PROJECT) @@ -332,7 +332,7 @@ def test_to_api_repr_w_source_location_no_line(self): def test_to_api_repr_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource from google.cloud._helpers import _datetime_to_rfc3339 LOG_NAME = "test.log" @@ -395,7 +395,7 @@ class TestTextEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging import TextEntry return TextEntry @@ -417,7 +417,7 @@ def test_to_api_repr_defaults(self): def test_to_api_repr_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource from google.cloud._helpers import _datetime_to_rfc3339 LOG_NAME = "test.log" @@ -483,7 +483,7 @@ class TestStructEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging import StructEntry return StructEntry @@ -505,7 +505,7 @@ def test_to_api_repr_defaults(self): def test_to_api_repr_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource from google.cloud._helpers import _datetime_to_rfc3339 LOG_NAME = "test.log" @@ -571,7 +571,7 @@ class TestProtobufEntry(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging import ProtobufEntry return ProtobufEntry @@ -652,7 +652,7 @@ def test_to_api_repr_proto_defaults(self): def test_to_api_repr_proto_explicit(self): import datetime from google.protobuf.json_format import MessageToDict - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource from google.cloud._helpers import _datetime_to_rfc3339 from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index 853bcce22..869330633 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -36,7 +36,7 @@ class TestLogger(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Logger return Logger @@ -75,7 +75,7 @@ def test_ctor_explicit(self): self.assertEqual(logger.labels, LABELS) def test_batch_w_bound_client(self): - from google.cloud.logging_v2.logger import Batch + from google.cloud.logging import Batch conn = object() client = _Client(self.PROJECT, conn) @@ -86,7 +86,7 @@ def test_batch_w_bound_client(self): self.assertIs(batch.client, client) def test_batch_w_alternate_client(self): - from google.cloud.logging_v2.logger import Batch + from google.cloud.logging import Batch conn1 = object() conn2 = object() @@ -117,7 +117,7 @@ def test_log_empty_defaults_w_default_labels(self): def test_log_empty_w_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" DEFAULT_LABELS = {"foo": "spam"} @@ -207,7 +207,7 @@ def test_log_text_w_unicode_and_default_labels(self): def test_log_text_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import 
Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" TEXT = "TEXT" @@ -300,7 +300,7 @@ def test_log_struct_w_default_labels(self): def test_log_struct_w_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource ALT_LOG_NAME = "projects/foo/logs/alt.log.name" STRUCT = {"message": "MESSAGE", "weather": "cloudy"} @@ -405,7 +405,7 @@ def test_log_proto_w_explicit(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource message = Struct(fields={"foo": Value(bool_value=True)}) ALT_LOG_NAME = "projects/foo/logs/alt.log.name" @@ -486,7 +486,7 @@ def test_delete_w_alternate_client(self): ) def test_list_entries_defaults(self): - from google.cloud.logging_v2.client import Client + from google.cloud.logging import Client TOKEN = "TOKEN" @@ -530,8 +530,8 @@ def test_list_entries_defaults(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.client import Client + from google.cloud.logging import DESCENDING + from google.cloud.logging import Client PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -590,8 +590,8 @@ def test_list_entries_explicit(self): self.assertLess(yesterday - timestamp, timedelta(minutes=1)) def test_list_entries_explicit_timestamp(self): - from google.cloud.logging_v2 import DESCENDING - from google.cloud.logging_v2.client import Client + from google.cloud.logging import DESCENDING + from google.cloud.logging import Client PROJECT1 = "PROJECT1" PROJECT2 = "PROJECT2" @@ -641,7 +641,7 @@ class TestBatch(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.logger import Batch + from google.cloud.logging import Batch return Batch @@ -657,7 +657,7 @@ def test_ctor_defaults(self): self.assertEqual(len(batch.entries), 0) def test_log_empty_defaults(self): - from google.cloud.logging_v2.entries import LogEntry + from google.cloud.logging import LogEntry ENTRY = LogEntry() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -668,8 +668,8 @@ def test_log_empty_defaults(self): def test_log_empty_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource - from google.cloud.logging_v2.entries import LogEntry + from google.cloud.logging import Resource + from google.cloud.logging import LogEntry LABELS = {"foo": "bar", "baz": "qux"} IID = "IID" @@ -714,7 +714,7 @@ def test_log_empty_explicit(self): def test_log_text_defaults(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging import TextEntry TEXT = "This is the entry text" ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE) @@ -726,8 +726,8 @@ def test_log_text_defaults(self): def test_log_text_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource - from google.cloud.logging_v2.entries import TextEntry + from google.cloud.logging import Resource + from google.cloud.logging import TextEntry TEXT = "This is the entry text" LABELS = {"foo": "bar", "baz": "qux"} @@ -775,7 +775,7 @@ def test_log_text_explicit(self): def test_log_struct_defaults(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from 
google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging import StructEntry STRUCT = {"message": "Message text", "weather": "partly cloudy"} ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE) @@ -787,8 +787,8 @@ def test_log_struct_defaults(self): def test_log_struct_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource - from google.cloud.logging_v2.entries import StructEntry + from google.cloud.logging import Resource + from google.cloud.logging import StructEntry STRUCT = {"message": "Message text", "weather": "partly cloudy"} LABELS = {"foo": "bar", "baz": "qux"} @@ -836,7 +836,7 @@ def test_log_struct_explicit(self): def test_log_proto_defaults(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging import ProtobufEntry from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -850,8 +850,8 @@ def test_log_proto_defaults(self): def test_log_proto_explicit(self): import datetime - from google.cloud.logging_v2.resource import Resource - from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging import Resource + from google.cloud.logging import ProtobufEntry from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value @@ -900,7 +900,7 @@ def test_log_proto_explicit(self): def test_commit_w_unknown_entry_type(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from google.cloud.logging_v2.entries import LogEntry + from google.cloud.logging import LogEntry logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -918,7 +918,7 @@ def test_commit_w_unknown_entry_type(self): def test_commit_w_resource_specified(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE - from google.cloud.logging_v2.resource import Resource + from google.cloud.logging import Resource logger = _Logger() client = _Client(project=self.PROJECT, connection=_make_credentials()) @@ -1035,7 +1035,7 @@ def test_commit_w_alternate_client(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Logger from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE TEXT = "This is the entry text" @@ -1087,7 +1087,7 @@ def test_context_mgr_success(self): from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging_v2.logger import Logger + from google.cloud.logging import Logger from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE TEXT = "This is the entry text" @@ -1137,9 +1137,9 @@ def test_context_mgr_failure(self): import datetime from google.protobuf.struct_pb2 import Struct from google.protobuf.struct_pb2 import Value - from google.cloud.logging_v2.entries import TextEntry - from google.cloud.logging_v2.entries import StructEntry - from google.cloud.logging_v2.entries import ProtobufEntry + from google.cloud.logging import TextEntry + from google.cloud.logging import StructEntry + from google.cloud.logging import ProtobufEntry TEXT = "This is the entry text" STRUCT = {"message": TEXT, "weather": "partly cloudy"} diff --git a/tests/unit/test_metric.py b/tests/unit/test_metric.py index a71fd763f..83b49d02d 
100644 --- a/tests/unit/test_metric.py +++ b/tests/unit/test_metric.py @@ -25,7 +25,7 @@ class TestMetric(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.metric import Metric + from google.cloud.logging import Metric return Metric diff --git a/tests/unit/test_sink.py b/tests/unit/test_sink.py index cac604058..1e4852ab5 100644 --- a/tests/unit/test_sink.py +++ b/tests/unit/test_sink.py @@ -27,7 +27,7 @@ class TestSink(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.logging_v2.sink import Sink + from google.cloud.logging import Sink return Sink From ca3d75114da0342d99a47d05241f967c227a94af Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 11 Dec 2020 16:50:27 -0800 Subject: [PATCH 6/9] chore: Re-generated to pick up changes from synthtool. (#127) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * chore: add config / docs for 'pre-commit' support Source-Author: Tres Seaver Source-Date: Tue Dec 1 16:01:20 2020 -0500 Source-Repo: googleapis/synthtool Source-Sha: 32af6da519a6b042e3da62008e2a75e991efb6b4 Source-Link: https://github.com/googleapis/synthtool/commit/32af6da519a6b042e3da62008e2a75e991efb6b4 * chore(deps): update precommit hook pre-commit/pre-commit-hooks to v3.3.0 Source-Author: WhiteSource Renovate Source-Date: Wed Dec 2 17:18:24 2020 +0100 Source-Repo: googleapis/synthtool Source-Sha: 69629b64b83c6421d616be2b8e11795738ec8a6c Source-Link: https://github.com/googleapis/synthtool/commit/69629b64b83c6421d616be2b8e11795738ec8a6c * test(python): give filesystem paths to pytest-cov https://pytest-cov.readthedocs.io/en/latest/config.html The pytest-cov docs seem to suggest a filesystem path is expected. Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Dec 2 09:28:04 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: f94318521f63085b9ccb43d42af89f153fb39f15 Source-Link: https://github.com/googleapis/synthtool/commit/f94318521f63085b9ccb43d42af89f153fb39f15 * chore: update noxfile.py.j2 * Update noxfile.py.j2 add changes from @glasnt to the template template to ensure that enforcing type hinting doesn't fail for repos with the sample noxfile (aka all samples repos) See https://github.com/GoogleCloudPlatform/python-docs-samples/pull/4869/files for context * fix typo Source-Author: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Thu Dec 3 13:44:30 2020 -0800 Source-Repo: googleapis/synthtool Source-Sha: 18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1 Source-Link: https://github.com/googleapis/synthtool/commit/18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1 --- .pre-commit-config.yaml | 17 ++++ CONTRIBUTING.rst | 10 ++ noxfile.py | 5 +- samples/snippets/README.rst | 191 ------------------------------------ samples/snippets/noxfile.py | 19 ++-- synth.metadata | 8 +- 6 files changed, 42 insertions(+), 208 deletions(-) create mode 100644 .pre-commit-config.yaml delete mode 100644 samples/snippets/README.rst diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..6ad83346e --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.3.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 19.10b0 + hooks: + - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.4 + hooks: + - id: flake8 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index ef2706b77..ab6c09b8f 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -111,6 +111,16 @@ Coding Style should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + Exceptions to PEP8: - Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for diff --git a/noxfile.py b/noxfile.py index 1844aa62b..3db66c649 100644 --- a/noxfile.py +++ b/noxfile.py @@ -79,9 +79,8 @@ def default(session): session.run( "py.test", "--quiet", - "--cov=google.cloud.logging", - "--cov=google.cloud", - "--cov=tests.unit", + "--cov=google/cloud", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", diff --git a/samples/snippets/README.rst b/samples/snippets/README.rst deleted file mode 100644 index 9a38dca7d..000000000 --- a/samples/snippets/README.rst +++ /dev/null @@ -1,191 +0,0 @@ - -.. This file is automatically generated. Do not edit this file directly. - -Cloud Logging Python Samples -=============================================================================== - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/README.rst - - -This directory contains samples for Cloud Logging. `Cloud Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. - - - - -.. 
_Cloud Logging: https://cloud.google.com/logging/docs - - -Setup -------------------------------------------------------------------------------- - - - -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started - - - - -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 3.6+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ - - - - - - -Samples -------------------------------------------------------------------------------- - - -Quickstart -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/quickstart.py,logging/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python quickstart.py - - - - -Snippets -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/snippets.py,logging/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python snippets.py - - - usage: snippets.py [-h] logger_name {list,write,delete} ... - - This application demonstrates how to perform basic operations on logs and - log entries with Cloud Logging. - - For more information, see the README.md under /logging and the - documentation at https://cloud.google.com/logging/docs. - - positional arguments: - logger_name Logger name - {list,write,delete} - list Lists the most recent entries for a given logger. - write Writes log entries to the given logger. - delete Deletes a logger and all its entries. Note that a - deletion can take several minutes to take effect. - - optional arguments: - -h, --help show this help message and exit - - - - - -Export -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=logging/cloud-client/export.py,logging/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python export.py - - - usage: export.py [-h] {list,create,update,delete} ... - - positional arguments: - {list,create,update,delete} - list Lists all sinks. - create Lists all sinks. - update Changes a sink's filter. The filter determines which - logs this sink matches and will be exported to the - destination. For example a filter of 'severity>=INFO' - will send all logs that have a severity of INFO or - greater to the destination. See https://cloud.google.c - om/logging/docs/view/advanced_filters for more filter - information. - delete Deletes a sink. - - optional arguments: - -h, --help show this help message and exit - - - - - - - - - -The client library -------------------------------------------------------------------------------- - -This sample uses the `Google Cloud Client Library for Python`_. -You can read the documentation for more details on API usage and use GitHub -to `browse the source`_ and `report issues`_. - -.. _Google Cloud Client Library for Python: - https://googlecloudplatform.github.io/google-cloud-python/ -.. _browse the source: - https://github.com/GoogleCloudPlatform/google-cloud-python -.. _report issues: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues - - - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 2a0629078..bca0522ec 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -68,7 +69,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -97,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -135,7 +136,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: @@ -148,15 +149,13 @@ def lint(session): "." 
] session.run("flake8", *args) - - # # Black # @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -170,7 +169,7 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -196,7 +195,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -211,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -234,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/synth.metadata b/synth.metadata index 4e26d4877..a6ed7f34c 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "4e24b3c360adef8d7761573d789867857586337d" + "sha": "7eaa5853f3a45e3db015a09841b98aeab461e6f3" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" + "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" + "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" } } ], @@ -85,6 +85,7 @@ ".kokoro/test-samples.sh", ".kokoro/trampoline.sh", ".kokoro/trampoline_v2.sh", + ".pre-commit-config.yaml", ".trampolinerc", "CODE_OF_CONDUCT.md", "CONTRIBUTING.rst", @@ -134,7 +135,6 @@ "renovate.json", "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", "samples/snippets/noxfile.py", "scripts/decrypt-secrets.sh", "scripts/readme-gen/readme_gen.py", From 3a25c8cd9bd06e5a8f488945c9bc94380e2bf0d1 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 11 Dec 2020 16:50:47 -0800 Subject: [PATCH 7/9] chore: Re-generated to pick up changes from googleapis. (#126) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* feat: add the Tailing API to get a live stream of the tail end of filtered logs PiperOrigin-RevId: 344435830 Source-Author: Google APIs Source-Date: Thu Nov 26 09:56:05 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: e8857c4c36948e7e0500377cd7fcecbf2459afc8 Source-Link: https://github.com/googleapis/googleapis/commit/e8857c4c36948e7e0500377cd7fcecbf2459afc8 --- google/cloud/logging_v2/proto/logging.proto | 99 +++++++++++++++ .../logging_service_v2/async_client.py | 67 +++++++++- .../services/logging_service_v2/client.py | 54 +++++++- .../logging_service_v2/transports/base.py | 27 ++++ .../logging_service_v2/transports/grpc.py | 28 +++++ .../transports/grpc_asyncio.py | 30 +++++ google/cloud/logging_v2/types/__init__.py | 4 + google/cloud/logging_v2/types/logging.py | 118 ++++++++++++++++++ synth.metadata | 4 +- .../logging_v2/test_logging_service_v2.py | 76 +++++++++++ 10 files changed, 503 insertions(+), 4 deletions(-) diff --git a/google/cloud/logging_v2/proto/logging.proto b/google/cloud/logging_v2/proto/logging.proto index 58647b92f..f8b01a71e 100644 --- a/google/cloud/logging_v2/proto/logging.proto +++ b/google/cloud/logging_v2/proto/logging.proto @@ -125,6 +125,15 @@ service LoggingServiceV2 { }; option (google.api.method_signature) = "parent"; } + + // Streaming read of log entries as they are ingested. Until the stream is + // terminated, it will continue reading logs. + rpc TailLogEntries(stream TailLogEntriesRequest) returns (stream TailLogEntriesResponse) { + option (google.api.http) = { + post: "/v2/entries:tail" + body: "*" + }; + } } // The parameters to DeleteLog. @@ -254,6 +263,11 @@ message ListLogEntriesRequest { // "billingAccounts/[BILLING_ACCOUNT_ID]" // "folders/[FOLDER_ID]" // + // May alternatively be one or more views + // projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] // // Projects listed in the `project_ids` field are added to this list. repeated string resource_names = 8 [ @@ -363,6 +377,19 @@ message ListLogsRequest { // `nextPageToken` from the previous response. The values of other method // parameters should be identical to those in the previous call. string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The resource name that owns the logs: + // projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + // + // To support legacy queries, it could also be: + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + repeated string resource_names = 8 [(google.api.field_behavior) = OPTIONAL]; } // Result returned from ListLogs. @@ -377,3 +404,75 @@ message ListLogsResponse { // method again using the value of `nextPageToken` as `pageToken`. string next_page_token = 2; } + +// The parameters to `TailLogEntries`. +message TailLogEntriesRequest { + // Required. 
Name of a parent resource from which to retrieve log entries: + // + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + // + // May alternatively be one or more views: + // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + repeated string resource_names = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. A filter that chooses which log entries to return. See [Advanced + // Logs Filters](https://cloud.google.com/logging/docs/view/advanced_filters). + // Only log entries that match the filter are returned. An empty filter + // matches all log entries in the resources listed in `resource_names`. + // Referencing a parent resource that is not in `resource_names` will cause + // the filter to return no results. The maximum length of the filter is 20000 + // characters. + string filter = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The amount of time to buffer log entries at the server before + // being returned to prevent out of order results due to late arriving log + // entries. Valid values are between 0-60000 milliseconds. Defaults to 2000 + // milliseconds. + google.protobuf.Duration buffer_window = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Result returned from `TailLogEntries`. +message TailLogEntriesResponse { + // Information about entries that were omitted from the session. + message SuppressionInfo { + // An indicator of why entries were omitted. + enum Reason { + // Unexpected default. + REASON_UNSPECIFIED = 0; + + // Indicates suppression occurred due to relevant entries being + // received in excess of rate limits. For quotas and limits, see + // [Logging API quotas and + // limits](https://cloud.google.com/logging/quotas#api-limits). + RATE_LIMIT = 1; + + // Indicates suppression occurred due to the client not consuming + // responses quickly enough. + NOT_CONSUMED = 2; + } + + // The reason that entries were omitted from the session. + Reason reason = 1; + + // A lower bound on the count of entries omitted due to `reason`. + int32 suppressed_count = 2; + } + + // A list of log entries. Each response in the stream will order entries with + // increasing values of `LogEntry.timestamp`. Ordering is not guaranteed + // between separate responses. + repeated LogEntry entries = 1; + + // If entries that otherwise would have been included in the session were not + // sent back to the client, counts of relevant entries omitted from the + // session with the reason that they were not included. There will be at most + // one of each reason per response. The counts represent the number of + // suppressed entries since the last streamed response. 
+ repeated SuppressionInfo suppression_info = 2; +} diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index e6dd57247..82ee957a3 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -18,7 +18,16 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import ( + Dict, + AsyncIterable, + Awaitable, + AsyncIterator, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore @@ -430,6 +439,12 @@ async def list_log_entries( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + Projects listed in the ``project_ids`` field are added to this list. This corresponds to the ``resource_names`` field @@ -690,6 +705,56 @@ async def list_logs( # Done; return the response. return response + def tail_log_entries( + self, + requests: AsyncIterator[logging.TailLogEntriesRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Args: + requests (AsyncIterator[`~.logging.TailLogEntriesRequest`]): + The request object AsyncIterator. The parameters to `TailLogEntries`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[~.logging.TailLogEntriesResponse]: + Result returned from ``TailLogEntries``. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.tail_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
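        # The call object is consumed as an async stream; a minimal sketch
        # (assuming `requests` is an iterator of TailLogEntriesRequest
        # messages prepared by the caller):
        #
        #     stream = client.tail_log_entries(requests)
        #     async for tail_response in stream:
        #         for entry in tail_response.entries:
        #             ...  # handle the entry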
+ return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index 79a9ed1af..a54252bf7 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -19,7 +19,17 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import ( + Callable, + Dict, + Optional, + Iterable, + Iterator, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -598,6 +608,12 @@ def list_log_entries( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + Projects listed in the ``project_ids`` field are added to this list. This corresponds to the ``resource_names`` field @@ -833,6 +849,42 @@ def list_logs( # Done; return the response. return response + def tail_log_entries( + self, + requests: Iterator[logging.TailLogEntriesRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[logging.TailLogEntriesResponse]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Args: + requests (Iterator[`~.logging.TailLogEntriesRequest`]): + The request object iterator. The parameters to `TailLogEntries`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[~.logging.TailLogEntriesResponse]: + Result returned from ``TailLogEntries``. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] + + # Send the request. + response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
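        # The stream is consumed with ordinary iteration; a minimal sketch
        # (assuming `requests` is an iterator of TailLogEntriesRequest
        # messages prepared by the caller):
        #
        #     for tail_response in client.tail_log_entries(requests):
        #         for entry in tail_response.entries:
        #             ...  # handle the entry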
+ return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index c8bcbcbf9..be9dcdbfe 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -186,6 +186,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.tail_log_entries: gapic_v1.method.wrap_method( + self.tail_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, + exceptions.InternalServerError, + exceptions.ServiceUnavailable, + ), + ), + default_timeout=3600.0, + client_info=client_info, + ), } @property @@ -244,5 +259,17 @@ def list_logs( ]: raise NotImplementedError() + @property + def tail_log_entries( + self, + ) -> typing.Callable[ + [logging.TailLogEntriesRequest], + typing.Union[ + logging.TailLogEntriesResponse, + typing.Awaitable[logging.TailLogEntriesResponse], + ], + ]: + raise NotImplementedError() + __all__ = ("LoggingServiceV2Transport",) diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 4c0636e47..d774281b9 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -380,5 +380,33 @@ def list_logs( ) return self._stubs["list_logs"] + @property + def tail_log_entries( + self, + ) -> Callable[[logging.TailLogEntriesRequest], logging.TailLogEntriesResponse]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Returns: + Callable[[~.TailLogEntriesRequest], + ~.TailLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs["tail_log_entries"] + __all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 8a26a078e..686eb52e0 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -390,5 +390,35 @@ def list_logs( ) return self._stubs["list_logs"] + @property + def tail_log_entries( + self, + ) -> Callable[ + [logging.TailLogEntriesRequest], Awaitable[logging.TailLogEntriesResponse] + ]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. 
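        A minimal consumption sketch (names assumed; ``request_iter``
        yields ``TailLogEntriesRequest`` messages)::

            call = transport.tail_log_entries(request_iter)
            async for response in call:
                ...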
+ + Returns: + Callable[[~.TailLogEntriesRequest], + Awaitable[~.TailLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs["tail_log_entries"] + __all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/google/cloud/logging_v2/types/__init__.py b/google/cloud/logging_v2/types/__init__.py index b24bf3b8c..4c85fbb46 100644 --- a/google/cloud/logging_v2/types/__init__.py +++ b/google/cloud/logging_v2/types/__init__.py @@ -66,6 +66,8 @@ ListMonitoredResourceDescriptorsResponse, ListLogsRequest, ListLogsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, ) from .logging_metrics import ( LogMetric, @@ -125,6 +127,8 @@ "ListMonitoredResourceDescriptorsResponse", "ListLogsRequest", "ListLogsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", "LogMetric", "ListLogMetricsRequest", "ListLogMetricsResponse", diff --git a/google/cloud/logging_v2/types/logging.py b/google/cloud/logging_v2/types/logging.py index 0d44439ab..cec8993f5 100644 --- a/google/cloud/logging_v2/types/logging.py +++ b/google/cloud/logging_v2/types/logging.py @@ -20,6 +20,7 @@ from google.api import monitored_resource_pb2 as monitored_resource # type: ignore from google.cloud.logging_v2.types import log_entry +from google.protobuf import duration_pb2 as duration # type: ignore from google.rpc import status_pb2 as status # type: ignore @@ -36,6 +37,8 @@ "ListMonitoredResourceDescriptorsResponse", "ListLogsRequest", "ListLogsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", }, ) @@ -208,6 +211,12 @@ class ListLogEntriesRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + Projects listed in the ``project_ids`` field are added to this list. filter (str): @@ -358,6 +367,16 @@ class ListLogsRequest(proto.Message): ``pageToken`` must be the value of ``nextPageToken`` from the previous response. The values of other method parameters should be identical to those in the previous call. + resource_names (Sequence[str]): + Optional. The resource name that owns the logs: + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + To support legacy queries, it could also be: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". 
""" parent = proto.Field(proto.STRING, number=1) @@ -366,6 +385,8 @@ class ListLogsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=3) + resource_names = proto.RepeatedField(proto.STRING, number=8) + class ListLogsResponse(proto.Message): r"""Result returned from ListLogs. @@ -391,4 +412,101 @@ def raw_page(self): next_page_token = proto.Field(proto.STRING, number=2) +class TailLogEntriesRequest(proto.Message): + r"""The parameters to ``TailLogEntries``. + + Attributes: + resource_names (Sequence[str]): + Required. Name of a parent resource from which to retrieve + log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views: + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]". + filter (str): + Optional. A filter that chooses which log entries to return. + See `Advanced Logs + Filters `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources listed + in ``resource_names``. Referencing a parent resource that is + not in ``resource_names`` will cause the filter to return no + results. The maximum length of the filter is 20000 + characters. + buffer_window (~.duration.Duration): + Optional. The amount of time to buffer log + entries at the server before being returned to + prevent out of order results due to late + arriving log entries. Valid values are between + 0-60000 milliseconds. Defaults to 2000 + milliseconds. + """ + + resource_names = proto.RepeatedField(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + + +class TailLogEntriesResponse(proto.Message): + r"""Result returned from ``TailLogEntries``. + + Attributes: + entries (Sequence[~.log_entry.LogEntry]): + A list of log entries. Each response in the stream will + order entries with increasing values of + ``LogEntry.timestamp``. Ordering is not guaranteed between + separate responses. + suppression_info (Sequence[~.logging.TailLogEntriesResponse.SuppressionInfo]): + If entries that otherwise would have been + included in the session were not sent back to + the client, counts of relevant entries omitted + from the session with the reason that they were + not included. There will be at most one of each + reason per response. The counts represent the + number of suppressed entries since the last + streamed response. + """ + + class SuppressionInfo(proto.Message): + r"""Information about entries that were omitted from the session. + + Attributes: + reason (~.logging.TailLogEntriesResponse.SuppressionInfo.Reason): + The reason that entries were omitted from the + session. + suppressed_count (int): + A lower bound on the count of entries omitted due to + ``reason``. 
+ """ + + class Reason(proto.Enum): + r"""An indicator of why entries were omitted.""" + REASON_UNSPECIFIED = 0 + RATE_LIMIT = 1 + NOT_CONSUMED = 2 + + reason = proto.Field( + proto.ENUM, number=1, enum="TailLogEntriesResponse.SuppressionInfo.Reason", + ) + + suppressed_count = proto.Field(proto.INT32, number=2) + + entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) + + suppression_info = proto.RepeatedField( + proto.MESSAGE, number=2, message=SuppressionInfo, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/synth.metadata b/synth.metadata index a6ed7f34c..c3228fbcf 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,8 +11,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "be0bdf86cd31aa7c1a7b30a9a2e9f2fd53ee3d91", - "internalRef": "342353190" + "sha": "e8857c4c36948e7e0500377cd7fcecbf2459afc8", + "internalRef": "344435830" } }, { diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 2c08f63b2..f6cb5d7a1 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1698,6 +1698,81 @@ async def test_list_logs_async_pages(): assert page_.raw_page.next_page_token == token +def test_tail_log_entries( + transport: str = "grpc", request_type=logging.TailLogEntriesRequest +): + client = LoggingServiceV2Client( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([logging.TailLogEntriesResponse()]) + + response = client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, logging.TailLogEntriesResponse) + + +def test_tail_log_entries_from_dict(): + test_tail_log_entries(request_type=dict) + + +@pytest.mark.asyncio +async def test_tail_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest +): + client = LoggingServiceV2AsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[logging.TailLogEntriesResponse()] + ) + + response = await client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, logging.TailLogEntriesResponse) + + +@pytest.mark.asyncio +async def test_tail_log_entries_async_from_dict(): + await test_tail_log_entries_async(request_type=dict) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( @@ -1800,6 +1875,7 @@ def test_logging_service_v2_base_transport(): "list_log_entries", "list_monitored_resource_descriptors", "list_logs", + "tail_log_entries", ) for method in methods: with pytest.raises(NotImplementedError): From 6349b899811cbb16f5548df0b77564b46666c4e7 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 14 Dec 2020 12:42:53 -0800 Subject: [PATCH 8/9] fix: remove client recv msg limit fix: add enums to `types/__init__.py` (#131) PiperOrigin-RevId: 347055288 Source-Author: Google APIs Source-Date: Fri Dec 11 12:44:37 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: dd372aa22ded7a8ba6f0e03a80e06358a3fa0907 Source-Link: https://github.com/googleapis/googleapis/commit/dd372aa22ded7a8ba6f0e03a80e06358a3fa0907 --- .../services/config_service_v2/transports/__init__.py | 1 - .../services/config_service_v2/transports/grpc.py | 10 +++++++++- .../config_service_v2/transports/grpc_asyncio.py | 8 ++++++++ .../services/logging_service_v2/transports/__init__.py | 1 - .../services/logging_service_v2/transports/grpc.py | 10 +++++++++- .../logging_service_v2/transports/grpc_asyncio.py | 8 ++++++++ .../services/metrics_service_v2/transports/__init__.py | 1 - .../services/metrics_service_v2/transports/grpc.py | 10 +++++++++- .../metrics_service_v2/transports/grpc_asyncio.py | 8 ++++++++ google/cloud/logging_v2/types/__init__.py | 3 ++- synth.metadata | 6 +++--- tests/unit/gapic/logging_v2/test_config_service_v2.py | 8 ++++++++ tests/unit/gapic/logging_v2/test_logging_service_v2.py | 8 ++++++++ tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 8 ++++++++ 14 files changed, 80 insertions(+), 10 deletions(-) diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index c4ae13076..30282e2d2 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - __all__ = ( "ConfigServiceV2Transport", "ConfigServiceV2GrpcTransport", diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index f083373b1..a64405fba 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -147,6 +147,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -165,6 +169,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or 
self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -191,7 +199,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 7376164e4..aa094ea0e 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -192,6 +192,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -210,6 +214,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 910a38ecd..cd979b771 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - __all__ = ( "LoggingServiceV2Transport", "LoggingServiceV2GrpcTransport", diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index d774281b9..f8007bb0d 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -147,6 +147,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -165,6 +169,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -191,7 +199,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 686eb52e0..6adea9ca5 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -192,6 +192,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -210,6 +214,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index eef07abd7..f748403b4 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - __all__ = ( "MetricsServiceV2Transport", "MetricsServiceV2GrpcTransport", diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 0a6f25bd6..1cb9262ab 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -147,6 +147,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -165,6 +169,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -191,7 +199,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 9ec30eed0..ddbd16da6 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -192,6 +192,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -210,6 +214,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. diff --git a/google/cloud/logging_v2/types/__init__.py b/google/cloud/logging_v2/types/__init__.py index 4c85fbb46..dce385af3 100644 --- a/google/cloud/logging_v2/types/__init__.py +++ b/google/cloud/logging_v2/types/__init__.py @@ -54,6 +54,7 @@ GetCmekSettingsRequest, UpdateCmekSettingsRequest, CmekSettings, + LifecycleState, ) from .logging import ( DeleteLogRequest, @@ -79,7 +80,6 @@ DeleteLogMetricRequest, ) - __all__ = ( "LogEntry", "LogEntryOperation", @@ -117,6 +117,7 @@ "GetCmekSettingsRequest", "UpdateCmekSettingsRequest", "CmekSettings", + "LifecycleState", "DeleteLogRequest", "WriteLogEntriesRequest", "WriteLogEntriesResponse", diff --git a/synth.metadata b/synth.metadata index c3228fbcf..6ab2fc041 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-logging.git", - "sha": "7eaa5853f3a45e3db015a09841b98aeab461e6f3" + "sha": "3a25c8cd9bd06e5a8f488945c9bc94380e2bf0d1" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e8857c4c36948e7e0500377cd7fcecbf2459afc8", - "internalRef": "344435830" + "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907", + "internalRef": "347055288" } }, { diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index 25e35e5c6..47a41f25c 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -5382,6 +5382,10 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -5428,6 +5432,10 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index f6cb5d7a1..2b8129f29 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -2048,6 +2048,10 @@ def 
test_logging_service_v2_transport_channel_mtls_with_client_cert_source( ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -2095,6 +2099,10 @@ def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 0cf2e8944..0bc10e4bc 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1993,6 +1993,10 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -2040,6 +2044,10 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel From 2a36af63499728863631f3a767f369f8452e9e42 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 16 Dec 2020 11:07:42 -0800 Subject: [PATCH 9/9] chore: release 2.0.2 (#116) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 9 +++++++++ setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a85cfb01e..658443a46 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,15 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +### [2.0.2](https://www.github.com/googleapis/python-logging/compare/v2.0.1...v2.0.2) (2020-12-14) + + +### Bug Fixes + +* Add submodule imports for handlers to logging alias ([#117](https://www.github.com/googleapis/python-logging/issues/117)) ([6843a3a](https://www.github.com/googleapis/python-logging/commit/6843a3aee3c0908ddbc493e7a9ecdddd01df34ef)) +* remove client recv msg limit fix: add enums to `types/__init__.py` ([#131](https://www.github.com/googleapis/python-logging/issues/131)) ([6349b89](https://www.github.com/googleapis/python-logging/commit/6349b899811cbb16f5548df0b77564b46666c4e7)) +* Remove keyword only argument for RequestsMiddleware ([#113](https://www.github.com/googleapis/python-logging/issues/113)) ([e704f28](https://www.github.com/googleapis/python-logging/commit/e704f287a40db38d0da42fa5e21e7a9ef73922ec)) + ### [2.0.1](https://www.github.com/googleapis/python-logging/compare/v2.0.0...v2.0.1) (2020-12-02) diff --git a/setup.py b/setup.py index 635bd1a57..d50ed511d 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = 
"google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.0.1" +version = "2.0.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'