
Commit 4698799

MiaCY and andrewsg authored
docs: add sample and sample test for transfer manager (#1027)
* add sample and sample test for transfer manager download blob as chunks concurrently method
* chore: modify format for int
* chore: refactor transfer manager sample names and tests

Co-authored-by: Andrew Gorcester <[email protected]>
1 parent 6532e7c commit 4698799

6 files changed (+284, -188 lines)

samples/snippets/snippets_test.py (+30, -4)

@@ -72,7 +72,10 @@
 import storage_set_bucket_default_kms_key
 import storage_set_client_endpoint
 import storage_set_metadata
-import storage_transfer_manager
+import storage_transfer_manager_download_all_blobs
+import storage_transfer_manager_download_chunks_concurrently
+import storage_transfer_manager_upload_directory
+import storage_transfer_manager_upload_many_blobs
 import storage_upload_file
 import storage_upload_from_memory
 import storage_upload_from_stream
@@ -686,7 +689,7 @@ def test_transfer_manager_snippets(test_bucket, capsys):
             with open(os.path.join(uploads, name), "w") as f:
                 f.write(name)
 
-        storage_transfer_manager.upload_many_blobs_with_transfer_manager(
+        storage_transfer_manager_upload_many_blobs.upload_many_blobs_with_transfer_manager(
             test_bucket.name,
             BLOB_NAMES,
             source_directory="{}/".format(uploads),
@@ -699,7 +702,7 @@ def test_transfer_manager_snippets(test_bucket, capsys):
 
     with tempfile.TemporaryDirectory() as downloads:
         # Download the files.
-        storage_transfer_manager.download_all_blobs_with_transfer_manager(
+        storage_transfer_manager_download_all_blobs.download_all_blobs_with_transfer_manager(
             test_bucket.name,
             destination_directory=os.path.join(downloads, ""),
             threads=2,
@@ -729,11 +732,34 @@ def test_transfer_manager_directory_upload(test_bucket, capsys):
             with open(os.path.join(uploads, name), "w") as f:
                 f.write(name)
 
-        storage_transfer_manager.upload_directory_with_transfer_manager(
+        storage_transfer_manager_upload_directory.upload_directory_with_transfer_manager(
             test_bucket.name, source_directory="{}/".format(uploads)
         )
         out, _ = capsys.readouterr()
 
         assert "Found {}".format(len(BLOB_NAMES)) in out
         for name in BLOB_NAMES:
             assert "Uploaded {}".format(name) in out
+
+
+def test_transfer_manager_download_chunks_concurrently(test_bucket, capsys):
+    BLOB_NAME = "test_file.txt"
+
+    with tempfile.NamedTemporaryFile() as file:
+        file.write(b"test")
+
+        storage_upload_file.upload_blob(
+            test_bucket.name, file.name, BLOB_NAME
+        )
+
+    with tempfile.TemporaryDirectory() as downloads:
+        # Download the file.
+        storage_transfer_manager_download_chunks_concurrently.download_chunks_concurrently(
+            test_bucket.name,
+            BLOB_NAME,
+            os.path.join(downloads, BLOB_NAME),
+            processes=8,
+        )
+        out, _ = capsys.readouterr()
+
+        assert "Downloaded {} to {}".format(BLOB_NAME, os.path.join(downloads, BLOB_NAME)) in out
samples/snippets/storage_transfer_manager.py (-184)

This file was deleted.

samples/snippets/storage_transfer_manager_download_all_blobs.py (+65)
@@ -0,0 +1,65 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def download_all_blobs_with_transfer_manager(
+    bucket_name, destination_directory="", threads=4
+):
+    """Download all of the blobs in a bucket, concurrently in a thread pool.
+
+    The filename of each blob once downloaded is derived from the blob name and
+    the `destination_directory` parameter. For complete control of the filename
+    of each blob, use transfer_manager.download_many() instead.
+
+    Directories will be created automatically as needed, for instance to
+    accommodate blob names that include slashes.
+    """
+
+    # The ID of your GCS bucket
+    # bucket_name = "your-bucket-name"
+
+    # The directory on your computer to which to download all of the files. This
+    # string is prepended (with os.path.join()) to the name of each blob to form
+    # the full path. Relative paths and absolute paths are both accepted. An
+    # empty string means "the current working directory". Note that this
+    # parameter allows directory traversal ("../" etc.) and is not
+    # intended for unsanitized end user input.
+    # destination_directory = ""
+
+    # The number of threads to use for the operation. The performance impact of
+    # this value depends on the use case, but generally, smaller files benefit
+    # from more threads and larger files don't benefit from more threads. Too
+    # many threads can slow operations, especially with large files, due to
+    # contention over the Python GIL.
+    # threads=4
+
+    from google.cloud.storage import Client, transfer_manager
+
+    storage_client = Client()
+    bucket = storage_client.bucket(bucket_name)
+
+    blob_names = [blob.name for blob in bucket.list_blobs()]
+
+    results = transfer_manager.download_many_to_path(
+        bucket, blob_names, destination_directory=destination_directory, threads=threads
+    )
+
+    for name, result in zip(blob_names, results):
+        # The results list is either `None` or an exception for each blob in
+        # the input list, in order.
+
+        if isinstance(result, Exception):
+            print("Failed to download {} due to exception: {}".format(name, result))
+        else:
+            print("Downloaded {} to {}.".format(name, destination_directory + name))
samples/snippets/storage_transfer_manager_download_chunks_concurrently.py (+44)
@@ -0,0 +1,44 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def download_chunks_concurrently(bucket_name, blob_name, filename, processes=8):
+    """Download a single file in chunks, concurrently."""
+
+    # The ID of your GCS bucket
+    # bucket_name = "your-bucket-name"
+
+    # The file to be downloaded
+    # blob_name = "target-file"
+
+    # The destination filename or path
+    # filename = ""
+
+    # The maximum number of worker processes that should be used to handle the
+    # workload of downloading the blob concurrently. The PROCESS worker type uses
+    # more system resources (both memory and CPU) and can result in faster
+    # operations when working with large files. The optimal number of workers
+    # depends heavily on the specific use case. Refer to the docstring of the
+    # underlying method for more details.
+    # processes=8
+
+    from google.cloud.storage import Client, transfer_manager
+
+    storage_client = Client()
+    bucket = storage_client.bucket(bucket_name)
+    blob = bucket.blob(blob_name)
+
+    transfer_manager.download_chunks_concurrently(blob, filename, max_workers=processes)
+
+    print("Downloaded {} to {}.".format(blob_name, filename))
