Skip to content

Commit b4ce8da

Browse files
authored
chore: add argparse to run samples as script (#1538)
* add argparse so that the samples can be run as a standalone script ```python python samples/snippets/storage_transfer_manager_upload_chunks_concurrently.py --bucket_name <bucket_name> --source_filename <path/to/file> --destination_blob_name <GCS_object_name> ```
1 parent 87f7196 commit b4ce8da

File tree

1 file changed

+38
-0
lines changed

1 file changed

+38
-0
lines changed

samples/snippets/storage_transfer_manager_upload_chunks_concurrently.py

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,8 @@
1111
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
14+
import argparse
15+
1416

1517
# [START storage_transfer_manager_upload_chunks_concurrently]
1618
def upload_chunks_concurrently(
@@ -54,4 +56,40 @@ def upload_chunks_concurrently(
5456
print(f"File {source_filename} uploaded to {destination_blob_name}.")
5557

5658

59+
if __name__ == "__main__":
    # CLI entry point so the sample can run as a standalone script.
    # NOTE: use a distinct name for the parser — assigning it to `argparse`
    # would shadow the module and break any later `argparse.*` access.
    parser = argparse.ArgumentParser(
        description="Upload a file to GCS in chunks concurrently."
    )
    # The three positional pieces of the upload are mandatory; `required=True`
    # gives a clean usage error instead of passing None to the upload call.
    parser.add_argument(
        "--bucket_name",
        required=True,
        help="The name of the GCS bucket to upload to.",
    )
    parser.add_argument(
        "--source_filename",
        required=True,
        help="The local path to the file to upload.",
    )
    parser.add_argument(
        "--destination_blob_name",
        required=True,
        help="The name of the object in GCS.",
    )
    parser.add_argument(
        "--chunk_size",
        type=int,
        default=32 * 1024 * 1024,
        help="The size of each chunk in bytes (default: 32 MiB). The remote\
        service has a minimum of 5 MiB and a maximum of 5 GiB",
    )
    parser.add_argument(
        "--workers",
        type=int,
        default=8,
        help="The number of worker processes to use (default: 8).",
    )
    args = parser.parse_args()
    upload_chunks_concurrently(
        args.bucket_name,
        args.source_filename,
        args.destination_blob_name,
        args.chunk_size,
        args.workers,
    )
5795
# [END storage_transfer_manager_upload_chunks_concurrently]

0 commit comments

Comments
 (0)