11 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | 12 | # See the License for the specific language governing permissions and
13 | 13 | # limitations under the License.
| 14 | +import argparse |
| 15 | + |
14 | 16 |
15 | 17 | # [START storage_transfer_manager_upload_chunks_concurrently]
16 | 18 | def upload_chunks_concurrently(
@@ -54,4 +56,40 @@ def upload_chunks_concurrently(
54 | 56 |     print(f"File {source_filename} uploaded to {destination_blob_name}.")
55 | 57 |
56 | 58 |
| 59 | +if __name__ == "__main__": |
| 60 | +    parser = argparse.ArgumentParser( |
| 61 | +        description="Upload a file to GCS in chunks concurrently." |
| 62 | +    ) |
| 63 | +    parser.add_argument( |
| 64 | +        "--bucket_name", help="The name of the GCS bucket to upload to." |
| 65 | +    ) |
| 66 | +    parser.add_argument( |
| 67 | +        "--source_filename", help="The local path to the file to upload." |
| 68 | +    ) |
| 69 | +    parser.add_argument( |
| 70 | +        "--destination_blob_name", help="The name of the object in GCS." |
| 71 | +    ) |
| 72 | +    parser.add_argument( |
| 73 | +        "--chunk_size", |
| 74 | +        type=int, |
| 75 | +        default=32 * 1024 * 1024, |
| 76 | +        help="The size of each chunk in bytes (default: 32 MiB). The remote " |
| 77 | +        "service has a minimum of 5 MiB and a maximum of 5 GiB.", |
| 78 | +    ) |
| 79 | +    parser.add_argument( |
| 80 | +        "--workers", |
| 81 | +        type=int, |
| 82 | +        default=8, |
| 83 | +        help="The number of worker processes to use (default: 8).", |
| 84 | +    ) |
| 85 | +    args = parser.parse_args() |
| 86 | +    upload_chunks_concurrently( |
| 87 | +        args.bucket_name, |
| 88 | +        args.source_filename, |
| 89 | +        args.destination_blob_name, |
| 90 | +        args.chunk_size, |
| 91 | +        args.workers, |
| 92 | +    ) |
| 93 | + |
| 94 | + |
57 | 95 | # [END storage_transfer_manager_upload_chunks_concurrently]
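
The hunk collapses the body of `upload_chunks_concurrently` itself, which is what the new CLI drives. For orientation, here is a minimal sketch of how such a chunked, concurrent upload is typically written with the `google-cloud-storage` library's `transfer_manager` module; the body and parameter names below are assumptions (the names are inferred from the new `__main__` block), not the file's actual contents.

```python
# Hypothetical sketch only -- the real sample's body is collapsed in the diff above.
from google.cloud.storage import Client, transfer_manager


def upload_chunks_concurrently(
    bucket_name, source_filename, destination_blob_name, chunk_size, workers
):
    """Upload one large file by splitting it into concurrently uploaded chunks."""
    client = Client()
    bucket = client.bucket(bucket_name)
    blob = bucket.blob(destination_blob_name)

    # Each chunk is uploaded by its own worker process; the service then
    # assembles the parts into the final object.
    transfer_manager.upload_chunks_concurrently(
        source_filename, blob, chunk_size=chunk_size, max_workers=workers
    )

    print(f"File {source_filename} uploaded to {destination_blob_name}.")
```

With the `__main__` block added by this change, the sample can also be run from the command line via the `--bucket_name`, `--source_filename`, `--destination_blob_name`, `--chunk_size`, and `--workers` flags defined above.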