Merge pull request #48 from RachelTucker/update-5-4
Generated 5.4 BP API from contract commit 2742576
david-limbach committed Oct 6, 2021
2 parents 28639d0 + c14c0a8 commit d156ec2
Showing 3 changed files with 15 additions and 2 deletions.
7 changes: 7 additions & 0 deletions README.md
@@ -84,3 +84,10 @@ To put data to a Spectra S3 appliance you have to do it inside the context of wh
[An example of getting data with the Python SDK](samples/gettingData.py)

[An example of how to give objects on the server a different name than what is on the filesystem, and how to delete objects by folder](samples/renaming.py)

## Creating a New Release
Update the version of the SDK before creating a new release. The format is `<major>.<minor>.<patch>`, where
`<major>.<minor>` must match the BP version and `<patch>` increments with each SDK release for that
major/minor version.

The release number is specified in `setup.py`.
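
For illustration, a hypothetical follow-up patch release against BP 5.4 would bump only the `<patch>` component of the version string in `setup.py` (the `5.4.1` value below is an invented example, not an actual release):

```python
# setup.py for a hypothetical second SDK release against BP 5.4:
# <major>.<minor> stays pinned to the BP version; only <patch> increments.
from distutils.core import setup

setup(name='DS3 SDK',
      version='5.4.1',  # 5.4.0 -> 5.4.1 (patch bump only)
      description='Python3 SDK and CLI for Spectra S3',
      author_email='developer@spectralogic.com',
      packages=['ds3'])
```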
8 changes: 7 additions & 1 deletion ds3/ds3.py
@@ -330,8 +330,10 @@ def __init__(self):
'DefaultVerifyDataPriorToImport': None,
'Id': None,
'InstanceId': None,
'IomCacheLimitationPercent': None,
'IomEnabled': None,
'LastHeartbeat': None,
'MaxAggregatedBlobsPerChunk': None,
'PartiallyVerifyLastPercentOfTapes': None,
'UnavailableMediaPolicy': None,
'UnavailablePoolMaxJobRetryInMins': None,
@@ -3169,7 +3171,7 @@ def __init__(self, full_details=None):

class ModifyDataPathBackendSpectraS3Request(AbstractRequest):

def __init__(self, activated=None, allow_new_job_requests=None, auto_activate_timeout_in_mins=None, auto_inspect=None, cache_available_retry_after_in_seconds=None, default_verify_data_after_import=None, default_verify_data_prior_to_import=None, iom_enabled=None, partially_verify_last_percent_of_tapes=None, unavailable_media_policy=None, unavailable_pool_max_job_retry_in_mins=None, unavailable_tape_partition_max_job_retry_in_mins=None):
def __init__(self, activated=None, allow_new_job_requests=None, auto_activate_timeout_in_mins=None, auto_inspect=None, cache_available_retry_after_in_seconds=None, default_verify_data_after_import=None, default_verify_data_prior_to_import=None, iom_cache_limitation_percent=None, iom_enabled=None, max_aggregated_blobs_per_chunk=None, partially_verify_last_percent_of_tapes=None, unavailable_media_policy=None, unavailable_pool_max_job_retry_in_mins=None, unavailable_tape_partition_max_job_retry_in_mins=None):
super(ModifyDataPathBackendSpectraS3Request, self).__init__()
if activated is not None:
self.query_params['activated'] = activated
@@ -3185,8 +3187,12 @@ def __init__(self, activated=None, allow_new_job_requests=None, auto_activate_ti
self.query_params['default_verify_data_after_import'] = default_verify_data_after_import
if default_verify_data_prior_to_import is not None:
self.query_params['default_verify_data_prior_to_import'] = default_verify_data_prior_to_import
if iom_cache_limitation_percent is not None:
self.query_params['iom_cache_limitation_percent'] = iom_cache_limitation_percent
if iom_enabled is not None:
self.query_params['iom_enabled'] = iom_enabled
if max_aggregated_blobs_per_chunk is not None:
self.query_params['max_aggregated_blobs_per_chunk'] = max_aggregated_blobs_per_chunk
if partially_verify_last_percent_of_tapes is not None:
self.query_params['partially_verify_last_percent_of_tapes'] = partially_verify_last_percent_of_tapes
if unavailable_media_policy is not None:
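
For context, a minimal sketch of how a caller might exercise the two new query parameters, assuming the SDK's usual pattern of building a client from environment variables and passing the request object to the matching snake_case client method (the parameter values and method name below are illustrative assumptions, not taken from this commit):

```python
# Illustrative sketch only; values are made up, not recommendations.
from ds3 import ds3

# Assumes DS3_ENDPOINT, DS3_ACCESS_KEY, and DS3_SECRET_KEY are set in the environment.
client = ds3.createClientFromEnv()

request = ds3.ModifyDataPathBackendSpectraS3Request(
    iom_cache_limitation_percent=80,      # new in 5.4
    max_aggregated_blobs_per_chunk=100)   # new in 5.4

# Assumed client method name, following the SDK's request-class-to-method convention.
client.modify_data_path_backend_spectra_s3(request)
```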
2 changes: 1 addition & 1 deletion setup.py
@@ -14,7 +14,7 @@
from distutils.core import setup

setup(name='DS3 SDK',
version='5.3.0',
version='5.4.0',
description='Python3 SDK and CLI for Spectra S3',
author_email='developer@spectralogic.com',
packages=['ds3'])
