From 26bb6bddb7bba052efe872a22d712cc82ef61137 Mon Sep 17 00:00:00 2001 From: "michael.richey" Date: Mon, 7 Apr 2025 16:48:16 -0400 Subject: [PATCH 1/6] Split resource files into multiple files --- datadog_sync/commands/shared/options.py | 10 ++++ datadog_sync/constants.py | 1 + datadog_sync/utils/state.py | 4 ++ datadog_sync/utils/storage/local_file.py | 62 +++++++++++++++++++----- 4 files changed, 64 insertions(+), 13 deletions(-) diff --git a/datadog_sync/commands/shared/options.py b/datadog_sync/commands/shared/options.py index 597b63ef..33493f3c 100644 --- a/datadog_sync/commands/shared/options.py +++ b/datadog_sync/commands/shared/options.py @@ -187,6 +187,16 @@ def click_config_file_provider(ctx: Context, opts: CustomOptionClass, value: Non help="Enables sync-cli metrics being sent to both source and destination", cls=CustomOptionClass, ), + option( + "--resource-per-file", + required=False, + is_flag=True, + default=False, + show_default=True, + help="By default, resource files contain many resources of the same resource type; setting this flag " + "will create a separate resource file for each individual resource.", + cls=CustomOptionClass, + ), ] _storage_options = [ diff --git a/datadog_sync/constants.py b/datadog_sync/constants.py index 21c88544..90904ad5 100644 --- a/datadog_sync/constants.py +++ b/datadog_sync/constants.py @@ -64,6 +64,7 @@ SOURCE_PATH_DEFAULT = "resources/source" DESTINATION_PATH_PARAM = "destination_resources_path" DESTINATION_PATH_DEFAULT = "resources/destination" +RESOURCE_PER_FILE = "resource_per_file" # Commands diff --git a/datadog_sync/utils/state.py b/datadog_sync/utils/state.py index baaa5350..8f07a6fa 100644 --- a/datadog_sync/utils/state.py +++ b/datadog_sync/utils/state.py @@ -10,6 +10,7 @@ AWS_BUCKET_KEY_PREFIX_SOURCE, DESTINATION_PATH_DEFAULT, DESTINATION_PATH_PARAM, + RESOURCE_PER_FILE, SOURCE_PATH_DEFAULT, SOURCE_PATH_PARAM, ) @@ -21,12 +22,15 @@ class State: def __init__(self, type_: StorageType = StorageType.LOCAL_FILE, **kwargs: object) -> None: + resource_per_file = kwargs.get(RESOURCE_PER_FILE, False) + if type_ == StorageType.LOCAL_FILE: source_resources_path = kwargs.get(SOURCE_PATH_PARAM, SOURCE_PATH_DEFAULT) destination_resources_path = kwargs.get(DESTINATION_PATH_PARAM, DESTINATION_PATH_DEFAULT) self._storage: BaseStorage = LocalFile( source_resources_path=source_resources_path, destination_resources_path=destination_resources_path, + resource_per_file=resource_per_file, ) elif type_ == StorageType.AWS_S3_BUCKET: source_resources_path = kwargs.get(AWS_BUCKET_KEY_PREFIX_SOURCE, SOURCE_PATH_DEFAULT) diff --git a/datadog_sync/utils/storage/local_file.py b/datadog_sync/utils/storage/local_file.py index 27b0ef80..a0fc26ba 100644 --- a/datadog_sync/utils/storage/local_file.py +++ b/datadog_sync/utils/storage/local_file.py @@ -22,9 +22,17 @@ class LocalFile(BaseStorage): def __init__( - self, source_resources_path=SOURCE_PATH_DEFAULT, destination_resources_path=DESTINATION_PATH_DEFAULT + self, + source_resources_path=SOURCE_PATH_DEFAULT, + destination_resources_path=DESTINATION_PATH_DEFAULT, + resource_per_file=False, ) -> None: super().__init__() + # resource_per_file is a boolean, when False we maintain the behavior of storing all the resources + # by their resource type, so there is one file for all the monitors and another file for all the + # dashboards. In that case files are named {resource_type}.json. When the boolean is True each resource + # will be in its own file. The file anme will be {resource_type}.{identifier}.json.
+ self.resource_per_file = resource_per_file self.source_resources_path = source_resources_path self.destination_resources_path = destination_resources_path @@ -35,21 +43,31 @@ def get(self, origin: Origin) -> StorageData: for file in os.listdir(self.source_resources_path): if file.endswith(".json"): resource_type = file.split(".")[0] - with open(self.source_resources_path + f"/{file}") as f: + with open(self.source_resources_path + f"/{file}", "r", encoding="utf-8") as input_file: try: - data.source[resource_type] = json.load(f) + if not data.source[resource_type]: + data.source[resource_type] = json.load(input_file) + else: + data.source[resource_type].update(json.load(input_file)) except json.decoder.JSONDecodeError: - log.warning(f"invalid json in source resource file: {resource_type}") + log.warning(f"invalid json in source resource file: {file}") if origin in [Origin.DESTINATION, Origin.ALL] and os.path.exists(self.destination_resources_path): for file in os.listdir(self.destination_resources_path): if file.endswith(".json"): resource_type = file.split(".")[0] - with open(self.destination_resources_path + f"/{file}") as f: + with open( + self.destination_resources_path + f"/{file}", + "r", + encoding="utf-8", + ) as input_file: try: - data.destination[resource_type] = json.load(f) + if not data.destination[resource_type]: + data.destination[resource_type] = json.load(input_file) + else: + data.destination[resource_type].update(json.load(input_file)) except json.decoder.JSONDecodeError: - log.warning(f"invalid json in destination resource file: {resource_type}") + log.warning(f"invalid json in destination resource file: {file}") return data @@ -64,11 +82,29 @@ def put(self, origin: Origin, data: StorageData) -> None: def write_resources_file(self, origin: Origin, data: StorageData) -> None: if origin in [Origin.SOURCE, Origin.ALL]: - for resource_type, v in data.source.items(): - with open(self.source_resources_path + f"/{resource_type}.json", "w+") as f: - json.dump(v, f) + for resource_type, value in data.source.items(): + filename = f"{self.source_resources_path}/{resource_type}" + if self.resource_per_file: + filename += f".{value.keys()[0]}" + filename += ".json" + if self.resource_per_file: + for _id, resource in value.items(): + with open(filename, "w+", encoding="utf-8") as out_file: + json.dump({_id: resource}, out_file) + else: + with open(filename, "w+", encoding="utf-8") as out_file: + json.dump(value, out_file) if origin in [Origin.DESTINATION, Origin.ALL]: - for resource_type, v in data.destination.items(): - with open(self.destination_resources_path + f"/{resource_type}.json", "w+") as f: - json.dump(v, f) + for resource_type, value in data.destination.items(): + filename = f"{self.destination_resources_path}/{resource_type}" + if self.resource_per_file: + filename += f".{value.keys()[0]}" + filename += ".json" + if self.resource_per_file: + for _id, resource in value.items(): + with open(filename, "w+", encoding="utf-8") as out_file: + json.dump({_id: resource}, out_file) + else: + with open(filename, "w+", encoding="utf-8") as out_file: + json.dump(value, out_file) From aea66ffc9ebf3412df25cf05c50648ee92ccfca1 Mon Sep 17 00:00:00 2001 From: "michael.richey" Date: Tue, 8 Apr 2025 16:32:37 -0400 Subject: [PATCH 2/6] Added multiple files to aws path --- datadog_sync/utils/storage/aws_s3_bucket.py | 64 ++++++++++++++++----- datadog_sync/utils/storage/local_file.py | 10 ++-- 2 files changed, 54 insertions(+), 20 deletions(-) diff --git 
a/datadog_sync/utils/storage/aws_s3_bucket.py b/datadog_sync/utils/storage/aws_s3_bucket.py index f0039f19..0937a0b9 100644 --- a/datadog_sync/utils/storage/aws_s3_bucket.py +++ b/datadog_sync/utils/storage/aws_s3_bucket.py @@ -26,12 +26,18 @@ def __init__( self, source_resources_path=SOURCE_PATH_DEFAULT, destination_resources_path=DESTINATION_PATH_DEFAULT, + resource_per_file=False, config=None, ) -> None: log.info("AWS S3 init called") super().__init__() self.source_resources_path = source_resources_path self.destination_resources_path = destination_resources_path + # resource_per_file is a boolean, when False we maintain the behavior of storing all the resources + # by their resource type, so there is one file for all the monitors and another file for all the + # dashboards. In that case files are named {resource_type}.json. When the boolean is True each resource + # will be in its own file. The file anme will be {resource_type}.{identifier}.json. + self.resource_per_file = resource_per_file if not config: raise ValueError("No S3 configuration passed in") self.client = boto3.client( @@ -60,7 +66,10 @@ def get(self, origin: Origin) -> StorageData: ) content_body = response.get("Body") try: - data.source[resource_type] = json.load(content_body) + if not data.source[resource_type]: + data.source[resource_type] = json.load(content_body) + else: + data.source[resource_type].update(json.load(content_body)) except json.decoder.JSONDecodeError: log.warning(f"invalid json in aws source resource file: {resource_type}") @@ -77,7 +86,10 @@ def get(self, origin: Origin) -> StorageData: ) content_body = response.get("Body") try: - data.destination[resource_type] = json.load(content_body) + if not data.destination[resource_type]: + data.destination[resource_type] = json.load(content_body) + else: + data.destination[resource_type].update(json.load(content_body)) except json.decoder.JSONDecodeError: log.warning(f"invalid json in aws destination resource file: {resource_type}") @@ -87,18 +99,42 @@ def put(self, origin: Origin, data: StorageData) -> None: log.info("AWS S3 put called") if origin in [Origin.SOURCE, Origin.ALL]: for resource_type, resource_data in data.source.items(): - binary_data = bytes(json.dumps(resource_data), "UTF-8") - self.client.put_object( - Body=binary_data, - Bucket=self.bucket_name, - Key=f"{self.source_resources_path}/{resource_type}.json", - ) + key = f"{self.source_resources_path}/{resource_type}" + if self.resource_per_file: + for _id, resource in resource_data.items(): + key += f".{_id}.json" + binary_data = bytes(json.dumps({_id: resource}), "UTF-8") + self.client.put_object( + Body=binary_data, + Bucket=self.bucket_name, + Key=key, + ) + else: + key += ".json" + binary_data = bytes(json.dumps(resource_data), "UTF-8") + self.client.put_object( + Body=binary_data, + Bucket=self.bucket_name, + Key=key, + ) if origin in [Origin.DESTINATION, Origin.ALL]: for resource_type, resource_data in data.destination.items(): - binary_data = bytes(json.dumps(resource_data), "UTF-8") - self.client.put_object( - Body=binary_data, - Bucket=self.bucket_name, - Key=f"{self.destination_resources_path}/{resource_type}.json", - ) + key = f"{self.destination_resources_path}/{resource_type}" + if self.resource_per_file: + for _id, resource in resource_data.items(): + key += f".{_id}.json" + binary_data = bytes(json.dumps({_id: resource}), "UTF-8") + self.client.put_object( + Body=binary_data, + Bucket=self.bucket_name, + Key=key, + ) + else: + key += ".json" + binary_data = 
bytes(json.dumps(resource_data), "UTF-8") + self.client.put_object( + Body=binary_data, + Bucket=self.bucket_name, + Key=key, + ) diff --git a/datadog_sync/utils/storage/local_file.py b/datadog_sync/utils/storage/local_file.py index a0fc26ba..e04beede 100644 --- a/datadog_sync/utils/storage/local_file.py +++ b/datadog_sync/utils/storage/local_file.py @@ -84,27 +84,25 @@ def write_resources_file(self, origin: Origin, data: StorageData) -> None: if origin in [Origin.SOURCE, Origin.ALL]: for resource_type, value in data.source.items(): filename = f"{self.source_resources_path}/{resource_type}" - if self.resource_per_file: - filename += f".{value.keys()[0]}" - filename += ".json" if self.resource_per_file: for _id, resource in value.items(): + filename += f".{_id}.json" with open(filename, "w+", encoding="utf-8") as out_file: json.dump({_id: resource}, out_file) else: + filename += ".json" with open(filename, "w+", encoding="utf-8") as out_file: json.dump(value, out_file) if origin in [Origin.DESTINATION, Origin.ALL]: for resource_type, value in data.destination.items(): filename = f"{self.destination_resources_path}/{resource_type}" - if self.resource_per_file: - filename += f".{value.keys()[0]}" - filename += ".json" if self.resource_per_file: for _id, resource in value.items(): + filename += f".{_id}.json" with open(filename, "w+", encoding="utf-8") as out_file: json.dump({_id: resource}, out_file) else: + filename += ".json" with open(filename, "w+", encoding="utf-8") as out_file: json.dump(value, out_file) From feb7c0134e030dd8d145cceccaa4ed2ff31ea27e Mon Sep 17 00:00:00 2001 From: "michael.richey" Date: Wed, 9 Apr 2025 13:09:42 -0400 Subject: [PATCH 3/6] Some general fixes for the filenames and passing in the boolean --- datadog_sync/utils/configuration.py | 12 ++++++++++-- datadog_sync/utils/state.py | 1 + datadog_sync/utils/storage/aws_s3_bucket.py | 12 ++++++------ datadog_sync/utils/storage/local_file.py | 18 +++++++----------- 4 files changed, 24 insertions(+), 19 deletions(-) diff --git a/datadog_sync/utils/configuration.py b/datadog_sync/utils/configuration.py index e1dbff07..96daa906 100644 --- a/datadog_sync/utils/configuration.py +++ b/datadog_sync/utils/configuration.py @@ -19,6 +19,7 @@ FORCE, LOCAL_STORAGE_TYPE, LOGGER_NAME, + RESOURCE_PER_FILE, S3_STORAGE_TYPE, SOURCE_PATH_DEFAULT, SOURCE_PATH_PARAM, @@ -177,8 +178,13 @@ def build_config(cmd: Command, **kwargs: Optional[Any]) -> Configuration: if storage_type == S3_STORAGE_TYPE: logger.info("Using AWS S3 to store state files") storage_type = StorageType.AWS_S3_BUCKET - source_resources_path = kwargs.get("aws_bucket_key_prefix_source", SOURCE_PATH_DEFAULT) - destination_resources_path = kwargs.get("aws_bucket_key_prefix_destination", DESTINATION_PATH_DEFAULT) + + local_source_resources_path = kwargs.get(SOURCE_PATH_PARAM, SOURCE_PATH_DEFAULT) + source_resources_path = kwargs.get("aws_bucket_key_prefix_source", local_source_resources_path) + + local_destination_resources_path = kwargs.get(DESTINATION_PATH_PARAM, DESTINATION_PATH_DEFAULT) + destination_resources_path = kwargs.get("aws_bucket_key_prefix_destination", local_destination_resources_path) + for aws_config_property in AWS_CONFIG_PROPERTIES: property_value = kwargs.get(aws_config_property, None) if not property_value: @@ -200,12 +206,14 @@ def build_config(cmd: Command, **kwargs: Optional[Any]) -> Configuration: source_client = CustomClient(destination_api_url, destination_auth, retry_timeout, timeout, send_metrics) source_resources_path = 
f"{destination_resources_path}/.backup/{str(time.time())}" + resource_per_file = kwargs.get(RESOURCE_PER_FILE, False) # Initialize state state = State( type_=storage_type, source_resources_path=source_resources_path, destination_resources_path=destination_resources_path, config=config, + resource_per_file=resource_per_file, ) # Initialize Configuration diff --git a/datadog_sync/utils/state.py b/datadog_sync/utils/state.py index 8f07a6fa..7eff4ac1 100644 --- a/datadog_sync/utils/state.py +++ b/datadog_sync/utils/state.py @@ -42,6 +42,7 @@ def __init__(self, type_: StorageType = StorageType.LOCAL_FILE, **kwargs: object source_resources_path=source_resources_path, destination_resources_path=destination_resources_path, config=config, + resource_per_file=resource_per_file, ) else: raise NotImplementedError(f"Storage type {type_} not implemented") diff --git a/datadog_sync/utils/storage/aws_s3_bucket.py b/datadog_sync/utils/storage/aws_s3_bucket.py index 0937a0b9..759a0462 100644 --- a/datadog_sync/utils/storage/aws_s3_bucket.py +++ b/datadog_sync/utils/storage/aws_s3_bucket.py @@ -99,10 +99,10 @@ def put(self, origin: Origin, data: StorageData) -> None: log.info("AWS S3 put called") if origin in [Origin.SOURCE, Origin.ALL]: for resource_type, resource_data in data.source.items(): - key = f"{self.source_resources_path}/{resource_type}" + base_key = f"{self.source_resources_path}/{resource_type}" if self.resource_per_file: for _id, resource in resource_data.items(): - key += f".{_id}.json" + key = f"{base_key}.{_id}.json" binary_data = bytes(json.dumps({_id: resource}), "UTF-8") self.client.put_object( Body=binary_data, @@ -110,7 +110,7 @@ def put(self, origin: Origin, data: StorageData) -> None: Key=key, ) else: - key += ".json" + key = f"{base_key}.json" binary_data = bytes(json.dumps(resource_data), "UTF-8") self.client.put_object( Body=binary_data, @@ -120,10 +120,10 @@ def put(self, origin: Origin, data: StorageData) -> None: if origin in [Origin.DESTINATION, Origin.ALL]: for resource_type, resource_data in data.destination.items(): - key = f"{self.destination_resources_path}/{resource_type}" + base_key = f"{self.destination_resources_path}/{resource_type}" if self.resource_per_file: for _id, resource in resource_data.items(): - key += f".{_id}.json" + key = f"{base_key}.{_id}.json" binary_data = bytes(json.dumps({_id: resource}), "UTF-8") self.client.put_object( Body=binary_data, @@ -131,7 +131,7 @@ def put(self, origin: Origin, data: StorageData) -> None: Key=key, ) else: - key += ".json" + key = f"{base_key}.json" binary_data = bytes(json.dumps(resource_data), "UTF-8") self.client.put_object( Body=binary_data, diff --git a/datadog_sync/utils/storage/local_file.py b/datadog_sync/utils/storage/local_file.py index e04beede..4b82af6d 100644 --- a/datadog_sync/utils/storage/local_file.py +++ b/datadog_sync/utils/storage/local_file.py @@ -56,11 +56,7 @@ def get(self, origin: Origin) -> StorageData: for file in os.listdir(self.destination_resources_path): if file.endswith(".json"): resource_type = file.split(".")[0] - with open( - self.destination_resources_path + f"/{file}", - "r", - encoding="utf-8", - ) as input_file: + with open(self.destination_resources_path + f"/{file}", "r", encoding="utf-8") as input_file: try: if not data.destination[resource_type]: data.destination[resource_type] = json.load(input_file) @@ -83,26 +79,26 @@ def put(self, origin: Origin, data: StorageData) -> None: def write_resources_file(self, origin: Origin, data: StorageData) -> None: if origin in [Origin.SOURCE, 
Origin.ALL]: for resource_type, value in data.source.items(): - filename = f"{self.source_resources_path}/{resource_type}" + base_filename = f"{self.source_resources_path}/{resource_type}" if self.resource_per_file: for _id, resource in value.items(): - filename += f".{_id}.json" + filename = f"{base_filename}.{_id}.json" with open(filename, "w+", encoding="utf-8") as out_file: json.dump({_id: resource}, out_file) else: - filename += ".json" + filename = f"{base_filename}.json" with open(filename, "w+", encoding="utf-8") as out_file: json.dump(value, out_file) if origin in [Origin.DESTINATION, Origin.ALL]: for resource_type, value in data.destination.items(): - filename = f"{self.destination_resources_path}/{resource_type}" + base_filename = f"{self.destination_resources_path}/{resource_type}" if self.resource_per_file: for _id, resource in value.items(): - filename += f".{_id}.json" + filename = f"{base_filename}.{_id}.json" with open(filename, "w+", encoding="utf-8") as out_file: json.dump({_id: resource}, out_file) else: - filename += ".json" + filename = f"{base_filename}.json" with open(filename, "w+", encoding="utf-8") as out_file: json.dump(value, out_file) From eedf88451ceca929a3c821b1a1c00072d1107d93 Mon Sep 17 00:00:00 2001 From: "michael.richey" Date: Thu, 17 Apr 2025 11:07:51 -0400 Subject: [PATCH 4/6] Fix for windows filenames --- datadog_sync/utils/storage/aws_s3_bucket.py | 10 ++-------- datadog_sync/utils/storage/local_file.py | 14 ++++---------- 2 files changed, 6 insertions(+), 18 deletions(-) diff --git a/datadog_sync/utils/storage/aws_s3_bucket.py b/datadog_sync/utils/storage/aws_s3_bucket.py index 759a0462..b83d6724 100644 --- a/datadog_sync/utils/storage/aws_s3_bucket.py +++ b/datadog_sync/utils/storage/aws_s3_bucket.py @@ -66,10 +66,7 @@ def get(self, origin: Origin) -> StorageData: ) content_body = response.get("Body") try: - if not data.source[resource_type]: - data.source[resource_type] = json.load(content_body) - else: - data.source[resource_type].update(json.load(content_body)) + data.source[resource_type].update(json.load(content_body)) except json.decoder.JSONDecodeError: log.warning(f"invalid json in aws source resource file: {resource_type}") @@ -86,10 +83,7 @@ def get(self, origin: Origin) -> StorageData: ) content_body = response.get("Body") try: - if not data.destination[resource_type]: - data.destination[resource_type] = json.load(content_body) - else: - data.destination[resource_type].update(json.load(content_body)) + data.destination[resource_type].update(json.load(content_body)) except json.decoder.JSONDecodeError: log.warning(f"invalid json in aws destination resource file: {resource_type}") diff --git a/datadog_sync/utils/storage/local_file.py b/datadog_sync/utils/storage/local_file.py index 4b82af6d..5ee45c0f 100644 --- a/datadog_sync/utils/storage/local_file.py +++ b/datadog_sync/utils/storage/local_file.py @@ -45,10 +45,7 @@ def get(self, origin: Origin) -> StorageData: resource_type = file.split(".")[0] with open(self.source_resources_path + f"/{file}", "r", encoding="utf-8") as input_file: try: - if not data.source[resource_type]: - data.source[resource_type] = json.load(input_file) - else: - data.source[resource_type].update(json.load(input_file)) + data.source[resource_type].update(json.load(input_file)) except json.decoder.JSONDecodeError: log.warning(f"invalid json in source resource file: {file}") @@ -58,10 +55,7 @@ def get(self, origin: Origin) -> StorageData: resource_type = file.split(".")[0] with open(self.destination_resources_path + 
f"/{file}", "r", encoding="utf-8") as input_file: try: - if not data.destination[resource_type]: - data.destination[resource_type] = json.load(input_file) - else: - data.destination[resource_type].update(json.load(input_file)) + data.destination[resource_type].update(json.load(input_file)) except json.decoder.JSONDecodeError: log.warning(f"invalid json in destination resource file: {file}") @@ -82,7 +76,7 @@ def write_resources_file(self, origin: Origin, data: StorageData) -> None: base_filename = f"{self.source_resources_path}/{resource_type}" if self.resource_per_file: for _id, resource in value.items(): - filename = f"{base_filename}.{_id}.json" + filename = f"{base_filename}.{_id.replace(':','.')}.json" # windows can't handle ":" with open(filename, "w+", encoding="utf-8") as out_file: json.dump({_id: resource}, out_file) else: @@ -95,7 +89,7 @@ def write_resources_file(self, origin: Origin, data: StorageData) -> None: base_filename = f"{self.destination_resources_path}/{resource_type}" if self.resource_per_file: for _id, resource in value.items(): - filename = f"{base_filename}.{_id}.json" + filename = f"{base_filename}.{_id.replace(':','.')}.json" # windows can't handle ":" with open(filename, "w+", encoding="utf-8") as out_file: json.dump({_id: resource}, out_file) else: From f5b89db292d2c865df07bc259fc8e77245b36ce5 Mon Sep 17 00:00:00 2001 From: "michael.richey" Date: Thu, 17 Apr 2025 11:35:26 -0400 Subject: [PATCH 5/6] black --- datadog_sync/utils/storage/local_file.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_sync/utils/storage/local_file.py b/datadog_sync/utils/storage/local_file.py index 5ee45c0f..09a98fc3 100644 --- a/datadog_sync/utils/storage/local_file.py +++ b/datadog_sync/utils/storage/local_file.py @@ -76,7 +76,7 @@ def write_resources_file(self, origin: Origin, data: StorageData) -> None: base_filename = f"{self.source_resources_path}/{resource_type}" if self.resource_per_file: for _id, resource in value.items(): - filename = f"{base_filename}.{_id.replace(':','.')}.json" # windows can't handle ":" + filename = f"{base_filename}.{_id.replace(':','.')}.json" # windows can't handle ":" with open(filename, "w+", encoding="utf-8") as out_file: json.dump({_id: resource}, out_file) else: @@ -89,7 +89,7 @@ def write_resources_file(self, origin: Origin, data: StorageData) -> None: base_filename = f"{self.destination_resources_path}/{resource_type}" if self.resource_per_file: for _id, resource in value.items(): - filename = f"{base_filename}.{_id.replace(':','.')}.json" # windows can't handle ":" + filename = f"{base_filename}.{_id.replace(':','.')}.json" # windows can't handle ":" with open(filename, "w+", encoding="utf-8") as out_file: json.dump({_id: resource}, out_file) else: From 630411900912d8b93eae8d2e09cdd754bc04f18d Mon Sep 17 00:00:00 2001 From: "michael.richey" Date: Thu, 17 Apr 2025 14:13:57 -0400 Subject: [PATCH 6/6] Fix anme->name --- datadog_sync/utils/storage/aws_s3_bucket.py | 2 +- datadog_sync/utils/storage/local_file.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_sync/utils/storage/aws_s3_bucket.py b/datadog_sync/utils/storage/aws_s3_bucket.py index b83d6724..87e1939d 100644 --- a/datadog_sync/utils/storage/aws_s3_bucket.py +++ b/datadog_sync/utils/storage/aws_s3_bucket.py @@ -36,7 +36,7 @@ def __init__( # resource_per_file is a boolean, when False we maintain the behavior of storing all the resources # by their resource type, so there is one file for all the monitors and another file for all 
the # dashboards. In that case files are named {resource_type}.json. When the boolean is True each resource - # will be in its own file. The file anme will be {resource_type}.{identifier}.json. + # will be in its own file. The file name will be {resource_type}.{identifier}.json. self.resource_per_file = resource_per_file if not config: raise ValueError("No S3 configuration passed in") diff --git a/datadog_sync/utils/storage/local_file.py b/datadog_sync/utils/storage/local_file.py index 09a98fc3..40fb463e 100644 --- a/datadog_sync/utils/storage/local_file.py +++ b/datadog_sync/utils/storage/local_file.py @@ -31,7 +31,7 @@ def __init__( # resource_per_file is a boolean, when False we maintain the behavior of storing all the resources # by their resource type, so there is one file for all the monitors and another file for all the # dashboards. In that case files are named {resource_type}.json. When the boolean is True each resource - # will be in its own file. The file anme will be {resource_type}.{identifier}.json. + # will be in its own file. The file name will be {resource_type}.{identifier}.json. self.resource_per_file = resource_per_file self.source_resources_path = source_resources_path self.destination_resources_path = destination_resources_path
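Taken together, the series converges on a simple contract: with --resource-per-file set, each resource is written to its own {resource_type}.{identifier}.json file (with ":" swapped for "." so the names are legal on Windows), while reads merge every file sharing a resource-type prefix back into a single per-type mapping, so the rest of the tool is unaffected by which layout is on disk.

Below is a minimal standalone sketch of that round trip, local-file flavor. RESOURCES_PATH, resource_filename, write_per_resource, and read_merged are hypothetical names used only for illustration, not part of the patch set; the sketch also assumes the per-type containers behave like defaultdict(dict), which the unconditional .update() calls in patch 4 imply.

import json
import os
from collections import defaultdict

# Hypothetical path for illustration; the real tool takes this from its
# source/destination resource path options.
RESOURCES_PATH = "resources/source"


def resource_filename(resource_type, _id):
    # {resource_type}.{identifier}.json, with ":" replaced by "." because
    # Windows does not allow ":" in filenames (the patch 4 fix).
    return f"{RESOURCES_PATH}/{resource_type}.{_id.replace(':', '.')}.json"


def write_per_resource(resource_type, resources):
    # One file per resource, each holding a single {id: resource} object.
    for _id, resource in resources.items():
        with open(resource_filename(resource_type, _id), "w+", encoding="utf-8") as out_file:
            json.dump({_id: resource}, out_file)


def read_merged(path):
    # Merge every "{resource_type}.*.json" file (including any legacy
    # "{resource_type}.json") back into one mapping per resource type.
    data = defaultdict(dict)
    for file in os.listdir(path):
        if file.endswith(".json"):
            resource_type = file.split(".")[0]
            with open(f"{path}/{file}", "r", encoding="utf-8") as input_file:
                data[resource_type].update(json.load(input_file))
    return data

Note that read_merged() keys resources by the identifiers stored inside each file, not by filename, so the ":" to "." substitution never needs to be reversed on read; it only sanitizes the on-disk name.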
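The S3 backend follows the same key scheme. A sketch under stated assumptions: the bucket name and prefix below are hypothetical, the boto3 client call matches the put_object(Body=..., Bucket=..., Key=...) usage in the patches, and the keys keep ":" as-is, since the Windows substitution in patch 4 applies only to the local-file backend.

import json

import boto3

# Hypothetical values for illustration only.
BUCKET_NAME = "my-sync-state-bucket"
KEY_PREFIX = "resources/source"

client = boto3.client("s3")


def put_per_resource(resource_type, resources):
    # One object per resource, keyed {prefix}/{resource_type}.{id}.json.
    # S3 keys may contain ":", so no substitution is needed here.
    for _id, resource in resources.items():
        client.put_object(
            Body=bytes(json.dumps({_id: resource}), "UTF-8"),
            Bucket=BUCKET_NAME,
            Key=f"{KEY_PREFIX}/{resource_type}.{_id}.json",
        )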