10 changes: 10 additions & 0 deletions datadog_sync/commands/shared/options.py
@@ -187,6 +187,16 @@ def click_config_file_provider(ctx: Context, opts: CustomOptionClass, value: Non
help="Enables sync-cli metrics being sent to both source and destination",
cls=CustomOptionClass,
),
option(
"--resource-per-file",
required=False,
is_flag=True,
default=False,
show_default=True,
help="By default resource files contain many resources of the same resource type, setting this flag to true "
"will create a resource file for each individual resource.",
cls=CustomOptionClass,
),
]

_storage_options = [
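For reviewers, a minimal sketch (not part of the PR) of the two on-disk layouts the flag selects between, mirroring the naming scheme documented in the storage classes further down:

```python
# Illustration only: mirrors the naming convention added in this PR.
import json
from pathlib import Path

def write_resources(base: Path, resource_type: str, resources: dict, resource_per_file: bool) -> None:
    base.mkdir(parents=True, exist_ok=True)
    if resource_per_file:
        # --resource-per-file: one file per resource, {resource_type}.{identifier}.json
        for _id, resource in resources.items():
            (base / f"{resource_type}.{_id}.json").write_text(json.dumps({_id: resource}))
    else:
        # Default: one file per resource type, {resource_type}.json
        (base / f"{resource_type}.json").write_text(json.dumps(resources))

write_resources(Path("resources/source"), "monitors", {"123": {}, "456": {}}, resource_per_file=True)
# -> resources/source/monitors.123.json, resources/source/monitors.456.json
```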
1 change: 1 addition & 0 deletions datadog_sync/constants.py
@@ -64,6 +64,7 @@
SOURCE_PATH_DEFAULT = "resources/source"
DESTINATION_PATH_PARAM = "destination_resources_path"
DESTINATION_PATH_DEFAULT = "resources/destination"
RESOURCE_PER_FILE = "resource_per_file"


# Commands
12 changes: 10 additions & 2 deletions datadog_sync/utils/configuration.py
@@ -19,6 +19,7 @@
FORCE,
LOCAL_STORAGE_TYPE,
LOGGER_NAME,
RESOURCE_PER_FILE,
S3_STORAGE_TYPE,
SOURCE_PATH_DEFAULT,
SOURCE_PATH_PARAM,
@@ -177,8 +178,13 @@ def build_config(cmd: Command, **kwargs: Optional[Any]) -> Configuration:
if storage_type == S3_STORAGE_TYPE:
logger.info("Using AWS S3 to store state files")
storage_type = StorageType.AWS_S3_BUCKET
source_resources_path = kwargs.get("aws_bucket_key_prefix_source", SOURCE_PATH_DEFAULT)
destination_resources_path = kwargs.get("aws_bucket_key_prefix_destination", DESTINATION_PATH_DEFAULT)

local_source_resources_path = kwargs.get(SOURCE_PATH_PARAM, SOURCE_PATH_DEFAULT)
source_resources_path = kwargs.get("aws_bucket_key_prefix_source", local_source_resources_path)

local_destination_resources_path = kwargs.get(DESTINATION_PATH_PARAM, DESTINATION_PATH_DEFAULT)
destination_resources_path = kwargs.get("aws_bucket_key_prefix_destination", local_destination_resources_path)

for aws_config_property in AWS_CONFIG_PROPERTIES:
property_value = kwargs.get(aws_config_property, None)
if not property_value:
@@ -200,12 +206,14 @@ def build_config(cmd: Command, **kwargs: Optional[Any]) -> Configuration:
source_client = CustomClient(destination_api_url, destination_auth, retry_timeout, timeout, send_metrics)
source_resources_path = f"{destination_resources_path}/.backup/{str(time.time())}"

resource_per_file = kwargs.get(RESOURCE_PER_FILE, False)
# Initialize state
state = State(
type_=storage_type,
source_resources_path=source_resources_path,
destination_resources_path=destination_resources_path,
config=config,
resource_per_file=resource_per_file,
)

# Initialize Configuration
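The S3 path resolution above now falls back to the locally-configured paths before the hard-coded defaults. A small sketch of the resulting precedence, under the assumption that `SOURCE_PATH_PARAM` is the string `"source_resources_path"` (the kwargs dict stands in for parsed CLI options):

```python
SOURCE_PATH_DEFAULT = "resources/source"  # from datadog_sync.constants

def resolve_source_path(kwargs: dict) -> str:
    # An explicitly configured local path wins over the package default...
    local_path = kwargs.get("source_resources_path", SOURCE_PATH_DEFAULT)
    # ...and an explicit S3 key prefix wins over both.
    return kwargs.get("aws_bucket_key_prefix_source", local_path)

assert resolve_source_path({}) == "resources/source"
assert resolve_source_path({"source_resources_path": "my/source"}) == "my/source"
assert resolve_source_path({"aws_bucket_key_prefix_source": "prefix/source"}) == "prefix/source"
```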
5 changes: 5 additions & 0 deletions datadog_sync/utils/state.py
@@ -10,6 +10,7 @@
AWS_BUCKET_KEY_PREFIX_SOURCE,
DESTINATION_PATH_DEFAULT,
DESTINATION_PATH_PARAM,
RESOURCE_PER_FILE,
SOURCE_PATH_DEFAULT,
SOURCE_PATH_PARAM,
)
@@ -21,12 +22,15 @@

class State:
def __init__(self, type_: StorageType = StorageType.LOCAL_FILE, **kwargs: object) -> None:
resource_per_file = kwargs.get(RESOURCE_PER_FILE, False)

if type_ == StorageType.LOCAL_FILE:
source_resources_path = kwargs.get(SOURCE_PATH_PARAM, SOURCE_PATH_DEFAULT)
destination_resources_path = kwargs.get(DESTINATION_PATH_PARAM, DESTINATION_PATH_DEFAULT)
self._storage: BaseStorage = LocalFile(
source_resources_path=source_resources_path,
destination_resources_path=destination_resources_path,
resource_per_file=resource_per_file,
)
elif type_ == StorageType.AWS_S3_BUCKET:
source_resources_path = kwargs.get(AWS_BUCKET_KEY_PREFIX_SOURCE, SOURCE_PATH_DEFAULT)
@@ -38,6 +42,7 @@ def __init__(self, type_: StorageType = StorageType.LOCAL_FILE, **kwargs: object
source_resources_path=source_resources_path,
destination_resources_path=destination_resources_path,
config=config,
resource_per_file=resource_per_file,
)
else:
raise NotImplementedError(f"Storage type {type_} not implemented")
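A hedged usage sketch of the new keyword threading through `State` (assuming the datadog-sync-cli package is importable; `State` lives in datadog_sync/utils/state.py per this diff):

```python
from datadog_sync.utils.state import State

state = State(
    # type_ defaults to StorageType.LOCAL_FILE, so it can be omitted here.
    source_resources_path="resources/source",
    destination_resources_path="resources/destination",
    resource_per_file=True,  # forwarded to LocalFile, or to the S3 backend for S3 state
)
```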
58 changes: 44 additions & 14 deletions datadog_sync/utils/storage/aws_s3_bucket.py
@@ -26,12 +26,18 @@ def __init__(
self,
source_resources_path=SOURCE_PATH_DEFAULT,
destination_resources_path=DESTINATION_PATH_DEFAULT,
resource_per_file=False,
config=None,
) -> None:
log.info("AWS S3 init called")
super().__init__()
self.source_resources_path = source_resources_path
self.destination_resources_path = destination_resources_path
# resource_per_file is a boolean. When False we keep the existing behavior of storing all
# resources by their resource type, so there is one file for all the monitors and another
# file for all the dashboards, each named {resource_type}.json. When True, each resource is
# written to its own file, named {resource_type}.{identifier}.json.
self.resource_per_file = resource_per_file
if not config:
raise ValueError("No S3 configuration passed in")
self.client = boto3.client(
@@ -60,7 +66,7 @@ def get(self, origin: Origin) -> StorageData:
)
content_body = response.get("Body")
try:
data.source[resource_type] = json.load(content_body)
data.source[resource_type].update(json.load(content_body))
except json.decoder.JSONDecodeError:
log.warning(f"invalid json in aws source resource file: {resource_type}")

@@ -77,7 +83,7 @@ def get(self, origin: Origin) -> StorageData:
)
content_body = response.get("Body")
try:
data.destination[resource_type] = json.load(content_body)
data.destination[resource_type].update(json.load(content_body))
except json.decoder.JSONDecodeError:
log.warning(f"invalid json in aws destination resource file: {resource_type}")

@@ -87,18 +93,42 @@ def put(self, origin: Origin, data: StorageData) -> None:
log.info("AWS S3 put called")
if origin in [Origin.SOURCE, Origin.ALL]:
for resource_type, resource_data in data.source.items():
binary_data = bytes(json.dumps(resource_data), "UTF-8")
self.client.put_object(
Body=binary_data,
Bucket=self.bucket_name,
Key=f"{self.source_resources_path}/{resource_type}.json",
)
base_key = f"{self.source_resources_path}/{resource_type}"
if self.resource_per_file:
for _id, resource in resource_data.items():
key = f"{base_key}.{_id}.json"
binary_data = bytes(json.dumps({_id: resource}), "UTF-8")
self.client.put_object(
Body=binary_data,
Bucket=self.bucket_name,
Key=key,
)
else:
key = f"{base_key}.json"
binary_data = bytes(json.dumps(resource_data), "UTF-8")
self.client.put_object(
Body=binary_data,
Bucket=self.bucket_name,
Key=key,
)

if origin in [Origin.DESTINATION, Origin.ALL]:
for resource_type, resource_data in data.destination.items():
binary_data = bytes(json.dumps(resource_data), "UTF-8")
self.client.put_object(
Body=binary_data,
Bucket=self.bucket_name,
Key=f"{self.destination_resources_path}/{resource_type}.json",
)
base_key = f"{self.destination_resources_path}/{resource_type}"
if self.resource_per_file:
for _id, resource in resource_data.items():
key = f"{base_key}.{_id}.json"
binary_data = bytes(json.dumps({_id: resource}), "UTF-8")
self.client.put_object(
Body=binary_data,
Bucket=self.bucket_name,
Key=key,
)
else:
key = f"{base_key}.json"
binary_data = bytes(json.dumps(resource_data), "UTF-8")
self.client.put_object(
Body=binary_data,
Bucket=self.bucket_name,
Key=key,
)
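The switch from assignment to `update()` in `get()` matters once resource_per_file is set: several S3 objects can now share one resource type, and their contents must merge rather than overwrite each other. A stand-alone sketch of that merge (a defaultdict stands in for how StorageData is assumed to initialize each resource type):

```python
from collections import defaultdict

# Two per-resource files for the same resource type...
files = [{"123": {"name": "cpu monitor"}}, {"456": {"name": "mem monitor"}}]

merged = defaultdict(dict)
for payload in files:
    # `update` merges; plain assignment would keep only the last file read.
    merged["monitors"].update(payload)

assert set(merged["monitors"]) == {"123", "456"}
```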
50 changes: 37 additions & 13 deletions datadog_sync/utils/storage/local_file.py
@@ -22,9 +22,17 @@
class LocalFile(BaseStorage):

def __init__(
self, source_resources_path=SOURCE_PATH_DEFAULT, destination_resources_path=DESTINATION_PATH_DEFAULT
self,
source_resources_path=SOURCE_PATH_DEFAULT,
destination_resources_path=DESTINATION_PATH_DEFAULT,
resource_per_file=False,
) -> None:
super().__init__()
# resource_per_file is a boolean. When False we keep the existing behavior of storing all
# resources by their resource type, so there is one file for all the monitors and another
# file for all the dashboards, each named {resource_type}.json. When True, each resource is
# written to its own file, named {resource_type}.{identifier}.json.
self.resource_per_file = resource_per_file
self.source_resources_path = source_resources_path
self.destination_resources_path = destination_resources_path

@@ -35,21 +43,21 @@ def get(self, origin: Origin) -> StorageData:
for file in os.listdir(self.source_resources_path):
if file.endswith(".json"):
resource_type = file.split(".")[0]
with open(self.source_resources_path + f"/{file}") as f:
with open(self.source_resources_path + f"/{file}", "r", encoding="utf-8") as input_file:
try:
data.source[resource_type] = json.load(f)
data.source[resource_type].update(json.load(input_file))
except json.decoder.JSONDecodeError:
log.warning(f"invalid json in source resource file: {resource_type}")
log.warning(f"invalid json in source resource file: {file}")

if origin in [Origin.DESTINATION, Origin.ALL] and os.path.exists(self.destination_resources_path):
for file in os.listdir(self.destination_resources_path):
if file.endswith(".json"):
resource_type = file.split(".")[0]
with open(self.destination_resources_path + f"/{file}") as f:
with open(self.destination_resources_path + f"/{file}", "r", encoding="utf-8") as input_file:
try:
data.destination[resource_type] = json.load(f)
data.destination[resource_type].update(json.load(input_file))
except json.decoder.JSONDecodeError:
log.warning(f"invalid json in destination resource file: {resource_type}")
log.warning(f"invalid json in destination resource file: {file}")

return data

@@ -64,11 +72,27 @@ def put(self, origin: Origin, data: StorageData) -> None:

def write_resources_file(self, origin: Origin, data: StorageData) -> None:
if origin in [Origin.SOURCE, Origin.ALL]:
for resource_type, v in data.source.items():
with open(self.source_resources_path + f"/{resource_type}.json", "w+") as f:
json.dump(v, f)
for resource_type, value in data.source.items():
base_filename = f"{self.source_resources_path}/{resource_type}"
if self.resource_per_file:
for _id, resource in value.items():
filename = f"{base_filename}.{_id.replace(':','.')}.json" # windows can't handle ":"
with open(filename, "w+", encoding="utf-8") as out_file:
json.dump({_id: resource}, out_file)
else:
filename = f"{base_filename}.json"
with open(filename, "w+", encoding="utf-8") as out_file:
json.dump(value, out_file)

if origin in [Origin.DESTINATION, Origin.ALL]:
for resource_type, v in data.destination.items():
with open(self.destination_resources_path + f"/{resource_type}.json", "w+") as f:
json.dump(v, f)
for resource_type, value in data.destination.items():
base_filename = f"{self.destination_resources_path}/{resource_type}"
if self.resource_per_file:
for _id, resource in value.items():
filename = f"{base_filename}.{_id.replace(':','.')}.json" # windows can't handle ":"
with open(filename, "w+", encoding="utf-8") as out_file:
json.dump({_id: resource}, out_file)
else:
filename = f"{base_filename}.json"
with open(filename, "w+", encoding="utf-8") as out_file:
json.dump(value, out_file)
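One more sketch, covering the identifier sanitization above: identifiers may contain ":", which Windows rejects in filenames, so it is mapped to ".". Because `get()` recovers the resource type from the first dot-separated segment only, the extra dots are harmless:

```python
def per_resource_filename(resource_type: str, _id: str) -> str:
    # Mirrors the write path above: ":" is not a legal Windows filename character.
    return f"{resource_type}.{_id.replace(':', '.')}.json"

name = per_resource_filename("synthetics_tests", "abc:123")
assert name == "synthetics_tests.abc.123.json"
# The read path only needs the leading segment back:
assert name.split(".")[0] == "synthetics_tests"
```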