check only env var for provider type and move code to separate function
cgoodfred committed Dec 13, 2023
1 parent f66ce15 commit a9e27af
Showing 2 changed files with 36 additions and 33 deletions.
65 changes: 35 additions & 30 deletions koku/subs/subs_data_messenger.py
@@ -98,36 +98,7 @@ def process_and_send_subs_message(self, upload_keys):
                 msg_count = 0
                 for row in reader:
                     if self.provider_type == Provider.PROVIDER_AZURE:
-                        # Azure can inexplicably generate strange records with a second entry per day
-                        # so we track the resource ids we've seen for a specific day so we don't send a record twice
-                        if self.date_map.get(row["subs_start_time"]) and row["subs_resource_id"] in self.date_map.get(
-                            row["subs_start_time"]
-                        ):
-                            continue
-                        self.date_map[row["subs_start_time"]].append(row["subs_resource_id"])
-                        instance_id = self.determine_azure_instance_id(row)
-                        if not instance_id:
-                            continue
-                        # Azure is daily records but subs need hourly records
-                        start = parser.parse(row["subs_start_time"])
-                        LOG.info(f"start\n{start}\n{type(start)}")
-                        for i in range(int(row["subs_usage_quantity"])):
-                            end = start + timedelta(hours=1)
-                            msg = self.build_subs_msg(
-                                instance_id,
-                                row["subs_account"],
-                                str(start),
-                                str(end),
-                                row["subs_vcpu"],
-                                row["subs_sla"],
-                                row["subs_usage"],
-                                row["subs_role"],
-                                row["subs_product_ids"].split("-"),
-                            )
-                            # move to the next hour in the range
-                            start = end
-                            self.send_kafka_message(msg)
-                            msg_count += 1
+                        msg_count += self.process_azure_row(row)
                     else:
                         # row["subs_product_ids"] is a string of numbers separated by '-' to be sent as a list
                         msg = self.build_subs_msg(
@@ -184,3 +155,37 @@ def build_subs_msg(
             "billing_account_id": billing_account_id,
         }
         return bytes(json.dumps(subs_json), "utf-8")
+
+    def process_azure_row(self, row):
+        """Process an Azure row into subs kafka messages."""
+        msg_count = 0
+        # Azure can inexplicably generate strange records with a second entry per day
+        # so we track the resource ids we've seen for a specific day so we don't send a record twice
+        if self.date_map.get(row["subs_start_time"]) and row["subs_resource_id"] in self.date_map.get(
+            row["subs_start_time"]
+        ):
+            return msg_count
+        self.date_map[row["subs_start_time"]].append(row["subs_resource_id"])
+        instance_id = self.determine_azure_instance_id(row)
+        if not instance_id:
+            return msg_count
+        # Azure is daily records but subs need hourly records
+        start = parser.parse(row["subs_start_time"])
+        for i in range(int(row["subs_usage_quantity"])):
+            end = start + timedelta(hours=1)
+            msg = self.build_subs_msg(
+                instance_id,
+                row["subs_account"],
+                str(start),
+                str(end),
+                row["subs_vcpu"],
+                row["subs_sla"],
+                row["subs_usage"],
+                row["subs_role"],
+                row["subs_product_ids"].split("-"),
+            )
+            # move to the next hour in the range
+            start = end
+            self.send_kafka_message(msg)
+            msg_count += 1
+        return msg_count
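
For illustration, here is a minimal standalone sketch of the hourly fan-out that process_azure_row performs (the hourly_windows helper and the sample row values are hypothetical, not part of this commit): a daily Azure record whose subs_usage_quantity is N becomes N consecutive one-hour windows starting at subs_start_time, and the date_map guard above ensures each (day, resource id) pair is fanned out only once.

from datetime import timedelta

from dateutil import parser


def hourly_windows(row):
    """Yield (start, end) strings for each hour covered by a daily Azure record."""
    start = parser.parse(row["subs_start_time"])
    for _ in range(int(row["subs_usage_quantity"])):
        end = start + timedelta(hours=1)
        yield str(start), str(end)
        start = end  # move to the next hour in the range, as process_azure_row does


# A record covering 3 hours of usage yields three back-to-back hourly windows.
sample = {"subs_start_time": "2023-12-13T00:00:00+00:00", "subs_usage_quantity": "3"}
for window in hourly_windows(sample):
    print(window)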
4 changes: 1 addition & 3 deletions koku/subs/tasks.py
@@ -73,9 +73,7 @@ def extract_subs_data_from_reports(reports_to_extract, metered):
         tracing_id = report.get("tracing_id", report.get("manifest_uuid", str(uuid.uuid4())))
         context = {"schema": schema_name, "provider_type": provider_type, "provider_uuid": provider_uuid}
         # SUBS provider type enablement is handled through the ENABLE_SUBS_PROVIDER_TYPES environment variable
-        if (provider_type.rstrip("-local") not in settings.ENABLE_SUBS_PROVIDER_TYPES) and (
-            not settings.ENABLE_SUBS_DEBUG
-        ):
+        if provider_type.rstrip("-local") not in settings.ENABLE_SUBS_PROVIDER_TYPES:
             LOG.info(log_json(tracing_id, msg="provider type not valid for subs processing", context=context))
             continue
         if not enable_subs_extraction(schema_name, metered):
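
A hedged sketch of the simplified gate (the subs_enabled helper and the inline list are hypothetical; in koku the list comes from the ENABLE_SUBS_PROVIDER_TYPES environment variable via settings). Note the behavioral change in this hunk: ENABLE_SUBS_DEBUG no longer bypasses the check; only the provider-type list matters.

ENABLE_SUBS_PROVIDER_TYPES = ["AWS"]  # hypothetical stand-in for the env-var-backed setting


def subs_enabled(provider_type: str) -> bool:
    # str.rstrip("-local") strips trailing characters drawn from the set {-, l, o, c, a},
    # which reduces "AWS-local" to "AWS" for the provider types used here.
    return provider_type.rstrip("-local") in ENABLE_SUBS_PROVIDER_TYPES


print(subs_enabled("AWS-local"))  # True: the -local suffix is stripped before the lookup
print(subs_enabled("Azure"))      # False: Azure is not in the enabled list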
