Skip to content

Commit

Permalink
feat: modify v_workspace_id filter location (cloudforet-io#199)
Browse files Browse the repository at this point in the history
  • Loading branch information
ImMin5 committed Apr 26, 2024
1 parent ef0ca65 commit 3538727
Show file tree
Hide file tree
Showing 4 changed files with 54 additions and 63 deletions.
7 changes: 4 additions & 3 deletions src/spaceone/cost_analysis/manager/cost_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,6 @@ def analyze_costs(self, query, domain_id, target="SECONDARY_PREFERRED"):
_LOGGER.debug(f"[analyze_costs] query: {query}")

query = self._change_filter_project_group_id(query, domain_id)
query = self._change_filter_v_workspace_id(query, domain_id)
return self.cost_model.analyze(**query)

def analyze_monthly_costs(self, query, domain_id, target="SECONDARY_PREFERRED"):
Expand All @@ -147,7 +146,6 @@ def analyze_monthly_costs(self, query, domain_id, target="SECONDARY_PREFERRED"):
_LOGGER.debug(f"[analyze_monthly_costs] query: {query}")

query = self._change_filter_project_group_id(query, domain_id)
query = self._change_filter_v_workspace_id(query, domain_id)
response = self.monthly_cost_model.analyze(**query)
return response

Expand All @@ -158,7 +156,6 @@ def analyze_yearly_costs(self, query, domain_id, target="SECONDARY_PREFERRED"):
_LOGGER.debug(f"[analyze_yearly_costs] query: {query}")

query = self._change_filter_project_group_id(query, domain_id)
query = self._change_filter_v_workspace_id(query, domain_id)
return self.monthly_cost_model.analyze(**query)

@cache.cacheable(
Expand Down Expand Up @@ -203,6 +200,9 @@ def analyze_costs_by_granularity(
self._check_date_range(query)
granularity = query["granularity"]

# Change filter v_workspace_id to workspace_id
query = self._change_filter_v_workspace_id(query, domain_id)

# Save query history to speed up data loading
query_hash: str = utils.dict_to_hash(query)
self.create_cost_query_history(query, query_hash, domain_id, data_source_id)
Expand Down Expand Up @@ -513,6 +513,7 @@ def _change_response_workspace_group_by(
workspace_id = result.get("workspace_id")
if workspace_id in workspace_id_map:
result["workspace_id"] = workspace_id_map[workspace_id]
response["results"] = results
return response

def _get_workspace_id_from_v_workspace_id(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -221,8 +221,6 @@ def _get_data_source_account_vo(
]
}

_LOGGER.debug(f"[_get_data_source_account_vo] query: {query}")

data_source_account_vos, total_count = self.list_data_source_accounts(query)
data_source_account_vo = None
if total_count > 0:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,7 @@ class DataSourceAccount(MongoModel):
data_source_id = StringField(max_length=255, required=True)
name = StringField(max_length=255)
is_sync = BooleanField(default=False)
v_service_account_id = StringField(max_length=40, generate_id="v_sa", unique=True)
v_project_id = StringField(max_length=40, generate_id="v_project", unique=True)
v_workspace_id = StringField(
max_length=40, generate_id="v_workspace_id", unique=True
)
service_account_id = StringField(max_length=255, default=None, null=True)
project_id = StringField(max_length=255, default=None, null=True)
v_workspace_id = StringField(max_length=40, generate_id="v-workspace", unique=True)
workspace_id = StringField(max_length=40, default=None, null=True)
domain_id = StringField(max_length=40)
created_at = DateTimeField(auto_now_add=True)
Expand All @@ -26,8 +20,6 @@ class DataSourceAccount(MongoModel):
"updatable_fields": [
"name",
"is_sync",
"service_account_id",
"project_id",
"workspace_id",
"updated_at",
],
Expand Down
98 changes: 49 additions & 49 deletions src/spaceone/cost_analysis/service/job_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ def get_cost_data(self, params):
is_canceled = False

for costs_data in self.ds_plugin_mgr.get_cost_data(
options, secret_data, schema, task_options, domain_id
options, secret_data, schema, task_options, domain_id
):
results = costs_data.get("results", [])
for cost_data in results:
Expand Down Expand Up @@ -434,11 +434,11 @@ def create_cost_job(self, data_source_vo: DataSource, job_options):
return job_vo

def _list_secret_ids_from_secret_type(
self,
data_source_vo: DataSource,
secret_type: str,
workspace_id: str,
domain_id: str,
self,
data_source_vo: DataSource,
secret_type: str,
workspace_id: str,
domain_id: str,
):
secret_ids = []

Expand All @@ -459,7 +459,7 @@ def _list_secret_ids_from_secret_type(
return secret_ids

def _list_secret_ids_from_secret_filter(
self, secret_filter, provider: str, workspace_id: str, domain_id: str
self, secret_filter, provider: str, workspace_id: str, domain_id: str
):
secret_manager: SecretManager = self.locator.get_manager(SecretManager)

Expand All @@ -474,7 +474,7 @@ def _list_secret_ids_from_secret_filter(

@staticmethod
def _set_secret_filter(
secret_filter, provider: str, workspace_id: str, domain_id: str
secret_filter, provider: str, workspace_id: str, domain_id: str
):
_filter = [{"k": "domain_id", "v": domain_id, "o": "eq"}]

Expand All @@ -489,8 +489,8 @@ def _set_secret_filter(
{"k": "secret_id", "v": secret_filter["secrets"], "o": "in"}
)
if (
"service_accounts" in secret_filter
and secret_filter["service_accounts"]
"service_accounts" in secret_filter
and secret_filter["service_accounts"]
):
_filter.append(
{
Expand Down Expand Up @@ -586,10 +586,10 @@ def _create_cost_data(self, cost_data, job_task_vo, cost_options):
self.cost_mgr.create_cost(cost_data, execute_rollback=False)

def _is_job_failed(
self,
job_id: str,
domain_id: str,
workspace_id: str,
self,
job_id: str,
domain_id: str,
workspace_id: str,
):
job_vo: Job = self.job_mgr.get_job(job_id, domain_id, workspace_id)

Expand All @@ -599,12 +599,12 @@ def _is_job_failed(
return False

def _close_job(
self,
job_id: str,
data_source_id: str,
domain_id: str,
data_keys: list,
workspace_id: str = None,
self,
job_id: str,
data_source_id: str,
domain_id: str,
data_keys: list,
workspace_id: str = None,
) -> None:
job_vo: Job = self.job_mgr.get_job(job_id, domain_id, workspace_id)
no_preload_cache = job_vo.options.get("no_preload_cache", False)
Expand Down Expand Up @@ -754,7 +754,7 @@ def _delete_old_cost_data(self, data_source_id, domain_id):
monthly_cost_vos.delete()

def _delete_changed_cost_data(
self, job_vo: Job, start, end, change_filter, domain_id
self, job_vo: Job, start, end, change_filter, domain_id
):
query = {
"filter": [
Expand Down Expand Up @@ -800,7 +800,7 @@ def _aggregate_cost_data(self, job_vo: Job, data_keys: list):

for job_task_id in job_task_ids:
for billed_month in self._distinct_billed_month(
data_source_id, domain_id, job_id, job_task_id
data_source_id, domain_id, job_id, job_task_id
):
self._aggregate_monthly_cost_data(
data_source_id,
Expand Down Expand Up @@ -832,14 +832,14 @@ def _distinct_billed_month(self, data_source_id, domain_id, job_id, job_task_id)
return values

def _aggregate_monthly_cost_data(
self,
data_source_id: str,
domain_id: str,
job_id: str,
job_task_id: str,
billed_month: str,
data_keys: list,
workspace_id: str = None,
self,
data_source_id: str,
domain_id: str,
job_id: str,
job_task_id: str,
billed_month: str,
data_keys: list,
workspace_id: str = None,
):
query = {
"group_by": [
Expand Down Expand Up @@ -906,7 +906,7 @@ def _get_all_data_sources(self):
)

def _check_duplicate_job(
self, data_source_id: str, domain_id: str, this_job_vo: Job
self, data_source_id: str, domain_id: str, this_job_vo: Job
):
query = {
"filter": [
Expand Down Expand Up @@ -944,11 +944,11 @@ def _get_job_task_ids(self, job_id, domain_id):
return job_task_ids

def _get_data_source_account_map(
self,
data_source_id: str,
domain_id: str,
workspace_id: str,
resource_group: str,
self,
data_source_id: str,
domain_id: str,
workspace_id: str,
resource_group: str,
) -> Dict[str, DataSourceAccount]:
data_source_account_map = {}
conditions = {
Expand All @@ -970,11 +970,11 @@ def _get_data_source_account_map(
return data_source_account_map

def _get_linked_accounts_from_data_source_vo(
self,
data_source_vo: DataSource,
options: dict,
secret_data: dict,
schema: dict = None,
self,
data_source_vo: DataSource,
options: dict,
secret_data: dict,
schema: dict = None,
) -> list:
linked_accounts = []

Expand Down Expand Up @@ -1010,14 +1010,14 @@ def _get_linked_accounts_from_data_source_vo(
)
)

if data_source_account_vo.workspace_id:
linked_accounts.append(
{
"account_id": data_source_account_vo.account_id,
"name": data_source_account_vo.name,
"is_sync": data_source_account_vo.is_sync,
}
)
# if data_source_account_vo.workspace_id:
linked_accounts.append(
{
"account_id": data_source_account_vo.account_id,
"name": data_source_account_vo.name,
"is_sync": data_source_account_vo.is_sync,
}
)

_LOGGER.debug(
f"[_get_linked_accounts_from_data_source_vo] linked_accounts total count: {len(linked_accounts)} / {data_source_id}"
Expand Down

0 comments on commit 3538727

Please sign in to comment.