From 35387276d2ac53d6dc09637c5965f3fd7f51a649 Mon Sep 17 00:00:00 2001 From: ImMin5 Date: Fri, 26 Apr 2024 20:09:06 +0900 Subject: [PATCH] feat: modify change v_workspace_id filter location (#199) --- .../cost_analysis/manager/cost_manager.py | 7 +- .../manager/data_source_account_manager.py | 2 - .../model/data_source_account/database.py | 10 +- .../cost_analysis/service/job_service.py | 98 +++++++++---------- 4 files changed, 54 insertions(+), 63 deletions(-) diff --git a/src/spaceone/cost_analysis/manager/cost_manager.py b/src/spaceone/cost_analysis/manager/cost_manager.py index 1594232..fc5339e 100644 --- a/src/spaceone/cost_analysis/manager/cost_manager.py +++ b/src/spaceone/cost_analysis/manager/cost_manager.py @@ -137,7 +137,6 @@ def analyze_costs(self, query, domain_id, target="SECONDARY_PREFERRED"): _LOGGER.debug(f"[analyze_costs] query: {query}") query = self._change_filter_project_group_id(query, domain_id) - query = self._change_filter_v_workspace_id(query, domain_id) return self.cost_model.analyze(**query) def analyze_monthly_costs(self, query, domain_id, target="SECONDARY_PREFERRED"): @@ -147,7 +146,6 @@ def analyze_monthly_costs(self, query, domain_id, target="SECONDARY_PREFERRED"): _LOGGER.debug(f"[analyze_monthly_costs] query: {query}") query = self._change_filter_project_group_id(query, domain_id) - query = self._change_filter_v_workspace_id(query, domain_id) response = self.monthly_cost_model.analyze(**query) return response @@ -158,7 +156,6 @@ def analyze_yearly_costs(self, query, domain_id, target="SECONDARY_PREFERRED"): _LOGGER.debug(f"[analyze_yearly_costs] query: {query}") query = self._change_filter_project_group_id(query, domain_id) - query = self._change_filter_v_workspace_id(query, domain_id) return self.monthly_cost_model.analyze(**query) @cache.cacheable( @@ -203,6 +200,9 @@ def analyze_costs_by_granularity( self._check_date_range(query) granularity = query["granularity"] + # Change filter v_workspace_id to workspace_id + query = 
self._change_filter_v_workspace_id(query, domain_id) + # Save query history to speed up data loading query_hash: str = utils.dict_to_hash(query) self.create_cost_query_history(query, query_hash, domain_id, data_source_id) @@ -513,6 +513,7 @@ def _change_response_workspace_group_by( workspace_id = result.get("workspace_id") if workspace_id in workspace_id_map: result["workspace_id"] = workspace_id_map[workspace_id] + response["results"] = results return response def _get_workspace_id_from_v_workspace_id( diff --git a/src/spaceone/cost_analysis/manager/data_source_account_manager.py b/src/spaceone/cost_analysis/manager/data_source_account_manager.py index 358de67..216dfdb 100644 --- a/src/spaceone/cost_analysis/manager/data_source_account_manager.py +++ b/src/spaceone/cost_analysis/manager/data_source_account_manager.py @@ -221,8 +221,6 @@ def _get_data_source_account_vo( ] } - _LOGGER.debug(f"[_get_data_source_account_vo] query: {query}") - data_source_account_vos, total_count = self.list_data_source_accounts(query) data_source_account_vo = None if total_count > 0: diff --git a/src/spaceone/cost_analysis/model/data_source_account/database.py b/src/spaceone/cost_analysis/model/data_source_account/database.py index b495765..c50f5d0 100644 --- a/src/spaceone/cost_analysis/model/data_source_account/database.py +++ b/src/spaceone/cost_analysis/model/data_source_account/database.py @@ -10,13 +10,7 @@ class DataSourceAccount(MongoModel): data_source_id = StringField(max_length=255, required=True) name = StringField(max_length=255) is_sync = BooleanField(default=False) - v_service_account_id = StringField(max_length=40, generate_id="v_sa", unique=True) - v_project_id = StringField(max_length=40, generate_id="v_project", unique=True) - v_workspace_id = StringField( - max_length=40, generate_id="v_workspace_id", unique=True - ) - service_account_id = StringField(max_length=255, default=None, null=True) - project_id = StringField(max_length=255, default=None, null=True) + 
v_workspace_id = StringField(max_length=40, generate_id="v-workspace", unique=True) workspace_id = StringField(max_length=40, default=None, null=True) domain_id = StringField(max_length=40) created_at = DateTimeField(auto_now_add=True) @@ -26,8 +20,6 @@ class DataSourceAccount(MongoModel): "updatable_fields": [ "name", "is_sync", - "service_account_id", - "project_id", "workspace_id", "updated_at", ], diff --git a/src/spaceone/cost_analysis/service/job_service.py b/src/spaceone/cost_analysis/service/job_service.py index d901498..316d991 100644 --- a/src/spaceone/cost_analysis/service/job_service.py +++ b/src/spaceone/cost_analysis/service/job_service.py @@ -260,7 +260,7 @@ def get_cost_data(self, params): is_canceled = False for costs_data in self.ds_plugin_mgr.get_cost_data( - options, secret_data, schema, task_options, domain_id + options, secret_data, schema, task_options, domain_id ): results = costs_data.get("results", []) for cost_data in results: @@ -434,11 +434,11 @@ def create_cost_job(self, data_source_vo: DataSource, job_options): return job_vo def _list_secret_ids_from_secret_type( - self, - data_source_vo: DataSource, - secret_type: str, - workspace_id: str, - domain_id: str, + self, + data_source_vo: DataSource, + secret_type: str, + workspace_id: str, + domain_id: str, ): secret_ids = [] @@ -459,7 +459,7 @@ def _list_secret_ids_from_secret_type( return secret_ids def _list_secret_ids_from_secret_filter( - self, secret_filter, provider: str, workspace_id: str, domain_id: str + self, secret_filter, provider: str, workspace_id: str, domain_id: str ): secret_manager: SecretManager = self.locator.get_manager(SecretManager) @@ -474,7 +474,7 @@ def _list_secret_ids_from_secret_filter( @staticmethod def _set_secret_filter( - secret_filter, provider: str, workspace_id: str, domain_id: str + secret_filter, provider: str, workspace_id: str, domain_id: str ): _filter = [{"k": "domain_id", "v": domain_id, "o": "eq"}] @@ -489,8 +489,8 @@ def _set_secret_filter( 
{"k": "secret_id", "v": secret_filter["secrets"], "o": "in"} ) if ( - "service_accounts" in secret_filter - and secret_filter["service_accounts"] + "service_accounts" in secret_filter + and secret_filter["service_accounts"] ): _filter.append( { @@ -586,10 +586,10 @@ def _create_cost_data(self, cost_data, job_task_vo, cost_options): self.cost_mgr.create_cost(cost_data, execute_rollback=False) def _is_job_failed( - self, - job_id: str, - domain_id: str, - workspace_id: str, + self, + job_id: str, + domain_id: str, + workspace_id: str, ): job_vo: Job = self.job_mgr.get_job(job_id, domain_id, workspace_id) @@ -599,12 +599,12 @@ def _is_job_failed( return False def _close_job( - self, - job_id: str, - data_source_id: str, - domain_id: str, - data_keys: list, - workspace_id: str = None, + self, + job_id: str, + data_source_id: str, + domain_id: str, + data_keys: list, + workspace_id: str = None, ) -> None: job_vo: Job = self.job_mgr.get_job(job_id, domain_id, workspace_id) no_preload_cache = job_vo.options.get("no_preload_cache", False) @@ -754,7 +754,7 @@ def _delete_old_cost_data(self, data_source_id, domain_id): monthly_cost_vos.delete() def _delete_changed_cost_data( - self, job_vo: Job, start, end, change_filter, domain_id + self, job_vo: Job, start, end, change_filter, domain_id ): query = { "filter": [ @@ -800,7 +800,7 @@ def _aggregate_cost_data(self, job_vo: Job, data_keys: list): for job_task_id in job_task_ids: for billed_month in self._distinct_billed_month( - data_source_id, domain_id, job_id, job_task_id + data_source_id, domain_id, job_id, job_task_id ): self._aggregate_monthly_cost_data( data_source_id, @@ -832,14 +832,14 @@ def _distinct_billed_month(self, data_source_id, domain_id, job_id, job_task_id) return values def _aggregate_monthly_cost_data( - self, - data_source_id: str, - domain_id: str, - job_id: str, - job_task_id: str, - billed_month: str, - data_keys: list, - workspace_id: str = None, + self, + data_source_id: str, + domain_id: str, + 
job_id: str, + job_task_id: str, + billed_month: str, + data_keys: list, + workspace_id: str = None, ): query = { "group_by": [ @@ -906,7 +906,7 @@ def _get_all_data_sources(self): ) def _check_duplicate_job( - self, data_source_id: str, domain_id: str, this_job_vo: Job + self, data_source_id: str, domain_id: str, this_job_vo: Job ): query = { "filter": [ @@ -944,11 +944,11 @@ def _get_job_task_ids(self, job_id, domain_id): return job_task_ids def _get_data_source_account_map( - self, - data_source_id: str, - domain_id: str, - workspace_id: str, - resource_group: str, + self, + data_source_id: str, + domain_id: str, + workspace_id: str, + resource_group: str, ) -> Dict[str, DataSourceAccount]: data_source_account_map = {} conditions = { @@ -970,11 +970,11 @@ def _get_data_source_account_map( return data_source_account_map def _get_linked_accounts_from_data_source_vo( - self, - data_source_vo: DataSource, - options: dict, - secret_data: dict, - schema: dict = None, + self, + data_source_vo: DataSource, + options: dict, + secret_data: dict, + schema: dict = None, ) -> list: linked_accounts = [] @@ -1010,14 +1010,14 @@ def _get_linked_accounts_from_data_source_vo( ) ) - if data_source_account_vo.workspace_id: - linked_accounts.append( - { - "account_id": data_source_account_vo.account_id, - "name": data_source_account_vo.name, - "is_sync": data_source_account_vo.is_sync, - } - ) + # if data_source_account_vo.workspace_id: + linked_accounts.append( + { + "account_id": data_source_account_vo.account_id, + "name": data_source_account_vo.name, + "is_sync": data_source_account_vo.is_sync, + } + ) _LOGGER.debug( f"[_get_linked_accounts_from_data_source_vo] linked_accounts total count: {len(linked_accounts)} / {data_source_id}"