diff --git a/tencentcloud/data_source_tc_dbbrain_security_audit_log_export_tasks.go b/tencentcloud/data_source_tc_dbbrain_security_audit_log_export_tasks.go new file mode 100644 index 0000000000..6e9348a4c6 --- /dev/null +++ b/tencentcloud/data_source_tc_dbbrain_security_audit_log_export_tasks.go @@ -0,0 +1,227 @@ +/* +Use this data source to query detailed information of dbbrain securityAuditLogExportTasks + +Example Usage + +```hcl +resource "tencentcloud_dbbrain_security_audit_log_export_task" "task" { + sec_audit_group_id = "sec_audit_group_id" + start_time = "start_time" + end_time = "end_time" + product = "mysql" + danger_levels = [0,1,2] +} + +data "tencentcloud_dbbrain_security_audit_log_export_tasks" "tasks" { + sec_audit_group_id = "sec_audit_group_id" + product = "mysql" + async_request_ids = [tencentcloud_dbbrain_security_audit_log_export_task.task.async_request_id] +} +``` +*/ +package tencentcloud + +import ( + "context" + "log" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + dbbrain "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/dbbrain/v20210527" + "github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/internal/helper" +) + +func dataSourceTencentCloudDbbrainSecurityAuditLogExportTasks() *schema.Resource { + return &schema.Resource{ + Read: dataSourceTencentCloudDbbrainSecurityAuditLogExportTasksRead, + Schema: map[string]*schema.Schema{ + "sec_audit_group_id": { + Type: schema.TypeString, + Required: true, + Description: "security audit group id.", + }, + + "product": { + Type: schema.TypeString, + Required: true, + Description: "product, optional value is mysql.", + }, + + "async_request_ids": { + Type: schema.TypeSet, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + Optional: true, + Description: "async request id list.", + }, + + "list": { + Type: schema.TypeList, + Computed: true, + Description: "security audit log export task list.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "async_request_id": { + Type: schema.TypeInt, + Computed: true, + Description: "async request id.", + }, + "start_time": { + Type: schema.TypeString, + Computed: true, + Description: "start time.", + }, + "end_time": { + Type: schema.TypeString, + Computed: true, + Description: "end time.", + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: "create time.", + }, + "status": { + Type: schema.TypeString, + Computed: true, + Description: "status.", + }, + "progress": { + Type: schema.TypeInt, + Computed: true, + Description: "task progress.", + }, + "log_start_time": { + Type: schema.TypeString, + Computed: true, + Description: "log start time.", + }, + "log_end_time": { + Type: schema.TypeString, + Computed: true, + Description: "log end time.", + }, + "total_size": { + Type: schema.TypeInt, + Computed: true, + Description: "the total size of log.", + }, + "danger_levels": { + Type: schema.TypeSet, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + Computed: true, + Description: "danger level list.", + }, + }, + }, + }, + + "result_output_file": { + Type: schema.TypeString, + Optional: true, + Description: "Used to save results.", + }, + }, + } +} + +func dataSourceTencentCloudDbbrainSecurityAuditLogExportTasksRead(d *schema.ResourceData, meta interface{}) error { + defer logElapsed("data_source.tencentcloud_dbbrain_security_audit_log_export_tasks.read")() + defer inconsistentCheck(d, meta)() + + logId := getLogId(contextNil) + 
ctx := context.WithValue(context.TODO(), logIdKey, logId) + var sag_id string + + paramMap := make(map[string]interface{}) + if v, ok := d.GetOk("sec_audit_group_id"); ok { + paramMap["sec_audit_group_id"] = helper.String(v.(string)) + sag_id = v.(string) + } + + if v, ok := d.GetOk("product"); ok { + paramMap["product"] = helper.String(v.(string)) + } + + if v, ok := d.GetOk("async_request_ids"); ok { + async_request_idSet := v.(*schema.Set).List() + tmpList := make([]*uint64, 0, len(async_request_idSet)) + for i := range async_request_idSet { + async_request_id := async_request_idSet[i].(int) + tmpList = append(tmpList, helper.IntUint64(async_request_id)) + } + paramMap["async_request_ids"] = tmpList + } + + dbbrainService := DbbrainService{client: meta.(*TencentCloudClient).apiV3Conn} + + var tasks []*dbbrain.SecLogExportTaskInfo + err := resource.Retry(readRetryTimeout, func() *resource.RetryError { + results, e := dbbrainService.DescribeDbbrainSecurityAuditLogExportTasksByFilter(ctx, paramMap) + if e != nil { + return retryError(e) + } + tasks = results + return nil + }) + if err != nil { + log.Printf("[CRITAL]%s read Dbbrain tasks failed, reason:%+v", logId, err) + return err + } + + ids := make([]string, 0, len(tasks)) + taskList := make([]map[string]interface{}, 0, len(tasks)) + + if tasks != nil { + + for _, task := range tasks { + taskMap := map[string]interface{}{} + if task.AsyncRequestId != nil { + taskMap["async_request_id"] = task.AsyncRequestId + } + if task.StartTime != nil { + taskMap["start_time"] = task.StartTime + } + if task.EndTime != nil { + taskMap["end_time"] = task.EndTime + } + if task.CreateTime != nil { + taskMap["create_time"] = task.CreateTime + } + if task.Status != nil { + taskMap["status"] = task.Status + } + if task.Progress != nil { + taskMap["progress"] = task.Progress + } + if task.LogStartTime != nil { + taskMap["log_start_time"] = task.LogStartTime + } + if task.LogEndTime != nil { + taskMap["log_end_time"] = task.LogEndTime + } + if task.TotalSize != nil { + taskMap["total_size"] = task.TotalSize + } + if task.DangerLevels != nil { + taskMap["danger_levels"] = task.DangerLevels + } + ids = append(ids, sag_id+FILED_SP+helper.UInt64ToStr(*task.AsyncRequestId)) + taskList = append(taskList, taskMap) + } + d.SetId(helper.DataResourceIdsHash(ids)) + _ = d.Set("list", taskList) + } + + output, ok := d.GetOk("result_output_file") + if ok && output.(string) != "" { + if e := writeToFile(output.(string), taskList); e != nil { + return e + } + } + + return nil +} diff --git a/tencentcloud/data_source_tc_dbbrain_security_audit_log_export_tasks_test.go b/tencentcloud/data_source_tc_dbbrain_security_audit_log_export_tasks_test.go new file mode 100644 index 0000000000..a057d5be7f --- /dev/null +++ b/tencentcloud/data_source_tc_dbbrain_security_audit_log_export_tasks_test.go @@ -0,0 +1,50 @@ +package tencentcloud + +import ( + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" +) + +func TestAccTencentCloudDbbrainSecurityAuditLogExportTasksDataSource(t *testing.T) { + t.Parallel() + loc, _ := time.LoadLocation("Asia/Chongqing") + startTime := time.Now().Add(-2 * time.Hour).In(loc).Format("2006-01-02T15:04:05+08:00") + endTime := time.Now().Add(2 * time.Hour).In(loc).Format("2006-01-02T15:04:05+08:00") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDbbrainSecurityAuditLogExportTasks(startTime, 
endTime), + Check: resource.ComposeTestCheckFunc( + testAccCheckTencentCloudDataSourceID("data.tencentcloud_dbbrain_security_audit_log_export_tasks.tasks"), + resource.TestCheckResourceAttrSet("data.tencentcloud_dbbrain_security_audit_log_export_tasks.tasks", "list.#"), + ), + }, + }, + }) +} + +func testAccDataSourceDbbrainSecurityAuditLogExportTasks(st, et string) string { + return fmt.Sprintf(` + +resource "tencentcloud_dbbrain_security_audit_log_export_task" "task" { + sec_audit_group_id = "%s" + start_time = "%s" + end_time = "%s" + product = "mysql" + danger_levels = [0,1,2] +} + +data "tencentcloud_dbbrain_security_audit_log_export_tasks" "tasks" { + sec_audit_group_id = "%s" + product = "mysql" + async_request_ids = [tencentcloud_dbbrain_security_audit_log_export_task.task.async_request_id] +} + +`, defaultDbBrainsagId, st, et, defaultDbBrainsagId) +} diff --git a/tencentcloud/data_source_tc_dbbrain_sql_filters.go b/tencentcloud/data_source_tc_dbbrain_sql_filters.go new file mode 100644 index 0000000000..bc2eba419d --- /dev/null +++ b/tencentcloud/data_source_tc_dbbrain_sql_filters.go @@ -0,0 +1,241 @@ +/* +Use this data source to query detailed information of dbbrain sqlFilters + +Example Usage + +```hcl +resource "tencentcloud_dbbrain_sql_filter" "sql_filter" { + instance_id = "mysql_ins_id" + session_token { + user = "user" + password = "password" + } + sql_type = "SELECT" + filter_key = "test" + max_concurrency = 10 + duration = 3600 +} + +data "tencentcloud_dbbrain_sql_filters" "sql_filters" { + instance_id = "mysql_ins_id" + filter_ids = [tencentcloud_dbbrain_sql_filter.sql_filter.filter_id] + } +``` +*/ +package tencentcloud + +import ( + "context" + "log" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + dbbrain "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/dbbrain/v20210527" + "github.com/tencentcloudstack/terraform-provider-tencentcloud/tencentcloud/internal/helper" +) + +func dataSourceTencentCloudDbbrainSqlFilters() *schema.Resource { + return &schema.Resource{ + Read: dataSourceTencentCloudDbbrainSqlFiltersRead, + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Required: true, + Description: "instance id.", + }, + + "filter_ids": { + Type: schema.TypeSet, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + Optional: true, + Description: "filter id list.", + }, + + "statuses": { + Type: schema.TypeSet, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Optional: true, + Description: "status list.", + }, + + "list": { + Type: schema.TypeList, + Computed: true, + Description: "sql filter list.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "id": { + Type: schema.TypeInt, + Computed: true, + Description: "task id.", + }, + "status": { + Type: schema.TypeString, + Computed: true, + Description: "task status, optional value is RUNNING, FINISHED, TERMINATED.", + }, + "sql_type": { + Type: schema.TypeString, + Computed: true, + Description: "sql type, optional value is SELECT, UPDATE, DELETE, INSERT, REPLACE.", + }, + "origin_keys": { + Type: schema.TypeString, + Computed: true, + Description: "origin keys.", + }, + "origin_rule": { + Type: schema.TypeString, + Computed: true, + Description: "origin rule.", + }, + "rejected_sql_count": { + Type: schema.TypeInt, + Computed: true, + Description: "rejected sql count.", + }, + "current_concurrency": { + Type: schema.TypeInt, + Computed: true, + Description: "current concurrency.", 
+ }, + "max_concurrency": { + Type: schema.TypeInt, + Computed: true, + Description: "maxmum concurrency.", + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: "create time.", + }, + "current_time": { + Type: schema.TypeString, + Computed: true, + Description: "current time.", + }, + "expire_time": { + Type: schema.TypeString, + Computed: true, + Description: "expire time.", + }, + }, + }, + }, + + "result_output_file": { + Type: schema.TypeString, + Optional: true, + Description: "Used to save results.", + }, + }, + } +} + +func dataSourceTencentCloudDbbrainSqlFiltersRead(d *schema.ResourceData, meta interface{}) error { + defer logElapsed("data_source.tencentcloud_dbbrain_sql_filters.read")() + defer inconsistentCheck(d, meta)() + + logId := getLogId(contextNil) + ctx := context.WithValue(context.TODO(), logIdKey, logId) + + paramMap := make(map[string]interface{}) + if v, ok := d.GetOk("instance_id"); ok { + paramMap["instance_id"] = helper.String(v.(string)) + } + + if v, ok := d.GetOk("filter_ids"); ok { + filter_idSet := v.(*schema.Set).List() + tmpList := make([]*int64, 0, len(filter_idSet)) + for i := range filter_idSet { + filter_id := filter_idSet[i].(int) + tmpList = append(tmpList, helper.IntInt64(filter_id)) + } + paramMap["filter_ids"] = tmpList + } + + if v, ok := d.GetOk("statuses"); ok { + statuseSet := v.(*schema.Set).List() + tmpList := make([]*string, 0, len(statuseSet)) + for i := range statuseSet { + status := statuseSet[i].(string) + tmpList = append(tmpList, helper.String(status)) + } + paramMap["statuses"] = tmpList + } + + dbbrainService := DbbrainService{client: meta.(*TencentCloudClient).apiV3Conn} + + var items []*dbbrain.SQLFilter + err := resource.Retry(readRetryTimeout, func() *resource.RetryError { + results, e := dbbrainService.DescribeDbbrainSqlFiltersByFilter(ctx, paramMap) + if e != nil { + return retryError(e) + } + items = results + return nil + }) + if err != nil { + log.Printf("[CRITAL]%s read Dbbrain items failed, reason:%+v", logId, err) + return err + } + + ids := make([]string, 0, len(items)) + itemList := make([]map[string]interface{}, 0, len(items)) + + if items != nil { + for _, item := range items { + itemMap := map[string]interface{}{} + if item.Id != nil { + itemMap["id"] = item.Id + } + if item.Status != nil { + itemMap["status"] = item.Status + } + if item.SqlType != nil { + itemMap["sql_type"] = item.SqlType + } + if item.OriginKeys != nil { + itemMap["origin_keys"] = item.OriginKeys + } + if item.OriginRule != nil { + itemMap["origin_rule"] = item.OriginRule + } + if item.RejectedSqlCount != nil { + itemMap["rejected_sql_count"] = item.RejectedSqlCount + } + if item.CurrentConcurrency != nil { + itemMap["current_concurrency"] = item.CurrentConcurrency + } + if item.MaxConcurrency != nil { + itemMap["max_concurrency"] = item.MaxConcurrency + } + if item.CreateTime != nil { + itemMap["create_time"] = item.CreateTime + } + if item.CurrentTime != nil { + itemMap["current_time"] = item.CurrentTime + } + if item.ExpireTime != nil { + itemMap["expire_time"] = item.ExpireTime + } + ids = append(ids, helper.Int64ToStr(*item.Id)) + itemList = append(itemList, itemMap) + } + d.SetId(helper.DataResourceIdsHash(ids)) + _ = d.Set("list", itemList) + } + + output, ok := d.GetOk("result_output_file") + if ok && output.(string) != "" { + if e := writeToFile(output.(string), itemList); e != nil { + return e + } + } + + return nil +} diff --git a/tencentcloud/data_source_tc_dbbrain_sql_filters_test.go 
b/tencentcloud/data_source_tc_dbbrain_sql_filters_test.go new file mode 100644 index 0000000000..34ab53a291 --- /dev/null +++ b/tencentcloud/data_source_tc_dbbrain_sql_filters_test.go @@ -0,0 +1,48 @@ +package tencentcloud + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" +) + +func TestAccTencentCloudDbbrainSqlFiltersDataSource(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDbbrainSqlFilters(), + Check: resource.ComposeTestCheckFunc( + testAccCheckTencentCloudDataSourceID("data.tencentcloud_dbbrain_sql_filters.sql_filters"), + resource.TestCheckResourceAttrSet("data.tencentcloud_dbbrain_sql_filters.sql_filters", "list.#"), + ), + }, + }, + }) +} + +func testAccDataSourceDbbrainSqlFilters() string { + return fmt.Sprintf(`%s + +resource "tencentcloud_dbbrain_sql_filter" "sql_filter" { + instance_id = local.mysql_id + session_token { + user = "keep_dbbrain" + password = "Test@123456#" + } + sql_type = "SELECT" + filter_key = "test" + max_concurrency = 10 + duration = 3600 +} + +data "tencentcloud_dbbrain_sql_filters" "sql_filters" { + instance_id = local.mysql_id + filter_ids = [tencentcloud_dbbrain_sql_filter.sql_filter.filter_id] + } + `, CommonPresetMysql) +} diff --git a/tencentcloud/provider.go b/tencentcloud/provider.go index 7880d15462..78538ca12e 100644 --- a/tencentcloud/provider.go +++ b/tencentcloud/provider.go @@ -800,6 +800,9 @@ TDSQL-C for PostgreSQL(TDCPG) tencentcloud_tdcpg_instance DBbrain + Data Source + tencentcloud_dbbrain_sql_filters + tencentcloud_dbbrain_security_audit_log_export_tasks Resource tencentcloud_dbbrain_sql_filter tencentcloud_dbbrain_security_audit_log_export_task @@ -1125,6 +1128,8 @@ func Provider() terraform.ResourceProvider { "tencentcloud_dnspod_records": dataSourceTencentCloudDnspodRecords(), "tencentcloud_tat_command": dataSourceTencentCloudTatCommand(), "tencentcloud_tat_invoker": dataSourceTencentCloudTatInvoker(), + "tencentcloud_dbbrain_sql_filters": dataSourceTencentCloudDbbrainSqlFilters(), + "tencentcloud_dbbrain_security_audit_log_export_tasks": dataSourceTencentCloudDbbrainSecurityAuditLogExportTasks(), }, ResourcesMap: map[string]*schema.Resource{ diff --git a/tencentcloud/resource_tc_dbbrain_security_audit_log_export_task.go b/tencentcloud/resource_tc_dbbrain_security_audit_log_export_task.go index f3efd2e4fd..fb44ecc0ca 100644 --- a/tencentcloud/resource_tc_dbbrain_security_audit_log_export_task.go +++ b/tencentcloud/resource_tc_dbbrain_security_audit_log_export_task.go @@ -75,6 +75,12 @@ func resourceTencentCloudDbbrainSecurityAuditLogExportTask() *schema.Resource { ForceNew: true, Description: "List of log risk levels, supported values include: 0 no risk; 1 low risk; 2 medium risk; 3 high risk.", }, + + "async_request_id": { + Type: schema.TypeInt, + Computed: true, + Description: "request of async id.", + }, }, } } @@ -197,6 +203,10 @@ func resourceTencentCloudDbbrainSecurityAuditLogExportTaskRead(d *schema.Resourc _ = d.Set("danger_levels", securityAuditLogExportTask.DangerLevels) } + if securityAuditLogExportTask.AsyncRequestId != nil { + _ = d.Set("async_request_id", securityAuditLogExportTask.AsyncRequestId) + } + return nil } diff --git a/tencentcloud/resource_tc_dbbrain_security_audit_log_export_task_test.go b/tencentcloud/resource_tc_dbbrain_security_audit_log_export_task_test.go index 51c2de1479..026bcc07ef 100644 --- 
a/tencentcloud/resource_tc_dbbrain_security_audit_log_export_task_test.go +++ b/tencentcloud/resource_tc_dbbrain_security_audit_log_export_task_test.go @@ -26,8 +26,11 @@ func testSweepDbbrainSecurityAuditLogExportTask(r string) error { cli, _ := sharedClientForRegion(r) dbbrainService := DbbrainService{client: cli.(*TencentCloudClient).apiV3Conn} sagId := helper.String(defaultDbBrainsagId) + param := map[string]interface{}{ + "sec_audit_group_id": sagId, + } - ret, err := dbbrainService.DescribeDbbrainSecurityAuditLogExportTasks(ctx, sagId, nil, nil) + ret, err := dbbrainService.DescribeDbbrainSecurityAuditLogExportTasksByFilter(ctx, param) if err != nil { return err } @@ -35,7 +38,7 @@ func testSweepDbbrainSecurityAuditLogExportTask(r string) error { return fmt.Errorf("Dbbrain security audit log export tasks not exists.") } - for _, v := range ret.Tasks { + for _, v := range ret { delId := *v.AsyncRequestId err := resource.Retry(readRetryTimeout, func() *resource.RetryError { diff --git a/tencentcloud/resource_tc_dbbrain_sql_filter.go b/tencentcloud/resource_tc_dbbrain_sql_filter.go index 096bb11e74..a4f4a529a8 100644 --- a/tencentcloud/resource_tc_dbbrain_sql_filter.go +++ b/tencentcloud/resource_tc_dbbrain_sql_filter.go @@ -120,6 +120,12 @@ func resourceTencentCloudDbbrainSqlFilter() *schema.Resource { Computed: true, Description: "filter status.", }, + + "filter_id": { + Type: schema.TypeInt, + Computed: true, + Description: "filter id.", + }, }, } } @@ -276,6 +282,10 @@ func resourceTencentCloudDbbrainSqlFilterRead(d *schema.ResourceData, meta inter _ = d.Set("status", sqlFilter.Status) } + if sqlFilter.Id != nil { + _ = d.Set("filter_id", sqlFilter.Id) + } + return nil } diff --git a/tencentcloud/service_tencentcloud_dbbrain.go b/tencentcloud/service_tencentcloud_dbbrain.go index 467a8192d5..5774702edf 100644 --- a/tencentcloud/service_tencentcloud_dbbrain.go +++ b/tencentcloud/service_tencentcloud_dbbrain.go @@ -15,7 +15,15 @@ type DbbrainService struct { } func (me *DbbrainService) DescribeDbbrainSqlFilter(ctx context.Context, instanceId, filterId *string) (sqlFilter *dbbrain.SQLFilter, errRet error) { - ret, errRet := me.DescribeDbbrainSqlFilters(ctx, instanceId, []*int64{helper.StrToInt64Point(*filterId)}) + param := make(map[string]interface{}) + if instanceId != nil { + param["instance_id"] = instanceId + } + if filterId != nil { + param["filter_ids"] = []*int64{helper.StrToInt64Point(*filterId)} + } + + ret, errRet := me.DescribeDbbrainSqlFiltersByFilter(ctx, param) if errRet != nil { return } @@ -25,7 +33,7 @@ func (me *DbbrainService) DescribeDbbrainSqlFilter(ctx context.Context, instance return } -func (me *DbbrainService) DescribeDbbrainSqlFilters(ctx context.Context, instanceId *string, filterIds []*int64) (sqlFilters []*dbbrain.SQLFilter, errRet error) { +func (me *DbbrainService) DescribeDbbrainSqlFiltersByFilter(ctx context.Context, param map[string]interface{}) (sqlFilters []*dbbrain.SQLFilter, errRet error) { var ( logId = getLogId(ctx) request = dbbrain.NewDescribeSqlFiltersRequest() @@ -34,27 +42,51 @@ func (me *DbbrainService) DescribeDbbrainSqlFilters(ctx context.Context, instanc defer func() { if errRet != nil { log.Printf("[CRITAL]%s api[%s] fail, request body [%s], reason[%s]\n", - logId, "query object", request.ToJsonString(), errRet.Error()) + logId, "query objects", request.ToJsonString(), errRet.Error()) } }() - request.InstanceId = instanceId - if filterIds != nil { - request.FilterIds = filterIds - } - response, err := 
me.client.UseDbbrainClient().DescribeSqlFilters(request) - if err != nil { - log.Printf("[CRITAL]%s api[%s] fail, request body [%s], reason[%s]\n", - logId, request.GetAction(), request.ToJsonString(), err.Error()) - errRet = err - return + for k, v := range param { + if k == "instance_id" { + request.InstanceId = v.(*string) + } + + if k == "filter_ids" { + request.FilterIds = v.([]*int64) + } + + if k == "statuses" { + request.Statuses = v.([]*string) + } } - log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", - logId, request.GetAction(), request.ToJsonString(), response.ToJsonString()) - if len(response.Response.Items) < 1 { - return + ratelimit.Check(request.GetAction()) + + var offset int64 = 0 + var pageSize int64 = 20 + + for { + request.Offset = &offset + request.Limit = &pageSize + ratelimit.Check(request.GetAction()) + response, err := me.client.UseDbbrainClient().DescribeSqlFilters(request) + if err != nil { + log.Printf("[CRITAL]%s api[%s] fail, request body [%s], reason[%s]\n", + logId, request.GetAction(), request.ToJsonString(), err.Error()) + errRet = err + return + } + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", + logId, request.GetAction(), request.ToJsonString(), response.ToJsonString()) + + if response == nil || len(response.Response.Items) < 1 { + break + } + sqlFilters = append(sqlFilters, response.Response.Items...) + if len(response.Response.Items) < int(pageSize) { + break + } + offset += pageSize } - sqlFilters = response.Response.Items return } @@ -118,17 +150,30 @@ func (me *DbbrainService) DeleteDbbrainSqlFilterById(ctx context.Context, instan } func (me *DbbrainService) DescribeDbbrainSecurityAuditLogExportTask(ctx context.Context, secAuditGroupId, asyncRequestId, product *string) (task *dbbrain.SecLogExportTaskInfo, errRet error) { - ret, errRet := me.DescribeDbbrainSecurityAuditLogExportTasks(ctx, secAuditGroupId, []*string{asyncRequestId}, product) + param := make(map[string]interface{}) + if secAuditGroupId != nil { + param["sec_audit_group_id"] = secAuditGroupId + } + if asyncRequestId != nil { + param["async_request_ids"] = []*uint64{helper.StrToUint64Point(*asyncRequestId)} + } + if product != nil { + param["product"] = product + } else { + param["product"] = helper.String("mysql") + } + + ret, errRet := me.DescribeDbbrainSecurityAuditLogExportTasksByFilter(ctx, param) if errRet != nil { return } if ret != nil { - return ret.Tasks[0], nil + return ret[0], nil } return } -func (me *DbbrainService) DescribeDbbrainSecurityAuditLogExportTasks(ctx context.Context, secAuditGroupId *string, asyncRequestId []*string, product *string) (params *dbbrain.DescribeSecurityAuditLogExportTasksResponseParams, errRet error) { +func (me *DbbrainService) DescribeDbbrainSecurityAuditLogExportTasksByFilter(ctx context.Context, param map[string]interface{}) (securityAuditLogExportTasks []*dbbrain.SecLogExportTaskInfo, errRet error) { var ( logId = getLogId(ctx) request = dbbrain.NewDescribeSecurityAuditLogExportTasksRequest() @@ -141,31 +186,47 @@ func (me *DbbrainService) DescribeDbbrainSecurityAuditLogExportTasks(ctx context } }() - request.SecAuditGroupId = secAuditGroupId + for k, v := range param { + if k == "sec_audit_group_id" { + request.SecAuditGroupId = v.(*string) + } - if asyncRequestId != nil { - request.AsyncRequestIds = helper.StringsToUint64Pointer(asyncRequestId) - } + if k == "product" { + request.Product = v.(*string) + } - if product != nil { - request.Product = product - } else { - 
request.Product = helper.String("mysql") + if k == "async_request_ids" { + request.AsyncRequestIds = v.([]*uint64) + } } + ratelimit.Check(request.GetAction()) - response, err := me.client.UseDbbrainClient().DescribeSecurityAuditLogExportTasks(request) - if err != nil { - log.Printf("[CRITAL]%s api[%s] fail, request body [%s], reason[%s]\n", - logId, request.GetAction(), request.ToJsonString(), err.Error()) - errRet = err - return - } - log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", - logId, request.GetAction(), request.ToJsonString(), response.ToJsonString()) - if len(response.Response.Tasks) < 1 { - return + var offset uint64 = 0 + var pageSize uint64 = 20 + + for { + request.Offset = &offset + request.Limit = &pageSize + ratelimit.Check(request.GetAction()) + response, err := me.client.UseDbbrainClient().DescribeSecurityAuditLogExportTasks(request) + if err != nil { + log.Printf("[CRITAL]%s api[%s] fail, request body [%s], reason[%s]\n", + logId, request.GetAction(), request.ToJsonString(), err.Error()) + errRet = err + return + } + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", + logId, request.GetAction(), request.ToJsonString(), response.ToJsonString()) + + if response == nil || len(response.Response.Tasks) < 1 { + break + } + securityAuditLogExportTasks = append(securityAuditLogExportTasks, response.Response.Tasks...) + if len(response.Response.Tasks) < int(pageSize) { + break + } + offset += pageSize } - params = response.Response return } @@ -195,6 +256,7 @@ func (me *DbbrainService) DeleteDbbrainSecurityAuditLogExportTaskById(ctx contex errRet = err return err } + log.Printf("[DEBUG]%s api[%s] success, request body [%s], response body [%s]\n", logId, request.GetAction(), request.ToJsonString(), response.ToJsonString()) diff --git a/website/docs/d/dbbrain_security_audit_log_export_tasks.html.markdown b/website/docs/d/dbbrain_security_audit_log_export_tasks.html.markdown new file mode 100644 index 0000000000..f9a59a07d7 --- /dev/null +++ b/website/docs/d/dbbrain_security_audit_log_export_tasks.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "DBbrain" +layout: "tencentcloud" +page_title: "TencentCloud: tencentcloud_dbbrain_security_audit_log_export_tasks" +sidebar_current: "docs-tencentcloud-datasource-dbbrain_security_audit_log_export_tasks" +description: |- + Use this data source to query detailed information of dbbrain securityAuditLogExportTasks +--- + +# tencentcloud_dbbrain_security_audit_log_export_tasks + +Use this data source to query detailed information of dbbrain securityAuditLogExportTasks + +## Example Usage + +```hcl +resource "tencentcloud_dbbrain_security_audit_log_export_task" "task" { + sec_audit_group_id = "sec_audit_group_id" + start_time = "start_time" + end_time = "end_time" + product = "mysql" + danger_levels = [0, 1, 2] +} + +data "tencentcloud_dbbrain_security_audit_log_export_tasks" "tasks" { + sec_audit_group_id = "sec_audit_group_id" + product = "mysql" + async_request_ids = [tencentcloud_dbbrain_security_audit_log_export_task.task.async_request_id] +} +``` + +## Argument Reference + +The following arguments are supported: + +* `product` - (Required, String) product, optional value is mysql. +* `sec_audit_group_id` - (Required, String) security audit group id. +* `async_request_ids` - (Optional, Set: [`Int`]) async request id list. +* `result_output_file` - (Optional, String) Used to save results. 
+
+## Attributes Reference
+
+In addition to all arguments above, the following attributes are exported:
+
+* `list` - security audit log export task list.
+  * `async_request_id` - async request id.
+  * `create_time` - create time.
+  * `danger_levels` - danger level list.
+  * `end_time` - end time.
+  * `log_end_time` - log end time.
+  * `log_start_time` - log start time.
+  * `progress` - task progress.
+  * `start_time` - start time.
+  * `status` - status.
+  * `total_size` - the total size of log.
+
+
diff --git a/website/docs/d/dbbrain_sql_filters.html.markdown b/website/docs/d/dbbrain_sql_filters.html.markdown
new file mode 100644
index 0000000000..9a386113e5
--- /dev/null
+++ b/website/docs/d/dbbrain_sql_filters.html.markdown
@@ -0,0 +1,61 @@
+---
+subcategory: "DBbrain"
+layout: "tencentcloud"
+page_title: "TencentCloud: tencentcloud_dbbrain_sql_filters"
+sidebar_current: "docs-tencentcloud-datasource-dbbrain_sql_filters"
+description: |-
+  Use this data source to query detailed information of dbbrain sqlFilters
+---
+
+# tencentcloud_dbbrain_sql_filters
+
+Use this data source to query detailed information of dbbrain sqlFilters
+
+## Example Usage
+
+```hcl
+resource "tencentcloud_dbbrain_sql_filter" "sql_filter" {
+  instance_id = "mysql_ins_id"
+  session_token {
+    user = "user"
+    password = "password"
+  }
+  sql_type = "SELECT"
+  filter_key = "test"
+  max_concurrency = 10
+  duration = 3600
+}
+
+data "tencentcloud_dbbrain_sql_filters" "sql_filters" {
+  instance_id = "mysql_ins_id"
+  filter_ids = [tencentcloud_dbbrain_sql_filter.sql_filter.filter_id]
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `instance_id` - (Required, String) instance id.
+* `filter_ids` - (Optional, Set: [`Int`]) filter id list.
+* `result_output_file` - (Optional, String) Used to save results.
+* `statuses` - (Optional, Set: [`String`]) status list.
+
+## Attributes Reference
+
+In addition to all arguments above, the following attributes are exported:
+
+* `list` - sql filter list.
+  * `create_time` - create time.
+  * `current_concurrency` - current concurrency.
+  * `current_time` - current time.
+  * `expire_time` - expire time.
+  * `id` - task id.
+  * `max_concurrency` - maximum concurrency.
+  * `origin_keys` - origin keys.
+  * `origin_rule` - origin rule.
+  * `rejected_sql_count` - rejected sql count.
+  * `sql_type` - sql type, optional value is SELECT, UPDATE, DELETE, INSERT, REPLACE.
+  * `status` - task status, optional value is RUNNING, FINISHED, TERMINATED.
+
+
diff --git a/website/docs/r/dbbrain_security_audit_log_export_task.html.markdown b/website/docs/r/dbbrain_security_audit_log_export_task.html.markdown
index 4c7cdd8e57..5f104ce465 100644
--- a/website/docs/r/dbbrain_security_audit_log_export_task.html.markdown
+++ b/website/docs/r/dbbrain_security_audit_log_export_task.html.markdown
@@ -38,6 +38,6 @@ The following arguments are supported:
 In addition to all arguments above, the following attributes are exported:
 
 * `id` - ID of the resource.
-
+* `async_request_id` - async request id.
 
 
diff --git a/website/docs/r/dbbrain_sql_filter.html.markdown b/website/docs/r/dbbrain_sql_filter.html.markdown
index 076c381ef8..45d5fe8469 100644
--- a/website/docs/r/dbbrain_sql_filter.html.markdown
+++ b/website/docs/r/dbbrain_sql_filter.html.markdown
@@ -65,6 +65,6 @@ The `session_token` object supports the following:
 In addition to all arguments above, the following attributes are exported:
 
 * `id` - ID of the resource.
-
+* `filter_id` - filter id.
 
 
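Taken together, the docs above describe a resource-to-data-source chain through the new `filter_id` computed attribute. Below is a minimal, hypothetical HCL sketch of that chain and of surfacing the returned `list`; the variable name, credentials, and output name are illustrative placeholders rather than values taken from this change.

```hcl
# Sketch only: variable name, credentials, and output name are placeholders.
variable "mysql_instance_id" {
  type = string # an existing MySQL instance ID
}

resource "tencentcloud_dbbrain_sql_filter" "example" {
  instance_id = var.mysql_instance_id
  session_token {
    user     = "example_user" # illustrative credentials
    password = "Example@12345"
  }
  sql_type        = "SELECT"
  filter_key      = "test"
  max_concurrency = 10
  duration        = 3600
}

data "tencentcloud_dbbrain_sql_filters" "example" {
  instance_id = var.mysql_instance_id
  # filter_id is the computed attribute added by this change
  filter_ids = [tencentcloud_dbbrain_sql_filter.example.filter_id]
}

output "sql_filter_status" {
  # list is the computed result set exposed by the data source
  value = data.tencentcloud_dbbrain_sql_filters.example.list[0].status
}
```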
diff --git a/website/tencentcloud.erb b/website/tencentcloud.erb
index a13da260df..81610604a7 100644
--- a/website/tencentcloud.erb
+++ b/website/tencentcloud.erb
@@ -943,7 +943,17 @@
  • DBbrain
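Likewise, a minimal, hypothetical HCL sketch of the security audit log export flow built on the new `async_request_id` computed attribute; the variable, the time range, and the output name are illustrative placeholders.

```hcl
# Sketch only: variable, time range, and output name are placeholders.
variable "sec_audit_group_id" {
  type = string # an existing security audit group ID
}

resource "tencentcloud_dbbrain_security_audit_log_export_task" "example" {
  sec_audit_group_id = var.sec_audit_group_id
  start_time         = "2023-01-01T00:00:00+08:00" # placeholder time range
  end_time           = "2023-01-01T02:00:00+08:00"
  product            = "mysql"
  danger_levels      = [0, 1, 2]
}

data "tencentcloud_dbbrain_security_audit_log_export_tasks" "example" {
  sec_audit_group_id = var.sec_audit_group_id
  product            = "mysql"
  # async_request_id is the computed attribute added by this change
  async_request_ids = [tencentcloud_dbbrain_security_audit_log_export_task.example.async_request_id]
}

output "export_task_status" {
  value = data.tencentcloud_dbbrain_security_audit_log_export_tasks.example.list[0].status
}
```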