-
Notifications
You must be signed in to change notification settings - Fork 392
/
Copy pathresource_datadog_logs_integration_pipeline.go
89 lines (80 loc) · 3.61 KB
/
resource_datadog_logs_integration_pipeline.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
package datadog
import (
"context"
"github.com/terraform-providers/terraform-provider-datadog/datadog/internal/utils"
"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)
// resourceDatadogLogsIntegrationPipeline returns the Terraform resource
// definition for Datadog integration pipelines. These pipelines are installed
// automatically by Datadog for specific log sources; the resource exists so
// they can be referenced from a `datadog_logs_pipeline_order` resource, and
// only exposes the `is_enabled` toggle.
func resourceDatadogLogsIntegrationPipeline() *schema.Resource {
	// Schema is provided lazily via SchemaFunc; only is_enabled is mutable.
	integrationPipelineSchema := func() map[string]*schema.Schema {
		return map[string]*schema.Schema{
			"is_enabled": {
				Description: "Boolean value to enable your pipeline.",
				Type:        schema.TypeBool,
				Optional:    true,
			},
		}
	}

	return &schema.Resource{
		Description:   "Provides a Datadog Logs Pipeline API resource to manage the integrations. Integration pipelines are the pipelines that are automatically installed for your organization when sending the logs with specific sources. You don't need to maintain or update these types of pipelines. Keeping them as resources, however, allows you to manage the order of your pipelines by referencing them in your `datadog_logs_pipeline_order` resource. If you don't need the `pipeline_order` feature, this resource declaration can be omitted.",
		CreateContext: resourceDatadogLogsIntegrationPipelineCreate,
		UpdateContext: resourceDatadogLogsIntegrationPipelineUpdate,
		ReadContext:   resourceDatadogLogsIntegrationPipelineRead,
		DeleteContext: resourceDatadogLogsIntegrationPipelineDelete,
		Importer: &schema.ResourceImporter{
			StateContext: schema.ImportStatePassthroughContext,
		},
		SchemaFunc: integrationPipelineSchema,
	}
}
// resourceDatadogLogsIntegrationPipelineCreate always fails: integration
// pipelines are created by Datadog automatically and can only be brought
// under Terraform management by importing them (see the resource Importer).
func resourceDatadogLogsIntegrationPipelineCreate(_ context.Context, _ *schema.ResourceData, _ interface{}) diag.Diagnostics {
return diag.Errorf("cannot create an integration pipeline, please import it first to make changes")
}
// updateLogsIntegrationPipelineState syncs Terraform state with the pipeline
// returned by the Datadog API. Only the `is_enabled` attribute is tracked by
// this resource.
func updateLogsIntegrationPipelineState(d *schema.ResourceData, pipeline *datadogV1.LogsPipeline) diag.Diagnostics {
	err := d.Set("is_enabled", pipeline.GetIsEnabled())
	if err != nil {
		return diag.FromErr(err)
	}
	return nil
}
// resourceDatadogLogsIntegrationPipelineRead fetches the pipeline by its id
// and refreshes state. A pipeline that no longer exists, or that is not an
// integration (read-only) pipeline, is dropped from state.
func resourceDatadogLogsIntegrationPipelineRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
	conf := meta.(*ProviderConfiguration)

	pipeline, resp, err := conf.DatadogApiInstances.GetLogsPipelinesApiV1().
		GetLogsPipeline(conf.Auth, d.Id())
	if err != nil {
		// The API answers 400 for an unknown pipeline id; treat that as
		// "gone" and remove the resource from state instead of erroring.
		if resp != nil && resp.StatusCode == 400 {
			d.SetId("")
			return nil
		}
		return utils.TranslateClientErrorDiag(err, resp, "error getting logs integration pipeline")
	}
	if err := utils.CheckForUnparsed(pipeline); err != nil {
		return diag.FromErr(err)
	}
	// Integration pipelines are read-only; a writable pipeline is not managed
	// by this resource type, so drop it from state.
	if !pipeline.GetIsReadOnly() {
		d.SetId("")
		return nil
	}
	return updateLogsIntegrationPipelineState(d, &pipeline)
}
// resourceDatadogLogsIntegrationPipelineUpdate updates the only mutable field
// of an integration pipeline (`is_enabled`) through the Datadog API, then
// refreshes state from the response.
func resourceDatadogLogsIntegrationPipelineUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
	var ddPipeline datadogV1.LogsPipeline
	ddPipeline.SetIsEnabled(d.Get("is_enabled").(bool))
	providerConf := meta.(*ProviderConfiguration)
	apiInstances := providerConf.DatadogApiInstances
	auth := providerConf.Auth
	updatedPipeline, httpResponse, err := apiInstances.GetLogsPipelinesApiV1().
		UpdateLogsPipeline(auth, d.Id(), ddPipeline)
	if err != nil {
		return utils.TranslateClientErrorDiag(err, httpResponse, "error updating logs integration pipeline")
	}
	if err := utils.CheckForUnparsed(updatedPipeline); err != nil {
		return diag.FromErr(err)
	}
	// Use the generated nil-safe accessor instead of dereferencing
	// updatedPipeline.Id directly, which would panic if the API response
	// omitted the id field.
	d.SetId(updatedPipeline.GetId())
	return updateLogsIntegrationPipelineState(d, &updatedPipeline)
}
// resourceDatadogLogsIntegrationPipelineDelete is a no-op: integration
// pipelines are owned by Datadog and cannot be deleted through the API, so
// destroying the resource only removes it from Terraform state.
func resourceDatadogLogsIntegrationPipelineDelete(_ context.Context, _ *schema.ResourceData, _ interface{}) diag.Diagnostics {
return nil
}