This repository has been archived by the owner on Aug 16, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 14
/
bigquery_datasets.go
162 lines (154 loc) · 6.97 KB
/
bigquery_datasets.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
package bigquery
import (
"context"
"errors"
"fmt"
"net/http"
"github.com/cloudquery/cq-provider-gcp/client"
"github.com/cloudquery/cq-provider-sdk/provider/diag"
"github.com/cloudquery/cq-provider-sdk/provider/schema"
"google.golang.org/api/bigquery/v2"
"google.golang.org/api/googleapi"
)
// BigqueryDatasets returns the table definition for BigQuery dataset
// resources in a GCP project, including its child relations for dataset
// accesses and tables. Rows are keyed by (project_id, id).
func BigqueryDatasets() *schema.Table {
	// Column layout for the gcp_bigquery_datasets table; descriptions come
	// from the BigQuery v2 API dataset resource.
	columns := []schema.Column{
		{
			Name:        "project_id",
			Description: "GCP Project Id of the resource",
			Type:        schema.TypeString,
			Resolver:    client.ResolveProject,
		},
		{
			Name:        "creation_time",
			Description: "The time when this dataset was created, in milliseconds since the epoch",
			Type:        schema.TypeBigInt,
		},
		{
			Name:        "default_encryption_configuration_kms_key_name",
			Description: "Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table The BigQuery Service Account associated with your project requires access to this encryption key",
			Type:        schema.TypeString,
			Resolver:    schema.PathResolver("DefaultEncryptionConfiguration.KmsKeyName"),
		},
		{
			Name:        "default_partition_expiration_ms",
			Description: "The default partition expiration for all partitioned tables in the dataset, in milliseconds Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones The storage in a partition will have an expiration time of its partition time plus this value Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table If you provide an explicit timePartitioningexpirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property",
			Type:        schema.TypeBigInt,
		},
		{
			Name:        "default_table_expiration_ms",
			Description: "The default lifetime of all tables in the dataset, in milliseconds The minimum value is 3600000 milliseconds (one hour) Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones When the expirationTime for a given table is reached, that table will be deleted automatically If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property",
			Type:        schema.TypeBigInt,
		},
		{
			Name:        "description",
			Description: "A user-friendly description of the dataset",
			Type:        schema.TypeString,
		},
		{
			Name:        "etag",
			Description: "A hash of the resource",
			Type:        schema.TypeString,
		},
		{
			Name:        "friendly_name",
			Description: "A descriptive name for the dataset",
			Type:        schema.TypeString,
		},
		{
			Name:        "id",
			Description: "The fully-qualified unique name of the dataset in the format projectId:datasetId The dataset name without the project name is given in the datasetId field When creating a new dataset, leave this field blank, and instead specify the datasetId field",
			Type:        schema.TypeString,
		},
		{
			Name:        "kind",
			Description: "The resource type",
			Type:        schema.TypeString,
		},
		{
			Name:        "labels",
			Description: "The labels associated with this dataset You can use these to organize and group your datasets You can set this property when inserting or updating a dataset See Creating and Updating Dataset Labels for more information",
			Type:        schema.TypeJSON,
		},
		{
			Name:        "last_modified_time",
			Description: "The date when this dataset or any of its tables was last modified, in milliseconds since the epoch",
			Type:        schema.TypeBigInt,
		},
		{
			Name:        "location",
			Description: "The geographic location where the dataset should reside The default value is US See details at https://cloudgooglecom/bigquery/docs/locations",
			Type:        schema.TypeString,
		},
		{
			Name:        "satisfies_pzs",
			Description: "Reserved for future use",
			Type:        schema.TypeBool,
			// The API field is "SatisfiesPZS"; the default resolver would not
			// match the column name, so resolve it by explicit path.
			Resolver: schema.PathResolver("SatisfiesPZS"),
		},
		{
			Name:        "self_link",
			Description: "A URL that can be used to access the resource again You can use this URL in Get or Update requests to the resource",
			Type:        schema.TypeString,
		},
	}

	return &schema.Table{
		Name:         "gcp_bigquery_datasets",
		Description:  "dataset resources in the project",
		Resolver:     fetchBigqueryDatasets,
		IgnoreError:  client.IgnoreErrorHandler,
		Multiplex:    client.ProjectMultiplexEnabledAPIs(client.BigQueryService),
		DeleteFilter: client.DeleteProjectFilter,
		Options:      schema.TableCreationOptions{PrimaryKeys: []string{"project_id", "id"}},
		Columns:      columns,
		Relations: []*schema.Table{
			BigqueryDatasetAccesses(),
			BigqueryDatasetTables(),
		},
	}
}
// ====================================================================================================================
// Table Resolver Functions
// ====================================================================================================================
// fetchBigqueryDatasets pages through the datasets of the configured project,
// fetches the full resource for each listed dataset via a Get call (the list
// response only contains a summary), and streams every *bigquery.Dataset into
// res. A "BigQuery not enabled" 400 from the List call is surfaced as a
// user-level ACCESS diagnostic with a remediation hint; all other errors are
// wrapped and returned as-is.
func fetchBigqueryDatasets(ctx context.Context, meta schema.ClientMeta, parent *schema.Resource, res chan<- interface{}) error {
	cl := meta.(*client.Client)
	for pageToken := ""; ; {
		listCall := cl.Services.BigQuery.Datasets.
			List(cl.ProjectId).
			PageToken(pageToken)
		resp, err := cl.RetryingDo(ctx, listCall)
		if err != nil {
			if isAccessErrorToIgnore(err, cl.ProjectId) {
				return diag.FromError(err, diag.USER, diag.WithType(diag.ACCESS),
					diag.WithDetails("Please verify the project id was configured correctly or BigQuery API is enabled in current project. Project names can't be used when fetching BigQuery resources"))
			}
			return diag.WrapError(err)
		}
		page := resp.(*bigquery.DatasetList)
		for _, item := range page.Datasets {
			getCall := cl.Services.BigQuery.Datasets.
				Get(cl.ProjectId, item.DatasetReference.DatasetId)
			raw, err := cl.RetryingDo(ctx, getCall)
			if err != nil {
				return diag.WrapError(err)
			}
			res <- raw.(*bigquery.Dataset)
		}
		// An empty NextPageToken marks the final page.
		if page.NextPageToken == "" {
			return nil
		}
		pageToken = page.NextPageToken
	}
}
// isAccessErrorToIgnore reports whether err is the specific googleapi 400
// response BigQuery returns when the API is not enabled for projectId (reason
// "invalid" with the exact "has not enabled BigQuery" message). Callers use it
// to convert that failure into a friendlier diagnostic instead of a raw error.
func isAccessErrorToIgnore(err error, projectId string) bool {
	var gerr *googleapi.Error
	if !errors.As(err, &gerr) {
		return false
	}
	if gerr.Code != http.StatusBadRequest || len(gerr.Errors) == 0 {
		return false
	}
	// Only the first error item is inspected; the message match must be exact.
	first := gerr.Errors[0]
	return first.Reason == "invalid" &&
		first.Message == fmt.Sprintf("The project %s has not enabled BigQuery.", projectId)
}