Skip to content

Commit

Permalink
feat(bigquery): add ExportDataStatistics to QueryStatistics (#9371)
Browse files Browse the repository at this point in the history
  • Loading branch information
alvarowolfx committed Feb 5, 2024
1 parent fa31ec0 commit 261c8d9
Show file tree
Hide file tree
Showing 2 changed files with 97 additions and 0 deletions.
72 changes: 72 additions & 0 deletions bigquery/integration_test.go
Expand Up @@ -2762,6 +2762,78 @@ func TestIntegration_ExtractExternal(t *testing.T) {
}
}

// TestIntegration_ExportDataStatistics creates a table, exports its data to
// GCS with an EXPORT DATA statement, and verifies that the resulting query
// job carries ExportDataStatistics with the expected file and row counts.
func TestIntegration_ExportDataStatistics(t *testing.T) {
	// Create a table, extract it to GCS using EXPORT DATA statement.
	if client == nil {
		t.Skip("Integration tests skipped")
	}
	ctx := context.Background()
	schema := Schema{
		{Name: "name", Type: StringFieldType},
		{Name: "num", Type: IntegerFieldType},
	}
	table := newTable(t, schema)
	defer table.Delete(ctx)

	// Extract to a GCS object as CSV. The wildcard in the URI lets BigQuery
	// shard the export across multiple objects if it chooses to.
	bucketName := testutil.ProjID()
	uri := fmt.Sprintf("gs://%s/bq-export-test-*.csv", bucketName)
	// Best-effort cleanup of the exported objects after the test.
	defer func() {
		it := storageClient.Bucket(bucketName).Objects(ctx, &storage.Query{
			MatchGlob: "bq-export-test-*.csv",
		})
		for {
			obj, err := it.Next()
			if err == iterator.Done {
				break
			}
			if err != nil {
				// Iterator errors are sticky: further Next calls return the
				// same error, so break instead of looping forever.
				t.Logf("failed to list exported objects: %v", err)
				break
			}
			err = storageClient.Bucket(bucketName).Object(obj.Name).Delete(ctx)
			t.Logf("deleted object %s: %v", obj.Name, err)
		}
	}()

	// EXPORT DATA to GCS object.
	sql := fmt.Sprintf(`EXPORT DATA
		OPTIONS (
			uri = '%s',
			format = 'CSV',
			overwrite = true,
			header = true,
			field_delimiter = ';'
		)
		AS (
			SELECT 'a' as name, 1 as num
			UNION ALL
			SELECT 'b' as name, 2 as num
			UNION ALL
			SELECT 'c' as name, 3 as num
		);`,
		uri)
	stats, _, err := runQuerySQL(ctx, sql)
	if err != nil {
		t.Fatal(err)
	}

	qStats, ok := stats.Details.(*QueryStatistics)
	if !ok {
		t.Fatal("expected query statistics not present")
	}

	if qStats.ExportDataStatistics == nil {
		t.Fatal("jobStatus missing ExportDataStatistics")
	}
	if qStats.ExportDataStatistics.FileCount != 1 {
		t.Fatalf("expected ExportDataStatistics to have 1 file, but got %d files", qStats.ExportDataStatistics.FileCount)
	}
	if qStats.ExportDataStatistics.RowCount != 3 {
		t.Fatalf("expected ExportDataStatistics to have 3 rows, got %d rows", qStats.ExportDataStatistics.RowCount)
	}
}

func TestIntegration_ReadNullIntoStruct(t *testing.T) {
// Reading a null into a struct field should return an error (not panic).
if client == nil {
Expand Down
25 changes: 25 additions & 0 deletions bigquery/job.go
Expand Up @@ -505,6 +505,30 @@ type QueryStatistics struct {

// The DDL target table, present only for CREATE/DROP FUNCTION/PROCEDURE queries.
DDLTargetRoutine *Routine

// Statistics for the EXPORT DATA statement as part of Query Job.
ExportDataStatistics *ExportDataStatistics
}

// ExportDataStatistics represents statistics for
// an EXPORT DATA statement emitted as part of a query job.
type ExportDataStatistics struct {
	// FileCount is the number of destination files generated by the export.
	FileCount int64

	// RowCount is the number of destination rows generated by the export.
	RowCount int64
}

// bqToExportDataStatistics converts the raw BigQuery API representation of
// EXPORT DATA statement statistics into this package's type. A nil input
// yields a nil result.
func bqToExportDataStatistics(in *bq.ExportDataStatistics) *ExportDataStatistics {
	if in == nil {
		return nil
	}
	return &ExportDataStatistics{
		FileCount: in.FileCount,
		RowCount:  in.RowCount,
	}
}

// BIEngineStatistics contains query statistics specific to the use of BI Engine.
Expand Down Expand Up @@ -1028,6 +1052,7 @@ func (j *Job) setStatistics(s *bq.JobStatistics, c *Client) {
DDLTargetTable: bqToTable(s.Query.DdlTargetTable, c),
DDLOperationPerformed: s.Query.DdlOperationPerformed,
DDLTargetRoutine: bqToRoutine(s.Query.DdlTargetRoutine, c),
ExportDataStatistics: bqToExportDataStatistics(s.Query.ExportDataStatistics),
StatementType: s.Query.StatementType,
TotalBytesBilled: s.Query.TotalBytesBilled,
TotalBytesProcessed: s.Query.TotalBytesProcessed,
Expand Down

0 comments on commit 261c8d9

Please sign in to comment.