Converts an Apache Avro schema (including schemas with array and record types) into a BigQuery table schema.
Install the CLI:

go install github.com/go-syar/avro-schema-bq@latest

Convert an Avro schema file (.avsc) to a BigQuery schema JSON file:

avro-schema-bq schema.avsc > bq.json
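For reference, schema.avsc stands for any Avro schema file. A minimal illustrative example using the supported record and array types might look like this (the field names here are made up, not part of the tool):

{
  "type": "record",
  "name": "User",
  "fields": [
    {"name": "id", "type": "long"},
    {"name": "emails", "type": {"type": "array", "items": "string"}}
  ]
}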
To create a BigQuery table from an Avro schema, call table.CreateBQTableWithSA with your project ID, dataset ID, table ID, the path to a service account JSON key file (e.g. "service-account.json"), and the path to the Avro schema file:

table.CreateBQTableWithSA(projectID string, datasetID string, tableID string, serviceAccount string, schemaFilePath string) error
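A minimal usage sketch; the import path of the table package is assumed from the module path, and all IDs and file paths below are placeholders to replace with your own:

package main

import (
	"log"

	// Import path assumed from the module path and the package name used above.
	"github.com/go-syar/avro-schema-bq/table"
)

func main() {
	// Placeholder values; substitute your own project, dataset, table,
	// service account key file, and Avro schema file.
	err := table.CreateBQTableWithSA(
		"my-gcp-project",       // projectID
		"my_dataset",           // datasetID
		"my_table",             // tableID
		"service-account.json", // serviceAccount (path to JSON key file)
		"schema.avsc",          // schemaFilePath
	)
	if err != nil {
		log.Fatalln("creating BigQuery table:", err)
	}
}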
To convert a parsed Avro schema (a map[string]interface{}) into BigQuery field schemas directly, use:

schema.ConvertAvroToBigQuery(avroSchema map[string]interface{}) ([]*bigquery.FieldSchema, error)
For example, to read an .avsc file, convert it, and write the resulting BigQuery schema to a JSON file (this snippet belongs inside a function such as main and assumes the encoding/json, fmt, and os imports plus the github.com/go-syar/avro-schema-bq/schema package):

schemaFilePath := "your-schema.avsc" // path to your Avro schema (.avsc) file

// Read the Avro schema file.
avroSchemaContent, err := os.ReadFile(schemaFilePath)
if err != nil {
	fmt.Println("Error reading Avro schema file:", err)
	return
}

// Parse the Avro schema JSON into a generic map.
var avroSchema map[string]interface{}
err = json.Unmarshal(avroSchemaContent, &avroSchema)
if err != nil {
	fmt.Println("Error parsing Avro schema:", err)
	return
}

// Convert the parsed Avro schema into BigQuery field schemas.
bqFields, err := schema.ConvertAvroToBigQuery(avroSchema)
if err != nil {
	fmt.Println("Error converting Avro schema to BigQuery schema:", err)
	return
}

// Marshal the BigQuery schema to indented JSON and write it out.
jsonData, err := json.MarshalIndent(bqFields, "", " ")
if err != nil {
	fmt.Println("Error marshaling BigQuery schema to JSON:", err)
	return
}
err = os.WriteFile("schema/test_data/bq_schema.json", jsonData, 0644)
if err != nil {
	fmt.Println("Error writing JSON data to file:", err)
	return
}