Skip to content

Commit

Permalink
feat: UUID backend changes (#815)
Browse files Browse the repository at this point in the history
* feat: uuid changes

* changes

* linting

* test cases

* fixes

tests

tests

tests

test fix

* fixes

* changes

* changes

* changes

* removing-ui-changes

* comment changes

* session file compatibility

* fix tests
  • Loading branch information
asthamohta committed Apr 30, 2024
1 parent 4a48bcc commit 39d30ae
Show file tree
Hide file tree
Showing 19 changed files with 264 additions and 48 deletions.
2 changes: 2 additions & 0 deletions common/constants/constants.go
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,8 @@ const (
// Metadata table names
SMT_JOB_TABLE string = "SMT_JOB"
SMT_RESOURCE_TABLE string = "SMT_RESOURCE"
// Auto Generated Keys
UUID string = "UUID"
// Default gcs path of the Dataflow template.
DEFAULT_TEMPLATE_PATH string = "gs://dataflow-templates/latest/flex/Cloud_Datastream_to_Spanner"
)
35 changes: 17 additions & 18 deletions internal/convert.go
Original file line number Diff line number Diff line change
Expand Up @@ -223,27 +223,26 @@ type MonitoringResources struct {
type ShardResources struct {
DatastreamResources DatastreamResources
PubsubResources PubsubResources
DataflowResources DataflowResources
GcsResources GcsResources
DataflowResources DataflowResources
GcsResources GcsResources
MonitoringResources MonitoringResources
}


// Stores information related to the streaming migration process.
type streamingStats struct {
Streaming bool // Flag for confirmation of streaming migration.
TotalRecords map[string]map[string]int64 // Tablewise count of records received for processing, broken down by record type i.e. INSERT, MODIFY & REMOVE.
BadRecords map[string]map[string]int64 // Tablewise count of records not converted successfully, broken down by record type.
DroppedRecords map[string]map[string]int64 // Tablewise count of records successfully converted but failed to written on Spanner, broken down by record type.
SampleBadRecords []string // Records that generated errors during conversion.
SampleBadWrites []string // Records that faced errors while writing to Cloud Spanner.
DatastreamResources DatastreamResources
DataflowResources DataflowResources
PubsubResources PubsubResources
GcsResources GcsResources
MonitoringResources MonitoringResources
ShardToShardResourcesMap map[string]ShardResources
AggMonitoringResources MonitoringResources
Streaming bool // Flag for confirmation of streaming migration.
TotalRecords map[string]map[string]int64 // Tablewise count of records received for processing, broken down by record type i.e. INSERT, MODIFY & REMOVE.
BadRecords map[string]map[string]int64 // Tablewise count of records not converted successfully, broken down by record type.
DroppedRecords map[string]map[string]int64 // Tablewise count of records successfully converted but failed to written on Spanner, broken down by record type.
SampleBadRecords []string // Records that generated errors during conversion.
SampleBadWrites []string // Records that faced errors while writing to Cloud Spanner.
DatastreamResources DatastreamResources
DataflowResources DataflowResources
PubsubResources PubsubResources
GcsResources GcsResources
MonitoringResources MonitoringResources
ShardToShardResourcesMap map[string]ShardResources
AggMonitoringResources MonitoringResources
}

type PubsubCfg struct {
Expand Down Expand Up @@ -439,7 +438,7 @@ func (conv *Conv) AddShardIdColumn() {
colName := conv.buildColumnNameWithBase(t, ShardIdColumn)
columnId := GenerateColumnId()
ct.ColIds = append(ct.ColIds, columnId)
ct.ColDefs[columnId] = ddl.ColumnDef{Name: colName, Id: columnId, T: ddl.Type{Name: ddl.String, Len: 50}, NotNull: false}
ct.ColDefs[columnId] = ddl.ColumnDef{Name: colName, Id: columnId, T: ddl.Type{Name: ddl.String, Len: 50}, NotNull: false, AutoGen: ddl.AutoGenCol{Name: "", GenerationType: ""}}
ct.ShardIdColumn = columnId
conv.SpSchema[t] = ct
var issues []SchemaIssue
Expand Down Expand Up @@ -475,7 +474,7 @@ func (conv *Conv) AddPrimaryKeys() {
k := conv.buildColumnNameWithBase(t, SyntheticPrimaryKey)
columnId := GenerateColumnId()
ct.ColIds = append(ct.ColIds, columnId)
ct.ColDefs[columnId] = ddl.ColumnDef{Name: k, Id: columnId, T: ddl.Type{Name: ddl.String, Len: 50}}
ct.ColDefs[columnId] = ddl.ColumnDef{Name: k, Id: columnId, T: ddl.Type{Name: ddl.String, Len: 50}, AutoGen: ddl.AutoGenCol{Name: "", GenerationType: ""}}
ct.PrimaryKeys = []ddl.IndexKey{{ColId: columnId, Order: 1}}
conv.SyntheticPKeys[t] = SyntheticPKey{columnId, 0}
addMissingPrimaryKeyWarning(ct.Id, columnId, conv, MissingPrimaryKey)
Expand Down
16 changes: 11 additions & 5 deletions schema/schema.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,11 +47,11 @@ type Table struct {
// Column represents a database column.
// TODO: add support for foreign keys.
type Column struct {
Name string
Type Type
NotNull bool
Ignored Ignored
Id string
Name string
Type Type
NotNull bool
Ignored Ignored
Id string
}

// ForeignKey represents a foreign key.
Expand Down Expand Up @@ -119,6 +119,12 @@ type Ignored struct {
AutoIncrement bool
}

// AutoGenCol describes how a column's value is auto-generated, if at all.
// The zero value means the column is not auto-generated.
// NOTE(review): this mirrors spanner/ddl.AutoGenCol — keep the two in sync.
type AutoGenCol struct {
	// Name of the generator, e.g. "UUID". Empty when not auto-generated.
	Name string
	// Type of autogenerated column, example, pre-defined(uuid) or user-defined(sequence)
	GenerationType string
}

// Print converts ty to a string suitable for printing.
func (ty Type) Print() string {
s := ty.Name
Expand Down
4 changes: 4 additions & 0 deletions sources/common/toddl.go
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,10 @@ func (ss *SchemaToSpannerImpl) SchemaToSpannerDDLHelper(conv *internal.Conv, tod
NotNull: isNotNull,
Comment: "From: " + quoteIfNeeded(srcCol.Name) + " " + srcCol.Type.Print(),
Id: srcColId,
AutoGen: ddl.AutoGenCol{
Name: "",
GenerationType: "",
},
}
if !checkIfColumnIsPartOfPK(srcColId, srcTable.PrimaryKeys) {
totalNonKeyColumnSize += getColumnSize(ty.Name, ty.Len)
Expand Down
25 changes: 24 additions & 1 deletion spanner/ddl/ast.go
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,7 @@ type ColumnDef struct {
NotNull bool
Comment string
Id string
AutoGen AutoGenCol
}

// Config controls how AST nodes are printed (aka unparsed).
Expand Down Expand Up @@ -231,8 +232,10 @@ func (cd ColumnDef) PrintColumnDef(c Config) (string, string) {
var s string
if c.SpDialect == constants.DIALECT_POSTGRESQL {
s = fmt.Sprintf("%s %s", c.quote(cd.Name), cd.T.PGPrintColumnDefType())
s += cd.AutoGen.PGPrintAutoGenCol()
} else {
s = fmt.Sprintf("%s %s", c.quote(cd.Name), cd.T.PrintColumnDefType())
s += cd.AutoGen.PrintAutoGenCol()
}
if cd.NotNull {
s += " NOT NULL"
Expand Down Expand Up @@ -375,6 +378,26 @@ type CreateIndex struct {
// interleaving clauses yet, so we omit them for now.
}

// AutoGenCol describes how a column's value is auto-generated, if at all.
// The zero value means the column has no auto-generation clause, and both
// Print helpers below emit nothing for it.
type AutoGenCol struct {
	// Name of the generator, e.g. constants.UUID. Empty when not auto-generated.
	Name string
	// Type of autogenerated column, example, pre-defined(uuid) or user-defined(sequence)
	GenerationType string
}

// PrintAutoGenCol unparses the auto-generation clause of a GoogleSQL column
// definition. For a pre-defined UUID column it returns the DEFAULT clause
// (with a leading space, ready for concatenation); otherwise it returns "".
func (agc AutoGenCol) PrintAutoGenCol() string {
	isPredefinedUUID := agc.Name == constants.UUID && agc.GenerationType == "Pre-defined"
	if !isPredefinedUUID {
		return ""
	}
	return " DEFAULT (GENERATE_UUID())"
}

// PGPrintAutoGenCol unparses the auto-generation clause of a PostgreSQL-dialect
// column definition. For a pre-defined UUID column it returns the DEFAULT
// clause (with a leading space, ready for concatenation); otherwise "".
func (agc AutoGenCol) PGPrintAutoGenCol() string {
	isPredefinedUUID := agc.Name == constants.UUID && agc.GenerationType == "Pre-defined"
	if !isPredefinedUUID {
		return ""
	}
	return " DEFAULT (spanner.generate_uuid())"
}

// PrintCreateIndex unparses a CREATE INDEX statement.
func (ci CreateIndex) PrintCreateIndex(ct CreateTable, c Config) string {
var keys []string
Expand All @@ -400,7 +423,7 @@ func (ci CreateIndex) PrintCreateIndex(ct CreateTable, c Config) string {
if ci.StoredColumnIds != nil {
storedColumns := []string{}
for _, colId := range ci.StoredColumnIds {
if (!isStoredColumnKeyPartOfPrimaryKey(ct, colId)) {
if !isStoredColumnKeyPartOfPrimaryKey(ct, colId) {
storedColumns = append(storedColumns, c.quote(ct.ColDefs[colId].Name))
}
}
Expand Down
26 changes: 26 additions & 0 deletions spanner/ddl/ast_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -408,6 +408,32 @@ func TestPrintForeignKeyAlterTable(t *testing.T) {
}
}

// TestPrintAutoGenCol checks GoogleSQL DEFAULT-clause generation: a
// pre-defined UUID column yields GENERATE_UUID(), and a column without
// auto-generation yields the empty string.
func TestPrintAutoGenCol(t *testing.T) {
	cases := []struct {
		input AutoGenCol
		want  string
	}{
		{input: AutoGenCol{Name: constants.UUID, GenerationType: "Pre-defined"}, want: " DEFAULT (GENERATE_UUID())"},
		{input: AutoGenCol{Name: "", GenerationType: ""}, want: ""},
	}
	for _, c := range cases {
		assert.Equal(t, c.want, c.input.PrintAutoGenCol())
	}
}

// TestPGPrintAutoGenCol checks PostgreSQL-dialect DEFAULT-clause generation:
// a pre-defined UUID column yields spanner.generate_uuid(), and a column
// without auto-generation yields the empty string.
func TestPGPrintAutoGenCol(t *testing.T) {
	cases := []struct {
		input AutoGenCol
		want  string
	}{
		{input: AutoGenCol{Name: constants.UUID, GenerationType: "Pre-defined"}, want: " DEFAULT (spanner.generate_uuid())"},
		{input: AutoGenCol{Name: "", GenerationType: ""}, want: ""},
	}
	for _, c := range cases {
		assert.Equal(t, c.want, c.input.PGPrintAutoGenCol())
	}
}

func TestGetDDL(t *testing.T) {
s := Schema{
"t1": CreateTable{
Expand Down
2 changes: 1 addition & 1 deletion ui/dist/ui/index.html

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -169,8 +169,7 @@ <h3 class="title">
EDIT
</button>
</div>
<!--[TODO:khajanchi] Remove the predicate && false once transformations is in place-->
<div *ngIf="!isEditMode && !currentObject.isDeleted && false">
<div *ngIf="!isEditMode && !currentObject.isDeleted">
<button mat-stroked-button color="primary" (click)="addNewColumn()"
*ngIf="currentObject!.isSpannerNode">
<mat-icon class="edit-icon">edit</mat-icon>
Expand Down
4 changes: 4 additions & 0 deletions ui/src/app/services/fetch/fetch.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,10 @@ export class FetchService {
return this.http.get(`${this.url}/typemap`)
}

// Fetches the map of Spanner types to their available auto-generated key
// options (e.g. UUID) from the backend's /autoGenMap endpoint.
getAutoGenMap() {
  return this.http.get(`${this.url}/autoGenMap`)
}

getSpannerDefaultTypeMap() {
return this.http.get(`${this.url}/spannerDefaultTypeMap`)
}
Expand Down
64 changes: 64 additions & 0 deletions webv2/api/schema.go
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@ var postgresTypeMap = make(map[string][]types.TypeIssue)
var sqlserverTypeMap = make(map[string][]types.TypeIssue)
var oracleTypeMap = make(map[string][]types.TypeIssue)

var autoGenMap = make(map[string][]types.AutoGen)

func init() {
sessionState := session.GetSessionState()
utilities.InitObjectId()
Expand Down Expand Up @@ -335,6 +337,23 @@ func GetTypeMap(w http.ResponseWriter, r *http.Request) {
json.NewEncoder(w).Encode(filteredTypeMap)
}

// GetAutoGenMap writes the map from Spanner type name to the auto-generation
// options available for columns of that type, for the current session.
// It responds 404 when no schema conversion exists or no source driver is set.
func GetAutoGenMap(w http.ResponseWriter, r *http.Request) {
	sessionState := session.GetSessionState()
	if sessionState.Conv == nil || sessionState.Driver == "" {
		// The message is a constant; calling fmt.Sprintf with no formatting
		// directives is flagged by go vet, so pass the string directly.
		http.Error(w, "Schema is not converted or Driver is not configured properly. Please retry converting the database to Spanner.", http.StatusNotFound)
		return
	}
	sessionState.Conv.ConvLock.Lock()
	defer sessionState.Conv.ConvLock.Unlock()
	// Auto-generated keys are currently only populated for MySQL sources;
	// for other drivers the map is returned as-is.
	switch sessionState.Driver {
	case constants.MYSQL:
		initializeAutoGenMap()
	}
	w.WriteHeader(http.StatusOK)
	json.NewEncoder(w).Encode(autoGenMap)
}

// GetTableWithErrors checks the errors in the spanner schema
// and returns a list of tables with errors
func GetTableWithErrors(w http.ResponseWriter, r *http.Request) {
Expand Down Expand Up @@ -1534,3 +1553,48 @@ func addShardIdToForeignKeyPerTable(isAddedAtFirst bool, table ddl.CreateTable)
sessionState.Conv.SpSchema[table.Id].ForeignKeys[i] = fk
}
}

// initializeAutoGenMap rebuilds the package-level autoGenMap so it lists the
// auto-generation options valid for the current session's Spanner dialect.
func initializeAutoGenMap() {
	sessionState := session.GetSessionState()
	autoGenMap = make(map[string][]types.AutoGen)
	if sessionState.Conv.SpDialect == constants.DIALECT_POSTGRESQL {
		makePostgresDialectAutoGenMap()
	} else {
		// GoogleSQL is the default dialect.
		makeGoogleSqlDialectAutoGenMap()
	}
}

// makePostgresDialectAutoGenMap populates autoGenMap for the PostgreSQL
// dialect: every supported type gets the "no auto-generation" option, and
// varchar columns additionally support a pre-defined UUID default.
func makePostgresDialectAutoGenMap() {
	for _, srcTypeName := range []string{ddl.Bool, ddl.Date, ddl.Float64, ddl.Int64, ddl.PGBytea, ddl.PGFloat8, ddl.PGInt8, ddl.PGJSONB, ddl.PGTimestamptz, ddl.PGVarchar, ddl.Numeric} {
		autoGenMap[srcTypeName] = []types.AutoGen{
			{
				Name:           "",
				GenerationType: "",
			},
		}
	}
	// Use constants.UUID (not a bare "UUID" literal) so the name stays in
	// sync with the comparison in ddl.AutoGenCol's Print helpers.
	autoGenMap[ddl.PGVarchar] = append(autoGenMap[ddl.PGVarchar],
		types.AutoGen{
			Name:           constants.UUID,
			GenerationType: "Pre-defined",
		})
}

// makeGoogleSqlDialectAutoGenMap populates autoGenMap for the GoogleSQL
// dialect: every supported type gets the "no auto-generation" option, and
// STRING columns additionally support a pre-defined UUID default.
func makeGoogleSqlDialectAutoGenMap() {
	for _, srcTypeName := range []string{ddl.Bool, ddl.Bytes, ddl.Date, ddl.Float64, ddl.Int64, ddl.String, ddl.Timestamp, ddl.Numeric, ddl.JSON} {
		autoGenMap[srcTypeName] = []types.AutoGen{
			{
				Name:           "",
				GenerationType: "",
			},
		}
	}
	// Use constants.UUID (not a bare "UUID" literal) so the name stays in
	// sync with the comparison in ddl.AutoGenCol's Print helpers.
	autoGenMap[ddl.String] = append(autoGenMap[ddl.String],
		types.AutoGen{
			Name:           constants.UUID,
			GenerationType: "Pre-defined",
		})
}
Loading

0 comments on commit 39d30ae

Please sign in to comment.