6 changes: 3 additions & 3 deletions README.md
@@ -130,9 +130,9 @@ For more examples how to use the tool, please check out the [wiki](https://githu

# Known Issues

1. If you have a unique index on a foreign key column then there are chance the constraint creation would fail, since mockd doesn't pick up unique value for foriegn key value it picks up random values from the reference table.
2. CHECK constraints isn't supported , so recreating check constraints would fail, use `custom` subcommand to control the data being inserted
3. On Greenplum Datbase tables are not supported (due to check constraint issues defined above), so use the `custom` sub command to define the data to be inserted to the column with check constraints
1. If you have a composite unique index where one column is part of a foreign key, then there is a chance the constraint creation would fail.
2. Fixing CHECK constraints isn't supported due to their complexity, so recreating check constraints would fail; use the `custom` subcommand to control the data being inserted
3. On Greenplum Database, partition tables are not supported (due to the check constraint issue described above), so use the `custom` subcommand to define the data to be inserted into columns with check constraints
4. Custom data types are not supported, use the `custom` subcommand to control the data for those custom data types

# Developers / Collaboration
4 changes: 4 additions & 0 deletions cmd.go
@@ -132,6 +132,10 @@ var tablesCmd = &cobra.Command{
if IsStringEmpty(cmdOptions.Tab.SchemaName) {
Fatalf("Cannot have the schema name empty, please check the arguments")
}
// If the number of columns is greater than the Postgres limit, error out
if cmdOptions.Tab.MaxColumns > 1600 {
Fatalf("Postgres tables cannot have more than 1600 columns, check the arguments")
}
},
PostRun: func(cmd *cobra.Command, args []string) {
Info("Successfully completed running the table sub command")
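For illustration, a minimal standalone sketch of the kind of pre-run validation added above. The `tabOptions` struct, `validate` function, and values here are hypothetical stand-ins, not the tool's real cobra wiring; only the 1600-column Postgres limit comes from the change itself.

```go
package main

import (
	"fmt"
	"os"
)

// Hypothetical stand-in for the tool's table sub-command options.
type tabOptions struct {
	SchemaName string
	MaxColumns int
}

// validate mirrors the two checks: non-empty schema and the Postgres column limit.
func validate(opts tabOptions) error {
	if opts.SchemaName == "" {
		return fmt.Errorf("cannot have the schema name empty, please check the arguments")
	}
	if opts.MaxColumns > 1600 { // Postgres hard limit on columns per table
		return fmt.Errorf("postgres tables cannot have more than 1600 columns, check the arguments")
	}
	return nil
}

func main() {
	if err := validate(tabOptions{SchemaName: "public", MaxColumns: 2000}); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```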
4 changes: 2 additions & 2 deletions constraintsBackup.go
@@ -103,9 +103,9 @@ func RemoveConstraints(table string) {
for _, c := range constraints {
// Generate the DROP DDL command
if c.Constrainttype == "index" { // if the constraint is a index
statement = fmt.Sprintf("DROP INDEX %s CASCADE;", c.Constraintname)
statement = fmt.Sprintf("DROP INDEX \"%s\" CASCADE;", c.Constraintname)
} else { // if the constraint is a constraint
statement = fmt.Sprintf("ALTER TABLE %s DROP CONSTRAINT %s CASCADE;", table, c.Constraintname)
statement = fmt.Sprintf("ALTER TABLE %s DROP CONSTRAINT \"%s\" CASCADE;", table, c.Constraintname)
}

// Execute the statement
53 changes: 45 additions & 8 deletions constraintsRestore.go
@@ -32,7 +32,7 @@ func FixConstraints() {
case v == "UNIQUE": // Run the same logic as primary key
fixPKey(con)
//case v == "CHECK": // TODO: It's hard to predict the check constraint ATM
// fixCheck(db, con)
// fixCheck(db, con)
case v == "FOREIGN":
fixFKey(con)
}
@@ -196,12 +196,17 @@ func recreateAllConstraints() {
_, err := ExecuteDB(content)
if err != nil && !IgnoreErrorString(fmt.Sprintf("%s", err)) {
Debugf("Error creating constraint %s, err: %v", content, err)
err = WriteToFile(failedConstraintsFile, content+"\n")
if err != nil {
Fatalf("Error when saving the failed restore to file %s, err %v",
failedConstraintsFile, err)
// Attempt to recreate the constraint again after deleting the
// violating rows
successOrFailure := deleteViolatingPkOrUkConstriants(content)
if !successOrFailure { // didn't succeed, ask the user to fix it manually
err = WriteToFile(failedConstraintsFile, content+"\n")
if err != nil {
Fatalf("Error when saving the failed restore to file %s, err %v",
failedConstraintsFile, err)
}
AnyError = true
}
AnyError = true
}
bar.Add(1)
}
@@ -210,7 +215,39 @@
}

if AnyError {
Warnf("There have been issue creating few constraints, all the "+
"constraints that failed has been saved on to file: %s", failedConstraintsFile)
Warnf("There have been issue creating few constraints and would need manual cleanup at your end, "+
"all the constraints that failed has been saved on to file: %s", failedConstraintsFile)
}
}

// We tried to fix the primary key violation, but because of the order in
// which we fix the constraints (PK or UK first, then FK), there is a chance
// we inject duplicate keys again, e.g. if there is a PK (or UK) on a FK
// reference table. So the aim here is to delete the rows that violate the
// constraint and hope that helps in recreating it. Yes, we lose those rows,
// but at least that helps to recreate the constraints (fingers crossed :) )
func deleteViolatingPkOrUkConstriants(con string) bool {
Debugf("Attempting to run the constraint command %s second time, after deleting violating rows", con)
// does the DDL contain PK or UK keyword then do the following
// rest send them back for user to fix it.
if isSubStringAvailableOnString(con, "ADD CONSTRAINT.*PRIMARY KEY|ADD CONSTRAINT.*UNIQUE") {
column, _ := ColExtractor(con, `\(.*?\)`)
table, _ := ColExtractor(con, `ALTER TABLE(.*)ADD CONSTRAINT`)
table = strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(table, "ALTER TABLE"), "ADD CONSTRAINT"))
column = strings.Trim(column, "()")
err := deleteViolatingConstraintKeys(table, column)
if err != nil { // we failed to delete the constraint-violating rows
Debugf("Error when deleting the constraint-violating rows: %v", err)
return false
}
_, err = ExecuteDB(con) // retry to create the constraint again
if err != nil { // we failed to recreate the constraint
Debugf("Error when 2nd attempt to recreate constraint: %v", err)
return false
}
// successfully cleaned it up
return true
}
return false
}
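The retry path above leans on two regular expressions to recover the table and the key column list from the constraint DDL. As a rough standalone sketch (using the standard library `regexp` package directly instead of the project's `ColExtractor` helper, and a hypothetical DDL string), this is how those patterns pull the pieces out:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Hypothetical PK constraint DDL of the kind the restore path replays.
	con := `ALTER TABLE "public"."orders" ADD CONSTRAINT orders_pkey PRIMARY KEY (order_id);`

	// Same two patterns used above: the column list inside parentheses,
	// and the table name between ALTER TABLE and ADD CONSTRAINT.
	column := strings.Trim(regexp.MustCompile(`\(.*?\)`).FindString(con), "()")
	tableMatch := regexp.MustCompile(`ALTER TABLE(.*)ADD CONSTRAINT`).FindStringSubmatch(con)
	table := strings.TrimSpace(tableMatch[1])

	fmt.Println(table)  // "public"."orders"
	fmt.Println(column) // order_id
}
```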
9 changes: 9 additions & 0 deletions helpers.go
@@ -209,6 +209,15 @@ func BracketsExists(dt string) bool {
}
}

// Does the string match the given regex criteria
func isSubStringAvailableOnString(s string, criteria string) bool {
re := regexp.MustCompile(criteria)
return re.MatchString(s)
}

// Built a method to find if the value exists within a slice
func StringContains(item string, slice []string) bool {
set := make(map[string]struct{}, len(slice))
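A quick usage sketch of the `isSubStringAvailableOnString` helper added above. The DDL string is hypothetical; the pattern is the one the constraint restore path matches against.

```go
package main

import (
	"fmt"
	"regexp"
)

// Local copy of the helper added above, so this sketch compiles on its own.
func isSubStringAvailableOnString(s string, criteria string) bool {
	re := regexp.MustCompile(criteria)
	return re.MatchString(s)
}

func main() {
	ddl := `ALTER TABLE "public"."orders" ADD CONSTRAINT orders_pkey PRIMARY KEY (order_id);`
	// The pattern used in constraintsRestore.go to spot PK/UK constraint DDL.
	fmt.Println(isSubStringAvailableOnString(ddl, "ADD CONSTRAINT.*PRIMARY KEY|ADD CONSTRAINT.*UNIQUE")) // true
	fmt.Println(isSubStringAvailableOnString(ddl, "FOREIGN KEY"))                                        // false
}
```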
40 changes: 33 additions & 7 deletions sql.go
@@ -244,9 +244,11 @@ func GetPGConstraintDDL(conntype string) []DBConstraints {
Debugf("Extracting the DDL of the %s constraints", conntype)
var result []DBConstraints
query := `
SELECT n.nspname
|| '.'
|| c.relname tablename,
SELECT '"'
|| n.nspname
|| '"."'
|| c.relname
|| '"' tablename,
con.conname constraintname,
pg_catalog.Pg_get_constraintdef(con.oid, true) constraintKey
FROM pg_catalog.pg_class c,
@@ -277,10 +279,12 @@ func GetPGIndexDDL() []DBIndex {
Debugf("Extracting the unique indexes")
var result []DBIndex
query := `
SELECT schemaname
|| '.'
|| tablename tablename,
indexdef indexdef
SELECT '"'
|| schemaname
|| '"."'
|| tablename
|| '"' tablename,
indexdef indexdef
FROM pg_indexes
WHERE schemaname IN (SELECT nspname
FROM pg_namespace
@@ -546,3 +550,25 @@
Fatalf("Error when updating the foreign key for table %s, err: %v", key.Table, err)
}
}

// Delete the rows with duplicate key values that violate the constraint
func deleteViolatingConstraintKeys(tab string, column string) error {
Debugf("Deleting the rows the table that violates the constraints: %s:(%s)", tab, column )
query := `
DELETE
FROM %[1]s
WHERE (
%[2]s) IN
(
SELECT %[2]s
FROM %[1]s
GROUP BY %[2]s
HAVING count(*) > 1);
`
query = fmt.Sprintf(query, tab, column)
_, err := ExecuteDB(query)
if err != nil {
return err
}
return nil
}
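For illustration, a minimal standalone sketch (with hypothetical table and column names) that simply renders the template above. Note that the `HAVING count(*) > 1` subquery selects every key value that appears more than once, so all rows carrying a duplicated key are deleted, not just the extra copies.

```go
package main

import "fmt"

// Same dedupe template as deleteViolatingConstraintKeys above: delete every
// row whose key value occurs more than once in the table.
const dedupeTemplate = `
DELETE
FROM %[1]s
WHERE (
%[2]s) IN
(
SELECT %[2]s
FROM %[1]s
GROUP BY %[2]s
HAVING count(*) > 1);
`

func main() {
	// Hypothetical key: "public"."orders"(order_id).
	fmt.Printf(dedupeTemplate, `"public"."orders"`, "order_id")
}
```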
2 changes: 1 addition & 1 deletion worker.go
@@ -180,7 +180,7 @@ func GenerateTableName(tab, schema string) string {
// Throw a warning if there are skipped tables
func skipTablesWarning() {
if len(skippedTab) > 0 {
Warnf("These tables are skipped since these datatypes are not supported by %s: %s",
Warnf("These tables are skipped since these data types are not supported by %s: %s",
programName, strings.Join(skippedTab, ","))
}
}