From 3bf90a9b4b1c6e4c6e33610d2e1b7ee9ff72cf7d Mon Sep 17 00:00:00 2001 From: Avi Deitcher Date: Wed, 23 Apr 2025 13:54:48 +0300 Subject: [PATCH] support for dumping triggers and functions and procedures Signed-off-by: Avi Deitcher --- cmd/dump.go | 14 +++ docs/configuration.md | 2 + go.mod | 2 +- go.sum | 6 + pkg/core/dump.go | 4 + pkg/core/dumpoptions.go | 2 + pkg/database/dump.go | 4 + pkg/database/mysql/dump.go | 243 +++++++++++++++++++++++++++++++++--- pkg/database/mysql/table.go | 9 +- pkg/database/mysql/view.go | 58 +++++++-- test/backup_test.go | 94 ++++++++++++-- 11 files changed, 394 insertions(+), 44 deletions(-) diff --git a/cmd/dump.go b/cmd/dump.go index 48ae09f4..8d6b07ea 100644 --- a/cmd/dump.go +++ b/cmd/dump.go @@ -111,6 +111,15 @@ func dumpCmd(passedExecs execs, cmdConfig *cmdConfiguration) (*cobra.Command, er if !v.IsSet("compact") && dumpConfig != nil && dumpConfig.Compact != nil { compact = *dumpConfig.Compact } + // should we dump triggers and functions and procedures? + triggers := v.GetBool("triggers") + if !v.IsSet("triggers") && dumpConfig != nil && dumpConfig.Triggers != nil { + triggers = *dumpConfig.Triggers + } + routines := v.GetBool("routines") + if !v.IsSet("routines") && dumpConfig != nil && dumpConfig.Routines != nil { + routines = *dumpConfig.Routines + } maxAllowedPacket := v.GetInt("max-allowed-packet") if !v.IsSet("max-allowed-packet") && dumpConfig != nil && dumpConfig.MaxAllowedPacket != nil && *dumpConfig.MaxAllowedPacket != 0 { maxAllowedPacket = *dumpConfig.MaxAllowedPacket @@ -242,6 +251,8 @@ func dumpCmd(passedExecs execs, cmdConfig *cmdConfiguration) (*cobra.Command, er PostBackupScripts: postBackupScripts, SuppressUseDatabase: noDatabaseName, Compact: compact, + Triggers: triggers, + Routines: routines, MaxAllowedPacket: maxAllowedPacket, Run: uid, FilenamePattern: filenamePattern, @@ -318,6 +329,9 @@ S3: If it is a URL of the format s3://bucketname/path then it will connect via S // max-allowed-packet size flags.Int("max-allowed-packet", defaultMaxAllowedPacket, "Maximum size of the buffer for client/server communication, similar to mysqldump's max_allowed_packet. 
0 means to use the default size.") + // whether to include triggers and functions + flags.Bool("triggers-and-functions", false, "Whether to include triggers and functions in the dump.") + cmd.MarkFlagsMutuallyExclusive("once", "cron") cmd.MarkFlagsMutuallyExclusive("once", "begin") cmd.MarkFlagsMutuallyExclusive("once", "frequency") diff --git a/docs/configuration.md b/docs/configuration.md index b396fb18..d6438406 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -88,6 +88,7 @@ The following are the environment variables, CLI flags and configuration file op | SMB username, used only if a target does not have one | BRP | `smb-user` | `SMB_USER` | `dump.targets[smb-target].username` | | | SMB password, used only if a target does not have one | BRP | `smb-pass` | `SMB_PASS` | `dump.targets[smb-target].password` | | | compression to use, one of: `bzip2`, `gzip` | BP | `compression` | `DB_DUMP_COMPRESSION` | `dump.compression` | `gzip` | +| whether to include triggers, procedures and functions | B | `triggers-and-functions` | `DB_DUMP_TRIGGERS_AND_FUNCTIONS` | `dump.triggersAndFunctions` | `false` | | when in container, run the dump or restore with `nice`/`ionice` | BR | `` | `NICE` | `` | `false` | | filename to save the target backup file | B | `dump --filename-pattern` | `DB_DUMP_FILENAME_PATTERN` | `dump.filenamePattern` | | | directory with scripts to execute before backup | B | `dump --pre-backup-scripts` | `DB_DUMP_PRE_BACKUP_SCRIPTS` | `dump.scripts.preBackup` | in container, `/scripts.d/pre-backup/` | @@ -135,6 +136,7 @@ for details of each. * `once`: run once and exit * `compression`: the compression to use * `compact`: compact the dump + * `triggersAndFunctions`: include triggers and functions and procedures in the dump * `maxAllowedPacket`: max packet size * `filenamePattern`: the filename pattern * `scripts`: diff --git a/go.mod b/go.mod index 3feb0fb5..7af50e5a 100644 --- a/go.mod +++ b/go.mod @@ -31,7 +31,7 @@ require ( ) require ( - github.com/databacker/api/go/api v0.0.0-20250418100420-12e1adda1303 + github.com/databacker/api/go/api v0.0.0-20250423183243-7775066c265e github.com/google/go-cmp v0.6.0 go.opentelemetry.io/otel v1.31.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 diff --git a/go.sum b/go.sum index 42323cac..c22d19ae 100644 --- a/go.sum +++ b/go.sum @@ -81,6 +81,12 @@ github.com/databacker/api/go/api v0.0.0-20250418091750-e67e3226ca5f h1:vuPsDEgli github.com/databacker/api/go/api v0.0.0-20250418091750-e67e3226ca5f/go.mod h1:bQhbl71Lk1ATni0H+u249hjoQ8ShAdVNcNjnw6z+SbE= github.com/databacker/api/go/api v0.0.0-20250418100420-12e1adda1303 h1:TVLyJzdvDvWIEs1/v6G0rQPpZeUsArQ7skzicjfCV8I= github.com/databacker/api/go/api v0.0.0-20250418100420-12e1adda1303/go.mod h1:bQhbl71Lk1ATni0H+u249hjoQ8ShAdVNcNjnw6z+SbE= +github.com/databacker/api/go/api v0.0.0-20250423104730-2789787a240e h1:0ITg+YYAjyvM+rXirvZvx/PBLhMhHG+Nj5h+1flzWwY= +github.com/databacker/api/go/api v0.0.0-20250423104730-2789787a240e/go.mod h1:bQhbl71Lk1ATni0H+u249hjoQ8ShAdVNcNjnw6z+SbE= +github.com/databacker/api/go/api v0.0.0-20250423151229-1987d37f6e2f h1:JTyrIdH5lGiObGO7qFmON8ACIaCb5es+gjySkL/YWYc= +github.com/databacker/api/go/api v0.0.0-20250423151229-1987d37f6e2f/go.mod h1:bQhbl71Lk1ATni0H+u249hjoQ8ShAdVNcNjnw6z+SbE= +github.com/databacker/api/go/api v0.0.0-20250423183243-7775066c265e h1:5K7IbijS9p+dezx9m45CjFCR2Sf6BfT/tb540aEw66k= +github.com/databacker/api/go/api v0.0.0-20250423183243-7775066c265e/go.mod h1:bQhbl71Lk1ATni0H+u249hjoQ8ShAdVNcNjnw6z+SbE= 
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/pkg/core/dump.go b/pkg/core/dump.go index de504889..c1d7eb66 100644 --- a/pkg/core/dump.go +++ b/pkg/core/dump.go @@ -36,6 +36,8 @@ func (e *Executor) Dump(ctx context.Context, opts DumpOptions) (DumpResults, err compressor := opts.Compressor encryptor := opts.Encryptor compact := opts.Compact + triggers := opts.Triggers + routines := opts.Routines suppressUseDatabase := opts.SuppressUseDatabase maxAllowedPacket := opts.MaxAllowedPacket filenamePattern := opts.FilenamePattern @@ -105,6 +107,8 @@ func (e *Executor) Dump(ctx context.Context, opts DumpOptions) (DumpResults, err dbDumpCtx, dbDumpSpan := tracer.Start(ctx, "database_dump") if err := database.Dump(dbDumpCtx, dbconn, database.DumpOpts{ Compact: compact, + Triggers: triggers, + Routines: routines, SuppressUseDatabase: suppressUseDatabase, MaxAllowedPacket: maxAllowedPacket, }, dw); err != nil { diff --git a/pkg/core/dumpoptions.go b/pkg/core/dumpoptions.go index 82883f74..ee9cb5d1 100644 --- a/pkg/core/dumpoptions.go +++ b/pkg/core/dumpoptions.go @@ -19,6 +19,8 @@ type DumpOptions struct { PreBackupScripts string PostBackupScripts string Compact bool + Triggers bool + Routines bool SuppressUseDatabase bool MaxAllowedPacket int Run uuid.UUID diff --git a/pkg/database/dump.go b/pkg/database/dump.go index e5704a00..9f48a797 100644 --- a/pkg/database/dump.go +++ b/pkg/database/dump.go @@ -10,6 +10,8 @@ import ( type DumpOpts struct { Compact bool + Triggers bool + Routines bool SuppressUseDatabase bool MaxAllowedPacket int } @@ -36,6 +38,8 @@ func Dump(ctx context.Context, dbconn Connection, opts DumpOpts, writers []DumpW Schema: schema, Host: dbconn.Host, Compact: opts.Compact, + Triggers: opts.Triggers, + Routines: opts.Routines, SuppressUseDatabase: opts.SuppressUseDatabase, MaxAllowedPacket: opts.MaxAllowedPacket, } diff --git a/pkg/database/mysql/dump.go b/pkg/database/mysql/dump.go index 6e194b68..e31df4ac 100644 --- a/pkg/database/mysql/dump.go +++ b/pkg/database/mysql/dump.go @@ -19,7 +19,10 @@ import ( "context" "database/sql" "errors" + "fmt" "io" + "slices" + "strings" "text/template" "time" ) @@ -41,15 +44,18 @@ type Data struct { LockTables bool Schema string Compact bool + Triggers bool + Routines bool Host string SuppressUseDatabase bool Charset string Collation string - tx *sql.Tx - headerTmpl *template.Template - footerTmpl *template.Template - err error + tx *sql.Tx + headerTmpl *template.Template + footerTmpl *template.Template + routinesHeaderTmpl *template.Template + err error } type metaData struct { @@ -112,6 +118,14 @@ const footerTmpl = `/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; const footerTmplCompact = `` +const routinesHeader = ` +-- +-- Dumping routines for database '{{ .Database }}' +-- +` + +const routinesHeaderCompact = `` + const nullType = "NULL" // Dump data using struct @@ -156,16 +170,16 @@ func (data *Data) Dump() error { return err } - tables, err := data.getTables() + tables, views, err := data.getTables() if err != nil { return err } // Lock all tables before dumping if present - if data.LockTables && len(tables) > 0 { + if data.LockTables && (len(tables) > 0 || len(views) > 0) { var b bytes.Buffer b.WriteString("LOCK TABLES ") - for index, table := range tables { + for index, table := range append(tables, views...) 
{ if index != 0 { b.WriteString(",") } @@ -181,11 +195,64 @@ func (data *Data) Dump() error { }() } + // get the triggers for the current schema, structured by table + var triggers map[string][]string + if data.Triggers { + triggers, err = data.dumpTriggers() + if err != nil { + return err + } + } + + slices.SortFunc(tables, func(a, b Table) int { + return strings.Compare(strings.ToLower(a.Name()), strings.ToLower(b.Name())) + }) + slices.SortFunc(views, func(a, b Table) int { + return strings.Compare(strings.ToLower(a.Name()), strings.ToLower(b.Name())) + }) for _, name := range tables { - if err := data.dumpTable(name); err != nil { + if err := data.dumpTable(name, 0); err != nil { + return err + } + // dump triggers for the current table + if len(triggers) > 0 { + if trigger, ok := triggers[name.Name()]; ok { + for _, t := range trigger { + if _, err := data.Out.Write([]byte(t)); err != nil { + return err + } + } + } + } + } + + // Dump the dummy views, if any + for _, name := range views { + if err := data.dumpTable(name, 0); err != nil { + return err + } + } + + // Dump routines (functions and procedures) + if data.Routines { + if err := data.routinesHeaderTmpl.Execute(data.Out, meta); err != nil { + return err + } + if err := data.dumpFunctions(); err != nil { + return err + } + if err := data.dumpProcedures(); err != nil { return err } } + + // Dump the actual views + for _, name := range views { + if err := data.dumpTable(name, 1); err != nil { + return err + } + } + if data.err != nil { return data.err } @@ -222,14 +289,14 @@ func (data *Data) rollback() error { // MARK: writter methods -func (data *Data) dumpTable(table Table) error { +func (data *Data) dumpTable(table Table, part int) error { if data.err != nil { return data.err } if err := table.Init(); err != nil { return err } - return table.Execute(data.Out, data.Compact) + return table.Execute(data.Out, data.Compact, part) } // MARK: get methods @@ -251,6 +318,13 @@ func (data *Data) getTemplates() (err error) { hTmpl += "\n" } } + + // routines header + var routinesHeaderTmpl = routinesHeader + if data.Compact { + routinesHeaderTmpl = routinesHeaderCompact + } + data.headerTmpl, err = template.New("mysqldumpHeader").Parse(hTmpl) if err != nil { return @@ -260,22 +334,25 @@ func (data *Data) getTemplates() (err error) { if err != nil { return } + + data.routinesHeaderTmpl, err = template.New("mysqldumpRoutinesHeader").Parse(routinesHeaderTmpl) return } -func (data *Data) getTables() ([]Table, error) { - tables := make([]Table, 0) +func (data *Data) getTables() (tables []Table, views []Table, err error) { + tables = make([]Table, 0) + views = make([]Table, 0) rows, err := data.tx.Query("SHOW FULL TABLES") if err != nil { - return nil, err + return nil, nil, err } defer func() { _ = rows.Close() }() for rows.Next() { var tableName, tableType sql.NullString if err := rows.Scan(&tableName, &tableType); err != nil { - return nil, err + return nil, nil, err } if !tableName.Valid || data.isIgnoredTable(tableName.String) { continue @@ -287,14 +364,14 @@ func (data *Data) getTables() ([]Table, error) { } switch tableType.String { case "VIEW": - tables = append(tables, &view{baseTable: table}) + views = append(views, &view{baseTable: table}) case "BASE TABLE": tables = append(tables, &table) default: - return nil, errors.New("unknown table type: " + tableType.String) + return nil, nil, errors.New("unknown table type: " + tableType.String) } } - return tables, rows.Err() + return tables, views, rows.Err() } func (data *Data) 
getCharsetCollections() error { @@ -328,6 +405,138 @@ func (data *Data) isIgnoredTable(name string) bool { return false } +// dumpTriggers dump the triggers for the current schema into a list by table +func (data *Data) dumpTriggers() (map[string][]string, error) { + var triggers = make(map[string][]string) + rows, err := data.tx.Query("SHOW TRIGGERS FROM `" + data.Schema + "`") + if err != nil { + return nil, err + } + defer func() { _ = rows.Close() }() + + for rows.Next() { + var triggerName, event, table, statement, timing, sqlMode, definer, charset, collationConnection, databaseCollection sql.NullString + var created sql.NullTime + if err := rows.Scan(&triggerName, &event, &table, &statement, &timing, &created, &sqlMode, &definer, &charset, &collationConnection, &databaseCollection); err != nil { + return nil, err + } + if !triggerName.Valid || !statement.Valid { + continue + } + var definerUser, definerHost string + if definer.Valid { + // definer is in the format `user`@`host` + // split it into user and host + parts := bytes.Split([]byte(definer.String), []byte{'@'}) + if len(parts) == 2 { + definerUser = string(parts[0]) + definerHost = string(parts[1]) + } else { + definerUser = definer.String + definerHost = "%" + } + } + triggers[table.String] = append(triggers[table.String], ` +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = latin1 */ ; +/*!50003 SET character_set_results = latin1 */ ; +/*!50003 SET collation_connection = latin1_swedish_ci */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION' */ ; +DELIMITER ;; +`+fmt.Sprintf("/*!50003 CREATE*/ /*!50017 DEFINER=`%s`@`%s`*/ /*!50003 TRIGGER `%s` AFTER %s ON `%s` FOR EACH ROW %s */;;", definerUser, definerHost, triggerName.String, event.String, table.String, statement.String)+` +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +`) + } + return triggers, nil +} + +// dumpFunctions dump the functions for the current schema +func (data *Data) dumpFunctions() error { + return data.dumpProceduresOrFunctions("FUNCTION") +} + +// dumpProcedures dump the procedures for the current schema +func (data *Data) dumpProcedures() error { + return data.dumpProceduresOrFunctions("PROCEDURE") +} + +// dumpProceduresOrFunctions dump the procedures or functions for the current schema +func (data *Data) dumpProceduresOrFunctions(t string) error { + createQueries, err := data.getProceduresOrFunctionsCreateQueries(t) + if err != nil { + return err + } + for _, createQuery := range createQueries { + var name, sqlMode, createStmt, charset, collationConnection, databaseCollation sql.NullString + if err := data.tx.QueryRow(createQuery).Scan(&name, &sqlMode, &createStmt, &charset, &collationConnection, &databaseCollation); err != nil { + return err + } + if createStmt.Valid { + // TODO: the first line should only be there if it is full, not compact + var sql string + if !data.Compact { + sql = fmt.Sprintf(` +/*!50003 DROP %s IF EXISTS `+"`%s`"+` */; +`, t, name.String) + } + sql += fmt.Sprintf(` +/*!50003 SET @saved_cs_client = @@character_set_client 
*/ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = latin1 */ ; +/*!50003 SET character_set_results = latin1 */ ; +/*!50003 SET collation_connection = latin1_swedish_ci */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION' */ ; +DELIMITER ;; +%s ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +`, createStmt.String) + if _, err := data.Out.Write([]byte(sql)); err != nil { + return err + } + } + } + return nil +} + +func (data *Data) getProceduresOrFunctionsCreateQueries(t string) ([]string, error) { + query := fmt.Sprintf("SHOW %s STATUS WHERE Db = '%s'", t, data.Schema) + var toGet []string + rows, err := data.tx.Query(query) + if err != nil { + return nil, err + } + defer func() { _ = rows.Close() }() + + // | Db | Name | Type | Language | Definer | Modified | Created | Security_type | Comment | character_set_client | collation_connection | Database Collation | + for rows.Next() { + var ( + db, name, typeDef, language, definer, securityType, comment, charset, collationConnection, databaseCollation sql.NullString + created, modified sql.NullTime + ) + if err := rows.Scan(&db, &name, &typeDef, &language, &definer, &modified, &created, &securityType, &comment, &charset, &collationConnection, &databaseCollation); err != nil { + return nil, err + } + if name.Valid && typeDef.Valid { + createQuery := fmt.Sprintf("SHOW CREATE %s `%s`", typeDef.String, name.String) + toGet = append(toGet, createQuery) + } + } + return toGet, nil +} + func (meta *metaData) updateMetadata(data *Data) (err error) { var serverVersion sql.NullString err = data.tx.QueryRow("SELECT version()").Scan(&serverVersion) diff --git a/pkg/database/mysql/table.go b/pkg/database/mysql/table.go index 144e3a74..d63c8235 100644 --- a/pkg/database/mysql/table.go +++ b/pkg/database/mysql/table.go @@ -43,10 +43,12 @@ type Table interface { Next() bool RowValues() string RowBuffer() *bytes.Buffer - Execute(io.Writer, bool) error + Execute(io.Writer, bool, int) error Stream() <-chan string } +var _ Table = &baseTable{} + type baseTable struct { name string err error @@ -335,7 +337,10 @@ func (table *baseTable) Stream() <-chan string { return valueOut } -func (table *baseTable) Execute(out io.Writer, compact bool) error { +func (table *baseTable) Execute(out io.Writer, compact bool, part int) error { + if part > 0 { + return fmt.Errorf("part %d is not supported for tables", part) + } tmpl := tableFullTemplate if compact { tmpl = tableCompactTemplate diff --git a/pkg/database/mysql/view.go b/pkg/database/mysql/view.go index cda1b5c4..7002d687 100644 --- a/pkg/database/mysql/view.go +++ b/pkg/database/mysql/view.go @@ -9,32 +9,52 @@ import ( "text/template" ) +var _ Table = &view{} + type view struct { baseTable charset string collation string } -var viewFullTemplate, viewCompactTemplate *template.Template +var viewFullTemplate0, viewCompactTemplate0, viewFullTemplate1, viewCompactTemplate1 *template.Template func init() { tmpl, err := template.New("mysqldumpView").Funcs(template.FuncMap{ "sub": sub, "esc": esc, - }).Parse(viewTmpl) + }).Parse(viewTmpl0) if err != nil { 
panic(fmt.Errorf("could not parse view template: %w", err)) } - viewFullTemplate = tmpl + viewFullTemplate0 = tmpl + + tmpl, err = template.New("mysqldumpView").Funcs(template.FuncMap{ + "sub": sub, + "esc": esc, + }).Parse(viewTmpl1) + if err != nil { + panic(fmt.Errorf("could not parse view template: %w", err)) + } + viewFullTemplate1 = tmpl tmpl, err = template.New("mysqldumpViewCompact").Funcs(template.FuncMap{ "sub": sub, "esc": esc, - }).Parse(viewTmplCompact) + }).Parse(viewTmplCompact0) if err != nil { panic(fmt.Errorf("could not parse view compact template: %w", err)) } - viewCompactTemplate = tmpl + viewCompactTemplate0 = tmpl + + tmpl, err = template.New("mysqldumpViewCompact").Funcs(template.FuncMap{ + "sub": sub, + "esc": esc, + }).Parse(viewTmplCompact1) + if err != nil { + panic(fmt.Errorf("could not parse view compact template: %w", err)) + } + viewCompactTemplate1 = tmpl } func (v *view) CreateSQL() ([]string, error) { @@ -92,10 +112,21 @@ func (v *view) Init() error { return nil } -func (v *view) Execute(out io.Writer, compact bool) error { - tmpl := viewFullTemplate - if compact { - tmpl = viewCompactTemplate +func (v *view) Execute(out io.Writer, compact bool, part int) error { + var tmpl *template.Template + switch part { + case 0: + tmpl = viewFullTemplate0 + if compact { + tmpl = viewCompactTemplate0 + } + case 1: + tmpl = viewFullTemplate1 + if compact { + tmpl = viewCompactTemplate1 + } + default: + return fmt.Errorf("invalid part %d for view %s", part, v.name) } return tmpl.Execute(out, v) } @@ -109,7 +140,7 @@ func (v *view) Collation() string { } // takes a Table, but is a view -const viewTmpl = ` +const viewTmpl0 = ` -- -- Temporary view structure for view {{ esc .Name }} -- @@ -121,7 +152,9 @@ SET @saved_cs_client = @@character_set_client; /*!50001 CREATE VIEW {{ esc .Name }} AS SELECT {{ $columns := .Columns }}{{ range $index, $column := .Columns }} 1 AS {{ esc $column }}{{ if ne $index (sub (len $columns) 1) }},{{ printf "%c" 10 }}{{ else }}*/;{{ end }}{{ end }} SET character_set_client = @saved_cs_client; +` +const viewTmpl1 = ` -- -- Current Database: {{ esc .Database }} -- @@ -146,13 +179,16 @@ USE {{ esc .Database }}; /*!50001 SET character_set_results = @saved_cs_results */; /*!50001 SET collation_connection = @saved_col_connection */; ` -const viewTmplCompact = ` + +const viewTmplCompact0 = ` SET @saved_cs_client = @@character_set_client; /*!50503 SET character_set_client = utf8mb4 */; /*!50001 CREATE VIEW {{ esc .Name }} AS SELECT {{ $columns := .Columns }}{{ range $index, $column := .Columns }} 1 AS {{ esc $column }}{{ if ne $index (sub (len $columns) 1) }},{{ printf "%c" 10 }}{{ else }}*/;{{ end }}{{ end }} SET character_set_client = @saved_cs_client; +` +const viewTmplCompact1 = ` USE {{ esc .Database }}; /*!50001 DROP VIEW IF EXISTS {{ esc .Name }}*/; /*!50001 SET @saved_cs_client = @@character_set_client */; diff --git a/test/backup_test.go b/test/backup_test.go index 8e712f8f..190ab190 100644 --- a/test/backup_test.go +++ b/test/backup_test.go @@ -59,6 +59,7 @@ var dumpFilterRegex = []*regexp.Regexp{ regexp.MustCompile(`(?i)^\s*-- MySQL dump .*$`), regexp.MustCompile(`(?i)^\s*-- Go SQL dump .*$`), regexp.MustCompile(`(?i)^\s*-- Dump completed on .*`), + regexp.MustCompile(`(?i)^\s*$`), } type containerPort struct { @@ -220,7 +221,7 @@ func (d *dockerContext) waitForDBConnectionAndGrantPrivileges(mysqlCID, dbuser, } // Ensure the user has the right privileges - dbGrant := []string{"mysql", fmt.Sprintf("-u%s", dbpass), fmt.Sprintf("-p%s", dbpass), 
"--protocol=tcp", "-h127.0.0.1", "-e", "grant process on *.* to user;"} + dbGrant := []string{"mysql", fmt.Sprintf("-u%s", dbpass), fmt.Sprintf("-p%s", dbpass), "--protocol=tcp", "-h127.0.0.1", "-e", "grant process on *.* to user; GRANT SYSTEM_VARIABLES_ADMIN ON *.* TO 'user'@'%'; SET GLOBAL log_bin_trust_function_creators = 1;"} attachResp, exitCode, err := d.execInContainer(ctx, mysqlCID, dbGrant) if err != nil { return fmt.Errorf("failed to attach to exec: %w", err) @@ -340,16 +341,75 @@ func (d *dockerContext) createBackupFile(mysqlCID, mysqlUser, mysqlPass, outfile // Create and populate the table mysqlCreateCmd := []string{"mysql", "-hlocalhost", "--protocol=tcp", fmt.Sprintf("-u%s", mysqlUser), fmt.Sprintf("-p%s", mysqlPass), "-e", ` - use tester; - create table t1 - (id int, name varchar(20), j json, d date, t time, dt datetime, ts timestamp); - INSERT INTO t1 (id,name,j,d,t,dt,ts) - VALUES - (1, "John", '{"a":"b"}', "2012-11-01", "00:15:00", "2012-11-01 00:15:00", "2012-11-01 00:15:00"), - (2, "Jill", '{"c":true}', "2012-11-02", "00:16:00", "2012-11-02 00:16:00", "2012-11-02 00:16:00"), - (3, "Sam", '{"d":24}', "2012-11-03", "00:17:00", "2012-11-03 00:17:00", "2012-11-03 00:17:00"), - (4, "Sarah", '{"a":"b"}', "2012-11-04", "00:18:00", "2012-11-04 00:18:00", "2012-11-04 00:18:00"); - create view view1 as select id, name from t1; +USE tester; + +-- Table +CREATE TABLE t1 ( + id INT, + name VARCHAR(20), + j JSON, + d DATE, + t TIME, + dt DATETIME, + ts TIMESTAMP +); + +-- Log table for trigger +CREATE TABLE t1_log ( + id INT, + name VARCHAR(20), + inserted_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Insert data +INSERT INTO t1 (id, name, j, d, t, dt, ts) VALUES +(1, "John", '{"a":"b"}', "2012-11-01", "00:15:00", "2012-11-01 00:15:00", "2012-11-01 00:15:00"), +(2, "Jill", '{"c":true}', "2012-11-02", "00:16:00", "2012-11-02 00:16:00", "2012-11-02 00:16:00"), +(3, "Sam", '{"d":24}', "2012-11-03", "00:17:00", "2012-11-03 00:17:00", "2012-11-03 00:17:00"), +(4, "Sarah", '{"a":"b"}', "2012-11-04", "00:18:00", "2012-11-04 00:18:00", "2012-11-04 00:18:00"); + +-- View +CREATE VIEW view1 AS +SELECT id, name FROM t1; + +-- Trigger: log inserts into t1 +DELIMITER // +CREATE TRIGGER trg_t1_insert +AFTER INSERT ON t1 +FOR EACH ROW +BEGIN + INSERT INTO t1_log (id, name) VALUES (NEW.id, NEW.name); +END; +// +DELIMITER ; + +-- Stored Procedure: insert a new person +DELIMITER // +CREATE PROCEDURE add_person( + IN p_id INT, + IN p_name VARCHAR(20), + IN p_json JSON, + IN p_date DATE, + IN p_time TIME, + IN p_datetime DATETIME, + IN p_timestamp TIMESTAMP +) +BEGIN + INSERT INTO t1 (id, name, j, d, t, dt, ts) + VALUES (p_id, p_name, p_json, p_date, p_time, p_datetime, p_timestamp); +END; +// +DELIMITER ; + +-- Function: return name length +DELIMITER // +CREATE FUNCTION name_length(p_name VARCHAR(255)) RETURNS INT +DETERMINISTIC +BEGIN + RETURN CHAR_LENGTH(p_name); +END; +// +DELIMITER ; `} attachResp, exitCode, err := d.execInContainer(ctx, mysqlCID, mysqlCreateCmd) if err != nil { @@ -363,7 +423,7 @@ func (d *dockerContext) createBackupFile(mysqlCID, mysqlUser, mysqlPass, outfile } // Dump the database - do both compact and non-compact - mysqlDumpCompactCmd := []string{"mysqldump", "-hlocalhost", "--protocol=tcp", "--complete-insert", fmt.Sprintf("-u%s", mysqlUser), fmt.Sprintf("-p%s", mysqlPass), "--compact", "--databases", "tester"} + mysqlDumpCompactCmd := []string{"mysqldump", "-hlocalhost", "--protocol=tcp", "--complete-insert", fmt.Sprintf("-u%s", mysqlUser), fmt.Sprintf("-p%s", mysqlPass), 
"--compact", "--databases", "--triggers", "--routines", "tester"} attachResp, exitCode, err = d.execInContainer(ctx, mysqlCID, mysqlDumpCompactCmd) if err != nil { return fmt.Errorf("failed to attach to exec: %w", err) @@ -384,7 +444,7 @@ func (d *dockerContext) createBackupFile(mysqlCID, mysqlUser, mysqlPass, outfile bufo.Reset() bufe.Reset() - mysqlDumpCmd := []string{"mysqldump", "-hlocalhost", "--protocol=tcp", "--complete-insert", fmt.Sprintf("-u%s", mysqlUser), fmt.Sprintf("-p%s", mysqlPass), "--databases", "tester"} + mysqlDumpCmd := []string{"mysqldump", "-hlocalhost", "--protocol=tcp", "--complete-insert", fmt.Sprintf("-u%s", mysqlUser), fmt.Sprintf("-p%s", mysqlPass), "--databases", "--triggers", "--routines", "tester"} attachResp, exitCode, err = d.execInContainer(ctx, mysqlCID, mysqlDumpCmd) if err != nil { return fmt.Errorf("failed to attach to exec: %w", err) @@ -872,6 +932,8 @@ func TestIntegration(t *testing.T) { dumpOpts := core.DumpOptions{ Compressor: &compression.GzipCompressor{}, Compact: false, + Triggers: true, + Routines: true, } runTest(t, testOptions{ targets: []string{"/full-backups/"}, @@ -892,6 +954,8 @@ func TestIntegration(t *testing.T) { dumpOpts := core.DumpOptions{ Compressor: &compression.GzipCompressor{}, Compact: true, + Triggers: true, + Routines: true, } runTest(t, testOptions{ targets: []string{"/compact-backups/"}, @@ -911,6 +975,8 @@ func TestIntegration(t *testing.T) { dumpOpts := core.DumpOptions{ Compressor: &compression.GzipCompressor{}, Compact: false, + Triggers: true, + Routines: true, FilenamePattern: "backup-{{ .Sequence }}-{{ .Subsequence }}.tgz", } runTest(t, testOptions{ @@ -942,6 +1008,8 @@ func TestIntegration(t *testing.T) { dumpOpts := core.DumpOptions{ Compressor: &compression.GzipCompressor{}, Compact: false, + Triggers: true, + Routines: true, } runTest(t, testOptions{ targets: []string{