Add indexes to improve backup query performance.
jedthe3rd committed Feb 27, 2022
1 parent 781347d commit 396119a
Showing 3 changed files with 24 additions and 2 deletions.
@@ -0,0 +1,12 @@
CREATE INDEX "nnc_Metadataset" ON Metadataset ("ID","BlocksetID")
CREATE INDEX "nn_FilesetentryFile" on FilesetEntry ("FilesetID","FileID")

-- The matching indexes for the temporary file tables are created at runtime
-- in LocalBackupDatabase.cs (lines 602 & 603):
--   CREATE INDEX "nn_tmpName1" ON "{0}" ("Path")
--   CREATE INDEX "nn_tmpName2" ON "{0}" ("Path")

CREATE INDEX "nn_FileLookup_BlockMeta" ON FileLookup ("BlocksetID", "MetadataID")

CREATE INDEX "nnc_BlocksetEntry" ON "BlocksetEntry" ("Index", "BlocksetID", "BlockID")

UPDATE "Version" SET "Version" = 12;
11 changes: 9 additions & 2 deletions Duplicati/Library/Main/Database/Database schema/Schema.sql
@@ -81,6 +81,8 @@ CREATE TABLE "FilesetEntry" (

/* Improved reverse lookup for joining Fileset and File table */
CREATE INDEX "FilesetentryFileIdIndex" on "FilesetEntry" ("FileID");
CREATE INDEX "nn_FilesetentryFile" on FilesetEntry ("FilesetID","FileID");



/*
@@ -109,6 +111,9 @@ CREATE TABLE "FileLookup" (

/* Fast path based lookup, single properties are auto-indexed */
CREATE UNIQUE INDEX "FileLookupPath" ON "FileLookup" ("PrefixID", "Path", "BlocksetID", "MetadataID");
CREATE INDEX "nn_FileLookup_BlockMeta" ON FileLookup ("BlocksetID", "MetadataID");



/*
The File view contains an ID
@@ -167,7 +172,7 @@ CREATE TABLE "BlocksetEntry" (

/* As this table is a cross table we need fast lookup */
CREATE INDEX "BlocksetEntry_IndexIdsBackwards" ON "BlocksetEntry" ("BlockID");

CREATE INDEX "nnc_BlocksetEntry" ON "BlocksetEntry" ("Index", "BlocksetID", "BlockID");

/*
The individual block hashes,
@@ -222,6 +227,8 @@ CREATE TABLE "Metadataset" (
);

CREATE INDEX "MetadatasetBlocksetID" ON "Metadataset" ("BlocksetID");
CREATE INDEX "nnc_Metadataset" ON Metadataset ("ID","BlocksetID");


/*
Operations performed on the backend,
@@ -280,4 +287,4 @@ CREATE TABLE "ChangeJournalData" (
"ConfigHash" TEXT NOT NULL
);

INSERT INTO "Version" ("Version") VALUES (11);
INSERT INTO "Version" ("Version") VALUES (12);
3 changes: 3 additions & 0 deletions Duplicati/Library/Main/Database/LocalBackupDatabase.cs
@@ -601,6 +601,9 @@ internal void UpdateChangeStatistics(BackupResults results, System.Data.IDbTrans

cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" AS " + subqueryFiles, tmpName1), lastFilesetId);
cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" AS " + subqueryFiles, tmpName2), m_filesetId);
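// Index the temporary file tables on "Path" so the IN / NOT IN membership
// tests in the statistics queries below become index probes instead of scans.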
cmd.ExecuteNonQuery(string.Format(@"CREATE INDEX ""nn_tmpName1"" ON ""{0}"" (""Path"")",tmpName1));
cmd.ExecuteNonQuery(string.Format(@"CREATE INDEX ""nn_tmpName2"" ON ""{0}"" (""Path"")",tmpName2));


results.AddedFiles = cmd.ExecuteScalarInt64(string.Format(@"SELECT COUNT(*) FROM ""File"" INNER JOIN ""FilesetEntry"" ON ""File"".""ID"" = ""FilesetEntry"".""FileID"" WHERE ""FilesetEntry"".""FilesetID"" = ? AND ""File"".""BlocksetID"" != ? AND ""File"".""BlocksetID"" != ? AND NOT ""File"".""Path"" IN (SELECT ""Path"" FROM ""{0}"")", tmpName1), 0, m_filesetId, FOLDER_BLOCKSET_ID, SYMLINK_BLOCKSET_ID);
results.DeletedFiles = cmd.ExecuteScalarInt64(string.Format(@"SELECT COUNT(*) FROM ""{0}"" WHERE ""{0}"".""Path"" NOT IN (SELECT ""Path"" FROM ""File"" INNER JOIN ""FilesetEntry"" ON ""File"".""ID"" = ""FilesetEntry"".""FileID"" WHERE ""FilesetEntry"".""FilesetID"" = ?)", tmpName1), 0, m_filesetId);
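
For reference, once string.Format substitutes the generated temporary table name (a hypothetical "tmp-GUID" is used below; the real names are random), the executed index statement and the AddedFiles-style query it accelerates look roughly like this:

CREATE INDEX "nn_tmpName1" ON "tmp-GUID" ("Path");

-- The "Path" index lets the NOT IN membership test probe the temporary
-- table's index instead of rescanning it for every candidate row.
SELECT COUNT(*)
  FROM "File"
 INNER JOIN "FilesetEntry" ON "File"."ID" = "FilesetEntry"."FileID"
 WHERE "FilesetEntry"."FilesetID" = 1
   AND NOT "File"."Path" IN (SELECT "Path" FROM "tmp-GUID");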

1 comment on commit 396119a

@duplicatibot


This commit has been mentioned on the Duplicati forum. There might be relevant details there:

https://forum.duplicati.com/t/the-database-has-version-12-but-the-largest-supported-version-is-11/15554/2
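
The linked thread is about opening a database this commit has already upgraded with an older build that only supports schema version 11; the stored value that triggers the error can be inspected directly, a sketch assuming an SQLite shell on the local database:

-- An older build refuses to open the database once this value exceeds the
-- highest schema version it was built with (11 before this commit):
SELECT "Version" FROM "Version";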
