Skip to content

Commit

Permalink
All: Allow modifying resource metadata only when synchronising (#9114)
Browse files Browse the repository at this point in the history
  • Loading branch information
laurent22 committed Oct 24, 2023
1 parent 0069069 commit 9aed3e0
Show file tree
Hide file tree
Showing 13 changed files with 197 additions and 53 deletions.
1 change: 1 addition & 0 deletions .eslintignore
Expand Up @@ -633,6 +633,7 @@ packages/lib/markdownUtils2.test.js
packages/lib/markupLanguageUtils.js
packages/lib/migrations/42.js
packages/lib/models/Alarm.js
packages/lib/models/BaseItem.test.js
packages/lib/models/BaseItem.js
packages/lib/models/Folder.sharing.test.js
packages/lib/models/Folder.test.js
Expand Down
1 change: 1 addition & 0 deletions .gitignore
Expand Up @@ -615,6 +615,7 @@ packages/lib/markdownUtils2.test.js
packages/lib/markupLanguageUtils.js
packages/lib/migrations/42.js
packages/lib/models/Alarm.js
packages/lib/models/BaseItem.test.js
packages/lib/models/BaseItem.js
packages/lib/models/Folder.sharing.test.js
packages/lib/models/Folder.test.js
Expand Down
1 change: 1 addition & 0 deletions packages/app-cli/tests/support/sample.txt
@@ -0,0 +1 @@
just testing
1 change: 1 addition & 0 deletions packages/app-cli/tests/support/sample2.txt
@@ -0,0 +1 @@
just testing 2
4 changes: 2 additions & 2 deletions packages/lib/JoplinDatabase.ts
Expand Up @@ -3,7 +3,7 @@ import shim from './shim';
import Database from './database';
import migration42 from './services/database/migrations/42';
import migration43 from './services/database/migrations/43';
// import migration44 from './services/database/migrations/44';
import migration44 from './services/database/migrations/44';
import { SqlQuery, Migration } from './services/database/types';
import addMigrationFile from './services/database/addMigrationFile';

Expand Down Expand Up @@ -127,7 +127,7 @@ INSERT INTO version (version) VALUES (1);
const migrations: Migration[] = [
migration42,
migration43,
// migration44,
migration44,
];

export interface TableField {
Expand Down
3 changes: 1 addition & 2 deletions packages/lib/RotatingLogs.test.ts
Expand Up @@ -48,8 +48,7 @@ describe('RotatingLogs', () => {
try {
dir = await createTempDir();
await createTestLogFile(dir);
await msleep(100);
const rotatingLogs: RotatingLogs = new RotatingLogs(dir, 1, 100);
const rotatingLogs: RotatingLogs = new RotatingLogs(dir, 1, 5000);
await rotatingLogs.cleanActiveLogFile();
await rotatingLogs.deleteNonActiveLogFiles();
const files = await readdir(dir);
Expand Down
12 changes: 10 additions & 2 deletions packages/lib/Synchronizer.ts
Expand Up @@ -604,7 +604,7 @@ export default class Synchronizer {
} else {
// Note: in order to know the real updated_time value, we need to load the content. In theory we could
// rely on the file timestamp (in remote.updated_time) but in practice it's not accurate enough and
// can lead to conflicts (for example when the file timestamp is slightly ahead of it's real
// can lead to conflicts (for example when the file timestamp is slightly ahead of its real
// updated_time). updated_time is set and managed by clients so it's always accurate.
// Same situation below for updateLocal.
//
Expand Down Expand Up @@ -701,7 +701,15 @@ export default class Synchronizer {
logger.warn(`Uploading a large resource (resourceId: ${local.id}, size:${resource.size} bytes) which may tie up the sync process.`);
}

await this.apiCall('put', remoteContentPath, null, { path: localResourceContentPath, source: 'file', shareId: resource.share_id });
// We skip updating the blob if it hasn't
// been modified since the last sync. In
// that case, it means the resource metadata
// (title, filename, etc.) has been changed,
// but not the data blob.
const syncItem = await BaseItem.syncItem(syncTargetId, resource.id, { fields: ['sync_time', 'force_sync'] });
if (!syncItem || syncItem.sync_time < resource.blob_updated_time || syncItem.force_sync) {
await this.apiCall('put', remoteContentPath, null, { path: localResourceContentPath, source: 'file', shareId: resource.share_id });
}
} catch (error) {
if (isCannotSyncError(error)) {
await handleCannotSyncItem(ItemClass, syncTargetId, local, error.message);
Expand Down
@@ -1,17 +1,22 @@
const { setupDatabaseAndSynchronizer, switchClient } = require('../testing/test-utils.js');
const Folder = require('../models/Folder').default;
const Note = require('../models/Note').default;
import { afterAllCleanUp, setupDatabaseAndSynchronizer, switchClient, syncTargetId, synchronizerStart, msleep } from '../testing/test-utils';
import BaseItem from './BaseItem';
import Folder from './Folder';
import Note from './Note';

describe('models/BaseItem', () => {
describe('BaseItem', () => {

beforeEach(async () => {
await setupDatabaseAndSynchronizer(1);
await switchClient(1);
});

afterAll(async () => {
await afterAllCleanUp();
});

// This is to handle the case where a property is removed from a BaseItem table - in that case files in
// the sync target will still have the old property but we don't need it locally.
it('should ignore properties that are present in sync file but not in database when serialising', (async () => {
it('should ignore properties that are present in sync file but not in database when serialising', async () => {
const folder = await Folder.save({ title: 'folder1' });

let serialized = await Folder.serialize(folder);
Expand All @@ -20,9 +25,9 @@ describe('models/BaseItem', () => {
const unserialized = await Folder.unserialize(serialized);

expect('ignore_me' in unserialized).toBe(false);
}));
});

it('should not modify title when unserializing', (async () => {
it('should not modify title when unserializing', async () => {
const folder1 = await Folder.save({ title: '' });
const folder2 = await Folder.save({ title: 'folder1' });

Expand All @@ -35,9 +40,9 @@ describe('models/BaseItem', () => {
const unserialized2 = await Folder.unserialize(serialized2);

expect(unserialized2.title).toBe(folder2.title);
}));
});

it('should correctly unserialize note timestamps', (async () => {
it('should correctly unserialize note timestamps', async () => {
const folder = await Folder.save({ title: 'folder' });
const note = await Note.save({ title: 'note', parent_id: folder.id });

Expand All @@ -48,9 +53,9 @@ describe('models/BaseItem', () => {
expect(unserialized.updated_time).toEqual(note.updated_time);
expect(unserialized.user_created_time).toEqual(note.user_created_time);
expect(unserialized.user_updated_time).toEqual(note.user_updated_time);
}));
});

it('should serialize geolocation fields', (async () => {
it('should serialize geolocation fields', async () => {
const folder = await Folder.save({ title: 'folder' });
let note = await Note.save({ title: 'note', parent_id: folder.id });
note = await Note.load(note.id);
Expand All @@ -76,9 +81,9 @@ describe('models/BaseItem', () => {
expect(unserialized.latitude).toEqual(note.latitude);
expect(unserialized.longitude).toEqual(note.longitude);
expect(unserialized.altitude).toEqual(note.altitude);
}));
});

it('should serialize and unserialize notes', (async () => {
it('should serialize and unserialize notes', async () => {
const folder = await Folder.save({ title: 'folder' });
const note = await Note.save({ title: 'note', parent_id: folder.id });
await Note.save({
Expand All @@ -93,9 +98,9 @@ describe('models/BaseItem', () => {
const noteAfter = await Note.unserialize(serialized);

expect(noteAfter).toEqual(noteBefore);
}));
});

it('should serialize and unserialize properties that contain new lines', (async () => {
it('should serialize and unserialize properties that contain new lines', async () => {
const sourceUrl = `
https://joplinapp.org/ \\n
`;
Expand All @@ -107,9 +112,9 @@ https://joplinapp.org/ \\n
const noteAfter = await Note.unserialize(serialized);

expect(noteAfter).toEqual(noteBefore);
}));
});

it('should not serialize the note title and body', (async () => {
it('should not serialize the note title and body', async () => {
const note = await Note.save({ title: 'my note', body: `one line
two line
three line \\n no escape` });
Expand All @@ -121,5 +126,27 @@ three line \\n no escape` });
one line
two line
three line \\n no escape`)).toBe(0);
}));
});

// Verifies that a sync_items row is created for an item on first sync, and
// that its sync_time is not bumped by a later sync that has nothing to upload.
it('should update item sync item', async () => {
const note1 = await Note.save({ });

// Helper: the recorded sync_time for the item on the current sync target,
// or 0 when the item has never been synced (no sync_items row yet).
const syncTime = async (itemId: string) => {
const syncItem = await BaseItem.syncItem(syncTargetId(), itemId, { fields: ['sync_time'] });
return syncItem ? syncItem.sync_time : 0;
};

// Before the first sync there is no sync record at all.
expect(await syncTime(note1.id)).toBe(0);

await synchronizerStart();

// After syncing, the recorded time must be a plausible (not future) timestamp.
const newTime = await syncTime(note1.id);
expect(newTime).toBeLessThanOrEqual(Date.now());

// Check that it doesn't change if we sync again
// (msleep ensures the clock advances, so an erroneous update would be visible)
await msleep(1);
await synchronizerStart();
expect(await syncTime(note1.id)).toBe(newTime);
});

});
10 changes: 9 additions & 1 deletion packages/lib/models/BaseItem.ts
@@ -1,5 +1,5 @@
import { ModelType, DeleteOptions } from '../BaseModel';
import { BaseItemEntity, DeletedItemEntity, NoteEntity } from '../services/database/types';
import { BaseItemEntity, DeletedItemEntity, NoteEntity, SyncItemEntity } from '../services/database/types';
import Setting from './Setting';
import BaseModel from '../BaseModel';
import time from '../time';
Expand Down Expand Up @@ -194,6 +194,14 @@ export default class BaseItem extends BaseModel {
return output;
}

// Returns the sync_items row for the given item on the given sync target, or
// null/undefined if the item has never been synced to that target.
//
// `options.fields` may restrict the selected columns; by default every column
// is returned.
public static async syncItem(syncTarget: number, itemId: string, options: LoadOptions = null): Promise<SyncItemEntity> {
	const effectiveOptions: LoadOptions = { fields: '*', ...options };
	const fieldSql = this.db().escapeFieldsToString(effectiveOptions.fields);
	const sql = `SELECT ${fieldSql} FROM sync_items WHERE sync_target = ? AND item_id = ?`;
	return this.db().selectOne(sql, [syncTarget, itemId]);
}

public static async allSyncItems(syncTarget: number) {
const output = await this.db().selectAll('SELECT * FROM sync_items WHERE sync_target = ?', [syncTarget]);
return output;
Expand Down
36 changes: 35 additions & 1 deletion packages/lib/models/Resource.test.ts
@@ -1,10 +1,11 @@
import { supportDir, setupDatabaseAndSynchronizer, switchClient, simulateReadOnlyShareEnv, expectThrow, createTempFile } from '../testing/test-utils';
import { supportDir, setupDatabaseAndSynchronizer, switchClient, simulateReadOnlyShareEnv, expectThrow, createTempFile, msleep } from '../testing/test-utils';
import Folder from '../models/Folder';
import Note from '../models/Note';
import Resource from '../models/Resource';
import shim from '../shim';
import { ErrorCode } from '../errors';
import { remove, pathExists } from 'fs-extra';
import { ResourceEntity } from '../services/database/types';

const testImagePath = `${supportDir}/photo.jpg`;

Expand Down Expand Up @@ -95,6 +96,39 @@ describe('models/Resource', () => {
expect(originalStat.size).toBe(newStat.size);
}));

// blob_updated_time tracks when the resource *file content* last changed,
// separately from updated_time, which also changes on metadata-only saves.
it('should set the blob_updated_time property if the blob is updated', (async () => {
const note = await Note.save({});
await shim.attachFileToNote(note, testImagePath);

const resourceA: ResourceEntity = (await Resource.all())[0];
// On creation both timestamps are expected to be set to the same value.
expect(resourceA.updated_time).toBe(resourceA.blob_updated_time);

// Let the clock advance so the "greater than" checks below can pass.
await msleep(1);

await Resource.updateResourceBlobContent(resourceA.id, testImagePath);

const resourceB: ResourceEntity = (await Resource.all())[0];
// Rewriting the blob must bump both the metadata and the blob timestamps.
expect(resourceB.updated_time).toBeGreaterThan(resourceA.updated_time);
expect(resourceB.blob_updated_time).toBeGreaterThan(resourceA.blob_updated_time);
}));

// Counterpart of the test above: a metadata-only save must leave
// blob_updated_time untouched while still bumping updated_time.
it('should NOT set the blob_updated_time property if the blob is NOT updated', (async () => {
const note = await Note.save({});
await shim.attachFileToNote(note, testImagePath);

const resourceA: ResourceEntity = (await Resource.all())[0];

// Let the clock advance so a timestamp change would be observable.
await msleep(1);

// We only update the resource metadata - so the blob timestamp should
// not change
await Resource.save({ id: resourceA.id, title: 'new title' });

const resourceB: ResourceEntity = (await Resource.all())[0];
expect(resourceB.updated_time).toBeGreaterThan(resourceA.updated_time);
expect(resourceB.blob_updated_time).toBe(resourceA.blob_updated_time);
}));

it('should not allow modifying a read-only resource', async () => {
const { cleanup, resource } = await setupFolderNoteResourceReadOnly('123456789');
await expectThrow(async () => Resource.save({ id: resource.id, share_id: '123456789', title: 'cannot do this!' }), ErrorCode.IsReadOnly);
Expand Down
25 changes: 20 additions & 5 deletions packages/lib/models/Resource.ts
Expand Up @@ -15,6 +15,7 @@ import JoplinError from '../JoplinError';
import itemCanBeEncrypted from './utils/itemCanBeEncrypted';
import { getEncryptionEnabled } from '../services/synchronizer/syncInfoUtils';
import ShareService from '../services/share/ShareService';
import { SaveOptions } from './utils/types';

export default class Resource extends BaseItem {

Expand Down Expand Up @@ -372,9 +373,15 @@ export default class Resource extends BaseItem {
// We first save the resource metadata because this can throw, for
// example if modifying a resource that is read-only

const now = Date.now();

const result = await Resource.save({
id: resource.id,
size: fileStat.size,
updated_time: now,
blob_updated_time: now,
}, {
autoTimestamp: false,
});

// If the above call has succeeded, we save the data blob
Expand Down Expand Up @@ -442,10 +449,18 @@ export default class Resource extends BaseItem {
}, { changeSource: ItemChange.SOURCE_SYNC });
}

// public static async save(o: ResourceEntity, options: SaveOptions = null): Promise<ResourceEntity> {
// const resource:ResourceEntity = await super.save(o, options);
// if (resource.updated_time) resource.bl
// return resource;
// }
// Saves a resource, making sure that for a brand new resource created_time,
// updated_time and blob_updated_time all start out with the same value.
//
// Automatic timestamping is disabled in that case so that the base class does
// not overwrite the values we set here; callers that pass explicit timestamps
// keep them (a falsy/unset timestamp is replaced by "now").
public static async save(o: ResourceEntity, options: SaveOptions = null): Promise<ResourceEntity> {
	// Work on a copy so the caller's object is not mutated.
	const toSave: ResourceEntity = { ...o };

	if (this.isNew(o, options)) {
		const now = Date.now();
		options = { ...options, autoTimestamp: false };
		toSave.created_time = toSave.created_time || now;
		toSave.updated_time = toSave.updated_time || now;
		toSave.blob_updated_time = toSave.blob_updated_time || now;
	}

	return super.save(toSave, options);
}

}
2 changes: 2 additions & 0 deletions packages/lib/services/database/types.ts
Expand Up @@ -256,6 +256,7 @@ export interface ResourceLocalStateEntity {
'type_'?: number;
}
export interface ResourceEntity {
'blob_updated_time'?: number;
'created_time'?: number;
'encryption_applied'?: number;
'encryption_blob_encrypted'?: number;
Expand Down Expand Up @@ -467,6 +468,7 @@ export const databaseSchema: DatabaseTables = {
type_: { type: 'number' },
},
resources: {
blob_updated_time: { type: 'number' },
created_time: { type: 'number' },
encryption_applied: { type: 'number' },
encryption_blob_encrypted: { type: 'number' },
Expand Down

0 comments on commit 9aed3e0

Please sign in to comment.