mirror of https://github.com/laurent22/joplin.git

splitting sync tests

parent 39b7fefdbf
commit 904a01bc5a

@@ -74,6 +74,12 @@ packages/app-cli/tests/InMemoryCache.js.map
packages/app-cli/tests/MdToHtml.d.ts
packages/app-cli/tests/MdToHtml.js
packages/app-cli/tests/MdToHtml.js.map
packages/app-cli/tests/Synchronizer.basics.d.ts
packages/app-cli/tests/Synchronizer.basics.js
packages/app-cli/tests/Synchronizer.basics.js.map
packages/app-cli/tests/Synchronizer.conflicts.d.ts
packages/app-cli/tests/Synchronizer.conflicts.js
packages/app-cli/tests/Synchronizer.conflicts.js.map
packages/app-cli/tests/Synchronizer.d.ts
packages/app-cli/tests/Synchronizer.js
packages/app-cli/tests/Synchronizer.js.map

@@ -245,6 +251,9 @@ packages/app-cli/tests/synchronizer_LockHandler.js.map
packages/app-cli/tests/synchronizer_MigrationHandler.d.ts
packages/app-cli/tests/synchronizer_MigrationHandler.js
packages/app-cli/tests/synchronizer_MigrationHandler.js.map
packages/app-cli/tests/test-utils-synchronizer.d.ts
packages/app-cli/tests/test-utils-synchronizer.js
packages/app-cli/tests/test-utils-synchronizer.js.map
packages/app-desktop/ElectronAppWrapper.d.ts
packages/app-desktop/ElectronAppWrapper.js
packages/app-desktop/ElectronAppWrapper.js.map

@@ -65,6 +65,12 @@ packages/app-cli/tests/InMemoryCache.js.map
packages/app-cli/tests/MdToHtml.d.ts
packages/app-cli/tests/MdToHtml.js
packages/app-cli/tests/MdToHtml.js.map
packages/app-cli/tests/Synchronizer.basics.d.ts
packages/app-cli/tests/Synchronizer.basics.js
packages/app-cli/tests/Synchronizer.basics.js.map
packages/app-cli/tests/Synchronizer.conflicts.d.ts
packages/app-cli/tests/Synchronizer.conflicts.js
packages/app-cli/tests/Synchronizer.conflicts.js.map
packages/app-cli/tests/Synchronizer.d.ts
packages/app-cli/tests/Synchronizer.js
packages/app-cli/tests/Synchronizer.js.map

@@ -236,6 +242,9 @@ packages/app-cli/tests/synchronizer_LockHandler.js.map
packages/app-cli/tests/synchronizer_MigrationHandler.d.ts
packages/app-cli/tests/synchronizer_MigrationHandler.js
packages/app-cli/tests/synchronizer_MigrationHandler.js.map
packages/app-cli/tests/test-utils-synchronizer.d.ts
packages/app-cli/tests/test-utils-synchronizer.js
packages/app-cli/tests/test-utils-synchronizer.js.map
packages/app-desktop/ElectronAppWrapper.d.ts
packages/app-desktop/ElectronAppWrapper.js
packages/app-desktop/ElectronAppWrapper.js.map

@@ -1,207 +0,0 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const test_utils_synchronizer_1 = require("./test-utils-synchronizer");
const { synchronizerStart, syncTargetName, allSyncTargetItemsEncrypted, tempFilePath, resourceFetcher, kvStore, revisionService, setupDatabaseAndSynchronizer, synchronizer, fileApi, sleep, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, checkThrowAsync } = require('./test-utils.js');
const Folder = require('@joplin/lib/models/Folder.js');
const Note = require('@joplin/lib/models/Note.js');
const Resource = require('@joplin/lib/models/Resource.js');
const ResourceFetcher = require('@joplin/lib/services/ResourceFetcher');
const Tag = require('@joplin/lib/models/Tag.js');
const MasterKey = require('@joplin/lib/models/MasterKey');
const BaseItem = require('@joplin/lib/models/BaseItem.js');
const Revision = require('@joplin/lib/models/Revision.js');
const WelcomeUtils = require('@joplin/lib/WelcomeUtils');
let insideBeforeEach = false;
describe('Synchronizer.basics', function () {
    beforeEach((done) => __awaiter(this, void 0, void 0, function* () {
        insideBeforeEach = true;
        yield setupDatabaseAndSynchronizer(1);
        yield setupDatabaseAndSynchronizer(2);
        yield switchClient(1);
        done();
        insideBeforeEach = false;
    }));
    it('should create remote items', (() => __awaiter(this, void 0, void 0, function* () {
        const folder = yield Folder.save({ title: 'folder1' });
        yield Note.save({ title: 'un', parent_id: folder.id });
        const all = yield test_utils_synchronizer_1.allNotesFolders();
        yield synchronizerStart();
        yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
    })));
    it('should update remote items', (() => __awaiter(this, void 0, void 0, function* () {
        const folder = yield Folder.save({ title: 'folder1' });
        const note = yield Note.save({ title: 'un', parent_id: folder.id });
        yield synchronizerStart();
        yield Note.save({ title: 'un UPDATE', id: note.id });
        const all = yield test_utils_synchronizer_1.allNotesFolders();
        yield synchronizerStart();
        yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
    })));
    it('should create local items', (() => __awaiter(this, void 0, void 0, function* () {
        const folder = yield Folder.save({ title: 'folder1' });
        yield Note.save({ title: 'un', parent_id: folder.id });
        yield synchronizerStart();
        yield switchClient(2);
        yield synchronizerStart();
        const all = yield test_utils_synchronizer_1.allNotesFolders();
        yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
    })));
    it('should update local items', (() => __awaiter(this, void 0, void 0, function* () {
        const folder1 = yield Folder.save({ title: 'folder1' });
        const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
        yield synchronizerStart();
        yield switchClient(2);
        yield synchronizerStart();
        yield sleep(0.1);
        let note2 = yield Note.load(note1.id);
        note2.title = 'Updated on client 2';
        yield Note.save(note2);
        note2 = yield Note.load(note2.id);
        yield synchronizerStart();
        yield switchClient(1);
        yield synchronizerStart();
        const all = yield test_utils_synchronizer_1.allNotesFolders();
        yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
    })));
    it('should delete remote notes', (() => __awaiter(this, void 0, void 0, function* () {
        const folder1 = yield Folder.save({ title: 'folder1' });
        const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
        yield synchronizerStart();
        yield switchClient(2);
        yield synchronizerStart();
        yield sleep(0.1);
        yield Note.delete(note1.id);
        yield synchronizerStart();
        const remotes = yield test_utils_synchronizer_1.remoteNotesAndFolders();
        expect(remotes.length).toBe(1);
        expect(remotes[0].id).toBe(folder1.id);
        const deletedItems = yield BaseItem.deletedItems(syncTargetId());
        expect(deletedItems.length).toBe(0);
    })));
    it('should not created deleted_items entries for items deleted via sync', (() => __awaiter(this, void 0, void 0, function* () {
        const folder1 = yield Folder.save({ title: 'folder1' });
        yield Note.save({ title: 'un', parent_id: folder1.id });
        yield synchronizerStart();
        yield switchClient(2);
        yield synchronizerStart();
        yield Folder.delete(folder1.id);
        yield synchronizerStart();
        yield switchClient(1);
        yield synchronizerStart();
        const deletedItems = yield BaseItem.deletedItems(syncTargetId());
        expect(deletedItems.length).toBe(0);
    })));
    it('should delete local notes', (() => __awaiter(this, void 0, void 0, function* () {
        // For these tests we pass the context around for each user. This is to make sure that the "deletedItemsProcessed"
        // property of the basicDelta() function is cleared properly at the end of a sync operation. If it is not cleared
        // it means items will no longer be deleted locally via sync.
        const folder1 = yield Folder.save({ title: 'folder1' });
        const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
        const note2 = yield Note.save({ title: 'deux', parent_id: folder1.id });
        yield synchronizerStart();
        yield switchClient(2);
        yield synchronizerStart();
        yield Note.delete(note1.id);
        yield synchronizerStart();
        yield switchClient(1);
        yield synchronizerStart();
        const items = yield test_utils_synchronizer_1.allNotesFolders();
        expect(items.length).toBe(2);
        const deletedItems = yield BaseItem.deletedItems(syncTargetId());
        expect(deletedItems.length).toBe(0);
        yield Note.delete(note2.id);
        yield synchronizerStart();
    })));
    it('should delete remote folder', (() => __awaiter(this, void 0, void 0, function* () {
        yield Folder.save({ title: 'folder1' });
        const folder2 = yield Folder.save({ title: 'folder2' });
        yield synchronizerStart();
        yield switchClient(2);
        yield synchronizerStart();
        yield sleep(0.1);
        yield Folder.delete(folder2.id);
        yield synchronizerStart();
        const all = yield test_utils_synchronizer_1.allNotesFolders();
        yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
    })));
    it('should delete local folder', (() => __awaiter(this, void 0, void 0, function* () {
        yield Folder.save({ title: 'folder1' });
        const folder2 = yield Folder.save({ title: 'folder2' });
        yield synchronizerStart();
        yield switchClient(2);
        yield synchronizerStart();
        yield Folder.delete(folder2.id);
        yield synchronizerStart();
        yield switchClient(1);
        yield synchronizerStart();
        const items = yield test_utils_synchronizer_1.allNotesFolders();
        yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(items, expect);
    })));
    it('should cross delete all folders', (() => __awaiter(this, void 0, void 0, function* () {
        // If client1 and 2 have two folders, client 1 deletes item 1 and client
        // 2 deletes item 2, they should both end up with no items after sync.
        const folder1 = yield Folder.save({ title: 'folder1' });
        const folder2 = yield Folder.save({ title: 'folder2' });
        yield synchronizerStart();
        yield switchClient(2);
        yield synchronizerStart();
        yield sleep(0.1);
        yield Folder.delete(folder1.id);
        yield switchClient(1);
        yield Folder.delete(folder2.id);
        yield synchronizerStart();
        yield switchClient(2);
        yield synchronizerStart();
        const items2 = yield test_utils_synchronizer_1.allNotesFolders();
        yield switchClient(1);
        yield synchronizerStart();
        const items1 = yield test_utils_synchronizer_1.allNotesFolders();
        expect(items1.length).toBe(0);
        expect(items1.length).toBe(items2.length);
    })));
    it('items should be downloaded again when user cancels in the middle of delta operation', (() => __awaiter(this, void 0, void 0, function* () {
        const folder1 = yield Folder.save({ title: 'folder1' });
        yield Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
        yield synchronizerStart();
        yield switchClient(2);
        synchronizer().testingHooks_ = ['cancelDeltaLoop2'];
        yield synchronizerStart();
        let notes = yield Note.all();
        expect(notes.length).toBe(0);
        synchronizer().testingHooks_ = [];
        yield synchronizerStart();
        notes = yield Note.all();
        expect(notes.length).toBe(1);
    })));
    it('should skip items that cannot be synced', (() => __awaiter(this, void 0, void 0, function* () {
        const folder1 = yield Folder.save({ title: 'folder1' });
        const note1 = yield Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
        const noteId = note1.id;
        yield synchronizerStart();
        let disabledItems = yield BaseItem.syncDisabledItems(syncTargetId());
        expect(disabledItems.length).toBe(0);
        yield Note.save({ id: noteId, title: 'un mod' });
        synchronizer().testingHooks_ = ['notesRejectedByTarget'];
        yield synchronizerStart();
        synchronizer().testingHooks_ = [];
        yield synchronizerStart(); // Another sync to check that this item is now excluded from sync
        yield switchClient(2);
        yield synchronizerStart();
        const notes = yield Note.all();
        expect(notes.length).toBe(1);
        expect(notes[0].title).toBe('un');
        yield switchClient(1);
        disabledItems = yield BaseItem.syncDisabledItems(syncTargetId());
        expect(disabledItems.length).toBe(1);
    })));
});
//# sourceMappingURL=Synchronizer.basics.js.map

@@ -1,20 +1,9 @@
import time from '@joplin/lib/time';
import shim from '@joplin/lib/shim';
import Setting from '@joplin/lib/models/Setting';
import BaseModel from '@joplin/lib/BaseModel';
import { NoteEntity } from '@joplin/lib/services/database/types';
import { allNotesFolders, remoteNotesAndFolders, remoteNotesFoldersResources, remoteResources, localNotesFoldersSameAsRemote } from './test-utils-synchronizer';
import { allNotesFolders, remoteNotesAndFolders, localNotesFoldersSameAsRemote } from './test-utils-synchronizer';

const { synchronizerStart, syncTargetName, allSyncTargetItemsEncrypted, tempFilePath, resourceFetcher, kvStore, revisionService, setupDatabaseAndSynchronizer, synchronizer, fileApi, sleep, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, checkThrowAsync } = require('./test-utils.js');
const { synchronizerStart, setupDatabaseAndSynchronizer, synchronizer, sleep, switchClient, syncTargetId } = require('./test-utils.js');
const Folder = require('@joplin/lib/models/Folder.js');
const Note = require('@joplin/lib/models/Note.js');
const Resource = require('@joplin/lib/models/Resource.js');
const ResourceFetcher = require('@joplin/lib/services/ResourceFetcher');
const Tag = require('@joplin/lib/models/Tag.js');
const MasterKey = require('@joplin/lib/models/MasterKey');
const BaseItem = require('@joplin/lib/models/BaseItem.js');
const Revision = require('@joplin/lib/models/Revision.js');
const WelcomeUtils = require('@joplin/lib/WelcomeUtils');

let insideBeforeEach = false;

@@ -0,0 +1,300 @@
import time from '@joplin/lib/time';
import Setting from '@joplin/lib/models/Setting';
import { allNotesFolders, localNotesFoldersSameAsRemote } from './test-utils-synchronizer';

const { synchronizerStart, setupDatabaseAndSynchronizer, sleep, switchClient, syncTargetId, loadEncryptionMasterKey, decryptionWorker } = require('./test-utils.js');
const Folder = require('@joplin/lib/models/Folder.js');
const Note = require('@joplin/lib/models/Note.js');
const BaseItem = require('@joplin/lib/models/BaseItem.js');

let insideBeforeEach = false;

describe('Synchronizer.conflicts', function() {

	beforeEach(async (done) => {
		insideBeforeEach = true;

		await setupDatabaseAndSynchronizer(1);
		await setupDatabaseAndSynchronizer(2);
		await switchClient(1);
		done();

		insideBeforeEach = false;
	});

	it('should resolve note conflicts', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		let note2 = await Note.load(note1.id);
		note2.title = 'Updated on client 2';
		await Note.save(note2);
		note2 = await Note.load(note2.id);
		await synchronizerStart();

		await switchClient(1);

		let note2conf = await Note.load(note1.id);
		note2conf.title = 'Updated on client 1';
		await Note.save(note2conf);
		note2conf = await Note.load(note1.id);
		await synchronizerStart();
		const conflictedNotes = await Note.conflictedNotes();
		expect(conflictedNotes.length).toBe(1);

		// Other than the id (since the conflicted note is a duplicate), and the is_conflict property
		// the conflicted and original note must be the same in every way, to make sure no data has been lost.
		const conflictedNote = conflictedNotes[0];
		expect(conflictedNote.id == note2conf.id).toBe(false);
		for (const n in conflictedNote) {
			if (!conflictedNote.hasOwnProperty(n)) continue;
			if (n == 'id' || n == 'is_conflict') continue;
			expect(conflictedNote[n]).toBe(note2conf[n]);
		}

		const noteUpdatedFromRemote = await Note.load(note1.id);
		for (const n in noteUpdatedFromRemote) {
			if (!noteUpdatedFromRemote.hasOwnProperty(n)) continue;
			expect(noteUpdatedFromRemote[n]).toBe(note2[n]);
		}
	}));

	it('should resolve folders conflicts', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		await Note.save({ title: 'un', parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2); // ----------------------------------

		await synchronizerStart();

		await sleep(0.1);

		let folder1_modRemote = await Folder.load(folder1.id);
		folder1_modRemote.title = 'folder1 UPDATE CLIENT 2';
		await Folder.save(folder1_modRemote);
		folder1_modRemote = await Folder.load(folder1_modRemote.id);

		await synchronizerStart();

		await switchClient(1); // ----------------------------------

		await sleep(0.1);

		let folder1_modLocal = await Folder.load(folder1.id);
		folder1_modLocal.title = 'folder1 UPDATE CLIENT 1';
		await Folder.save(folder1_modLocal);
		folder1_modLocal = await Folder.load(folder1.id);

		await synchronizerStart();

		const folder1_final = await Folder.load(folder1.id);
		expect(folder1_final.title).toBe(folder1_modRemote.title);
	}));

	it('should resolve conflict if remote folder has been deleted, but note has been added to folder locally', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Folder.delete(folder1.id);
		await synchronizerStart();

		await switchClient(1);

		await Note.save({ title: 'note1', parent_id: folder1.id });
		await synchronizerStart();
		const items = await allNotesFolders();
		expect(items.length).toBe(1);
		expect(items[0].title).toBe('note1');
		expect(items[0].is_conflict).toBe(1);
	}));

	it('should resolve conflict if note has been deleted remotely and locally', (async () => {
		const folder = await Folder.save({ title: 'folder' });
		const note = await Note.save({ title: 'note', parent_id: folder.title });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Note.delete(note.id);
		await synchronizerStart();

		await switchClient(1);

		await Note.delete(note.id);
		await synchronizerStart();

		const items = await allNotesFolders();
		expect(items.length).toBe(1);
		expect(items[0].title).toBe('folder');

		await localNotesFoldersSameAsRemote(items, expect);
	}));

	it('should handle conflict when remote note is deleted then local note is modified', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();

		await sleep(0.1);

		await Note.delete(note1.id);

		await synchronizerStart();

		await switchClient(1);

		const newTitle = 'Modified after having been deleted';
		await Note.save({ id: note1.id, title: newTitle });

		await synchronizerStart();

		const conflictedNotes = await Note.conflictedNotes();

		expect(conflictedNotes.length).toBe(1);
		expect(conflictedNotes[0].title).toBe(newTitle);

		const unconflictedNotes = await Note.unconflictedNotes();

		expect(unconflictedNotes.length).toBe(0);
	}));

	it('should handle conflict when remote folder is deleted then local folder is renamed', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		await Folder.save({ title: 'folder2' });
		await Note.save({ title: 'un', parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();

		await sleep(0.1);

		await Folder.delete(folder1.id);

		await synchronizerStart();

		await switchClient(1);

		await sleep(0.1);

		const newTitle = 'Modified after having been deleted';
		await Folder.save({ id: folder1.id, title: newTitle });

		await synchronizerStart();

		const items = await allNotesFolders();

		expect(items.length).toBe(1);
	}));

	it('should not sync notes with conflicts', (async () => {
		const f1 = await Folder.save({ title: 'folder' });
		await Note.save({ title: 'mynote', parent_id: f1.id, is_conflict: 1 });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		const notes = await Note.all();
		const folders = await Folder.all();
		expect(notes.length).toBe(0);
		expect(folders.length).toBe(1);
	}));

	it('should not try to delete on remote conflicted notes that have been deleted', (async () => {
		const f1 = await Folder.save({ title: 'folder' });
		const n1 = await Note.save({ title: 'mynote', parent_id: f1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Note.save({ id: n1.id, is_conflict: 1 });
		await Note.delete(n1.id);
		const deletedItems = await BaseItem.deletedItems(syncTargetId());

		expect(deletedItems.length).toBe(0);
	}));

	async function ignorableNoteConflictTest(withEncryption: boolean) {
		if (withEncryption) {
			Setting.setValue('encryption.enabled', true);
			await loadEncryptionMasterKey();
		}

		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		if (withEncryption) {
			await loadEncryptionMasterKey(null, true);
			await decryptionWorker().start();
		}
		let note2 = await Note.load(note1.id);
		note2.todo_completed = time.unixMs() - 1;
		await Note.save(note2);
		note2 = await Note.load(note2.id);
		await synchronizerStart();

		await switchClient(1);

		let note2conf = await Note.load(note1.id);
		note2conf.todo_completed = time.unixMs();
		await Note.save(note2conf);
		note2conf = await Note.load(note1.id);
		await synchronizerStart();

		if (!withEncryption) {
			// That was previously a common conflict:
			// - Client 1 mark todo as "done", and sync
			// - Client 2 doesn't sync, mark todo as "done" todo. Then sync.
			// In theory it is a conflict because the todo_completed dates are different
			// but in practice it doesn't matter, we can just take the date when the
			// todo was marked as "done" the first time.

			const conflictedNotes = await Note.conflictedNotes();
			expect(conflictedNotes.length).toBe(0);

			const notes = await Note.all();
			expect(notes.length).toBe(1);
			expect(notes[0].id).toBe(note1.id);
			expect(notes[0].todo_completed).toBe(note2.todo_completed);
		} else {
			// If the notes are encrypted however it's not possible to do this kind of
			// smart conflict resolving since we don't know the content, so in that
			// case it's handled as a regular conflict.

			const conflictedNotes = await Note.conflictedNotes();
			expect(conflictedNotes.length).toBe(1);

			const notes = await Note.all();
			expect(notes.length).toBe(2);
		}
	}

	it('should not consider it is a conflict if neither the title nor body of the note have changed', (async () => {
		await ignorableNoteConflictTest(false);
	}));

	it('should always handle conflict if local or remote are encrypted', (async () => {
		await ignorableNoteConflictTest(true);
	}));

});

File diff suppressed because it is too large

@@ -31,185 +31,6 @@ describe('synchronizer', function() {
		insideBeforeEach = false;
	});

	it('should resolve note conflicts', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		let note2 = await Note.load(note1.id);
		note2.title = 'Updated on client 2';
		await Note.save(note2);
		note2 = await Note.load(note2.id);
		await synchronizerStart();

		await switchClient(1);

		let note2conf = await Note.load(note1.id);
		note2conf.title = 'Updated on client 1';
		await Note.save(note2conf);
		note2conf = await Note.load(note1.id);
		await synchronizerStart();
		const conflictedNotes = await Note.conflictedNotes();
		expect(conflictedNotes.length).toBe(1);

		// Other than the id (since the conflicted note is a duplicate), and the is_conflict property
		// the conflicted and original note must be the same in every way, to make sure no data has been lost.
		const conflictedNote = conflictedNotes[0];
		expect(conflictedNote.id == note2conf.id).toBe(false);
		for (const n in conflictedNote) {
			if (!conflictedNote.hasOwnProperty(n)) continue;
			if (n == 'id' || n == 'is_conflict') continue;
			expect(conflictedNote[n]).toBe(note2conf[n]);
		}

		const noteUpdatedFromRemote = await Note.load(note1.id);
		for (const n in noteUpdatedFromRemote) {
			if (!noteUpdatedFromRemote.hasOwnProperty(n)) continue;
			expect(noteUpdatedFromRemote[n]).toBe(note2[n]);
		}
	}));

	it('should resolve folders conflicts', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		await Note.save({ title: 'un', parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2); // ----------------------------------

		await synchronizerStart();

		await sleep(0.1);

		let folder1_modRemote = await Folder.load(folder1.id);
		folder1_modRemote.title = 'folder1 UPDATE CLIENT 2';
		await Folder.save(folder1_modRemote);
		folder1_modRemote = await Folder.load(folder1_modRemote.id);

		await synchronizerStart();

		await switchClient(1); // ----------------------------------

		await sleep(0.1);

		let folder1_modLocal = await Folder.load(folder1.id);
		folder1_modLocal.title = 'folder1 UPDATE CLIENT 1';
		await Folder.save(folder1_modLocal);
		folder1_modLocal = await Folder.load(folder1.id);

		await synchronizerStart();

		const folder1_final = await Folder.load(folder1.id);
		expect(folder1_final.title).toBe(folder1_modRemote.title);
	}));

	it('should resolve conflict if remote folder has been deleted, but note has been added to folder locally', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Folder.delete(folder1.id);
		await synchronizerStart();

		await switchClient(1);

		await Note.save({ title: 'note1', parent_id: folder1.id });
		await synchronizerStart();
		const items = await allNotesFolders();
		expect(items.length).toBe(1);
		expect(items[0].title).toBe('note1');
		expect(items[0].is_conflict).toBe(1);
	}));

	it('should resolve conflict if note has been deleted remotely and locally', (async () => {
		const folder = await Folder.save({ title: 'folder' });
		const note = await Note.save({ title: 'note', parent_id: folder.title });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Note.delete(note.id);
		await synchronizerStart();

		await switchClient(1);

		await Note.delete(note.id);
		await synchronizerStart();

		const items = await allNotesFolders();
		expect(items.length).toBe(1);
		expect(items[0].title).toBe('folder');

		await localNotesFoldersSameAsRemote(items, expect);
	}));

	it('should handle conflict when remote note is deleted then local note is modified', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();

		await sleep(0.1);

		await Note.delete(note1.id);

		await synchronizerStart();

		await switchClient(1);

		const newTitle = 'Modified after having been deleted';
		await Note.save({ id: note1.id, title: newTitle });

		await synchronizerStart();

		const conflictedNotes = await Note.conflictedNotes();

		expect(conflictedNotes.length).toBe(1);
		expect(conflictedNotes[0].title).toBe(newTitle);

		const unconflictedNotes = await Note.unconflictedNotes();

		expect(unconflictedNotes.length).toBe(0);
	}));

	it('should handle conflict when remote folder is deleted then local folder is renamed', (async () => {
		const folder1 = await Folder.save({ title: 'folder1' });
		await Folder.save({ title: 'folder2' });
		await Note.save({ title: 'un', parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();

		await sleep(0.1);

		await Folder.delete(folder1.id);

		await synchronizerStart();

		await switchClient(1);

		await sleep(0.1);

		const newTitle = 'Modified after having been deleted';
		await Folder.save({ id: folder1.id, title: newTitle });

		await synchronizerStart();

		const items = await allNotesFolders();

		expect(items.length).toBe(1);
	}));

	it('should allow duplicate folder titles', (async () => {
		await Folder.save({ title: 'folder' });

@@ -301,102 +122,6 @@ describe('synchronizer', function() {
		await shoudSyncTagTest(true);
	}));

	it('should not sync notes with conflicts', (async () => {
		const f1 = await Folder.save({ title: 'folder' });
		await Note.save({ title: 'mynote', parent_id: f1.id, is_conflict: 1 });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		const notes = await Note.all();
		const folders = await Folder.all();
		expect(notes.length).toBe(0);
		expect(folders.length).toBe(1);
	}));

	it('should not try to delete on remote conflicted notes that have been deleted', (async () => {
		const f1 = await Folder.save({ title: 'folder' });
		const n1 = await Note.save({ title: 'mynote', parent_id: f1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		await Note.save({ id: n1.id, is_conflict: 1 });
		await Note.delete(n1.id);
		const deletedItems = await BaseItem.deletedItems(syncTargetId());

		expect(deletedItems.length).toBe(0);
	}));

	async function ignorableNoteConflictTest(withEncryption: boolean) {
		if (withEncryption) {
			Setting.setValue('encryption.enabled', true);
			await loadEncryptionMasterKey();
		}

		const folder1 = await Folder.save({ title: 'folder1' });
		const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
		await synchronizerStart();

		await switchClient(2);

		await synchronizerStart();
		if (withEncryption) {
			await loadEncryptionMasterKey(null, true);
			await decryptionWorker().start();
		}
		let note2 = await Note.load(note1.id);
		note2.todo_completed = time.unixMs() - 1;
		await Note.save(note2);
		note2 = await Note.load(note2.id);
		await synchronizerStart();

		await switchClient(1);

		let note2conf = await Note.load(note1.id);
		note2conf.todo_completed = time.unixMs();
		await Note.save(note2conf);
		note2conf = await Note.load(note1.id);
		await synchronizerStart();

		if (!withEncryption) {
			// That was previously a common conflict:
			// - Client 1 mark todo as "done", and sync
			// - Client 2 doesn't sync, mark todo as "done" todo. Then sync.
			// In theory it is a conflict because the todo_completed dates are different
			// but in practice it doesn't matter, we can just take the date when the
			// todo was marked as "done" the first time.

			const conflictedNotes = await Note.conflictedNotes();
			expect(conflictedNotes.length).toBe(0);

			const notes = await Note.all();
			expect(notes.length).toBe(1);
			expect(notes[0].id).toBe(note1.id);
			expect(notes[0].todo_completed).toBe(note2.todo_completed);
		} else {
			// If the notes are encrypted however it's not possible to do this kind of
			// smart conflict resolving since we don't know the content, so in that
			// case it's handled as a regular conflict.

			const conflictedNotes = await Note.conflictedNotes();
			expect(conflictedNotes.length).toBe(1);

			const notes = await Note.all();
			expect(notes.length).toBe(2);
		}
	}

	it('should not consider it is a conflict if neither the title nor body of the note have changed', (async () => {
		await ignorableNoteConflictTest(false);
	}));

	it('should always handle conflict if local or remote are encrypted', (async () => {
		await ignorableNoteConflictTest(true);
	}));

	it('notes and folders should get encrypted when encryption is enabled', (async () => {
		Setting.setValue('encryption.enabled', true);
		const masterKey = await loadEncryptionMasterKey();

@@ -640,7 +365,7 @@ describe('synchronizer', function() {
		expect(await shim.fsDriver().exists(resourcePath1)).toBe(false);
	}));

	it('should encryt resources', (async () => {
	it('should encrypt resources', (async () => {
		Setting.setValue('encryption.enabled', true);
		const masterKey = await loadEncryptionMasterKey();