mirror of https://github.com/laurent22/joplin.git
splitting sync tests
parent
71854fb66b
commit
39b7fefdbf
|
@ -34,6 +34,7 @@ module.exports = {
|
|||
'<rootDir>/tests/support/',
|
||||
'<rootDir>/build/',
|
||||
'<rootDir>/tests/test-utils.js',
|
||||
'<rootDir>/tests/test-utils-synchronizer.js',
|
||||
'<rootDir>/tests/file_api_driver.js',
|
||||
'<rootDir>/tests/tmp/',
|
||||
],
|
||||
|
|
|
@ -0,0 +1,207 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const test_utils_synchronizer_1 = require("./test-utils-synchronizer");
|
||||
const { synchronizerStart, syncTargetName, allSyncTargetItemsEncrypted, tempFilePath, resourceFetcher, kvStore, revisionService, setupDatabaseAndSynchronizer, synchronizer, fileApi, sleep, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, checkThrowAsync } = require('./test-utils.js');
|
||||
const Folder = require('@joplin/lib/models/Folder.js');
|
||||
const Note = require('@joplin/lib/models/Note.js');
|
||||
const Resource = require('@joplin/lib/models/Resource.js');
|
||||
const ResourceFetcher = require('@joplin/lib/services/ResourceFetcher');
|
||||
const Tag = require('@joplin/lib/models/Tag.js');
|
||||
const MasterKey = require('@joplin/lib/models/MasterKey');
|
||||
const BaseItem = require('@joplin/lib/models/BaseItem.js');
|
||||
const Revision = require('@joplin/lib/models/Revision.js');
|
||||
const WelcomeUtils = require('@joplin/lib/WelcomeUtils');
|
||||
let insideBeforeEach = false;
|
||||
describe('Synchronizer.basics', function () {
|
||||
beforeEach((done) => __awaiter(this, void 0, void 0, function* () {
|
||||
insideBeforeEach = true;
|
||||
yield setupDatabaseAndSynchronizer(1);
|
||||
yield setupDatabaseAndSynchronizer(2);
|
||||
yield switchClient(1);
|
||||
done();
|
||||
insideBeforeEach = false;
|
||||
}));
|
||||
it('should create remote items', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder = yield Folder.save({ title: 'folder1' });
|
||||
yield Note.save({ title: 'un', parent_id: folder.id });
|
||||
const all = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
yield synchronizerStart();
|
||||
yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should update remote items', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder = yield Folder.save({ title: 'folder1' });
|
||||
const note = yield Note.save({ title: 'un', parent_id: folder.id });
|
||||
yield synchronizerStart();
|
||||
yield Note.save({ title: 'un UPDATE', id: note.id });
|
||||
const all = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
yield synchronizerStart();
|
||||
yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should create local items', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder = yield Folder.save({ title: 'folder1' });
|
||||
yield Note.save({ title: 'un', parent_id: folder.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
const all = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should update local items', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield sleep(0.1);
|
||||
let note2 = yield Note.load(note1.id);
|
||||
note2.title = 'Updated on client 2';
|
||||
yield Note.save(note2);
|
||||
note2 = yield Note.load(note2.id);
|
||||
yield synchronizerStart();
|
||||
yield switchClient(1);
|
||||
yield synchronizerStart();
|
||||
const all = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should delete remote notes', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield sleep(0.1);
|
||||
yield Note.delete(note1.id);
|
||||
yield synchronizerStart();
|
||||
const remotes = yield test_utils_synchronizer_1.remoteNotesAndFolders();
|
||||
expect(remotes.length).toBe(1);
|
||||
expect(remotes[0].id).toBe(folder1.id);
|
||||
const deletedItems = yield BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
})));
|
||||
it('should not created deleted_items entries for items deleted via sync', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
yield Note.save({ title: 'un', parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield Folder.delete(folder1.id);
|
||||
yield synchronizerStart();
|
||||
yield switchClient(1);
|
||||
yield synchronizerStart();
|
||||
const deletedItems = yield BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
})));
|
||||
it('should delete local notes', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
// For these tests we pass the context around for each user. This is to make sure that the "deletedItemsProcessed"
|
||||
// property of the basicDelta() function is cleared properly at the end of a sync operation. If it is not cleared
|
||||
// it means items will no longer be deleted locally via sync.
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
|
||||
const note2 = yield Note.save({ title: 'deux', parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield Note.delete(note1.id);
|
||||
yield synchronizerStart();
|
||||
yield switchClient(1);
|
||||
yield synchronizerStart();
|
||||
const items = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
expect(items.length).toBe(2);
|
||||
const deletedItems = yield BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
yield Note.delete(note2.id);
|
||||
yield synchronizerStart();
|
||||
})));
|
||||
it('should delete remote folder', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
yield Folder.save({ title: 'folder1' });
|
||||
const folder2 = yield Folder.save({ title: 'folder2' });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield sleep(0.1);
|
||||
yield Folder.delete(folder2.id);
|
||||
yield synchronizerStart();
|
||||
const all = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should delete local folder', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
yield Folder.save({ title: 'folder1' });
|
||||
const folder2 = yield Folder.save({ title: 'folder2' });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield Folder.delete(folder2.id);
|
||||
yield synchronizerStart();
|
||||
yield switchClient(1);
|
||||
yield synchronizerStart();
|
||||
const items = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(items, expect);
|
||||
})));
|
||||
it('should cross delete all folders', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
// If client1 and 2 have two folders, client 1 deletes item 1 and client
|
||||
// 2 deletes item 2, they should both end up with no items after sync.
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const folder2 = yield Folder.save({ title: 'folder2' });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield sleep(0.1);
|
||||
yield Folder.delete(folder1.id);
|
||||
yield switchClient(1);
|
||||
yield Folder.delete(folder2.id);
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
const items2 = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
yield switchClient(1);
|
||||
yield synchronizerStart();
|
||||
const items1 = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
expect(items1.length).toBe(0);
|
||||
expect(items1.length).toBe(items2.length);
|
||||
})));
|
||||
it('items should be downloaded again when user cancels in the middle of delta operation', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
yield Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
synchronizer().testingHooks_ = ['cancelDeltaLoop2'];
|
||||
yield synchronizerStart();
|
||||
let notes = yield Note.all();
|
||||
expect(notes.length).toBe(0);
|
||||
synchronizer().testingHooks_ = [];
|
||||
yield synchronizerStart();
|
||||
notes = yield Note.all();
|
||||
expect(notes.length).toBe(1);
|
||||
})));
|
||||
it('should skip items that cannot be synced', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const note1 = yield Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
const noteId = note1.id;
|
||||
yield synchronizerStart();
|
||||
let disabledItems = yield BaseItem.syncDisabledItems(syncTargetId());
|
||||
expect(disabledItems.length).toBe(0);
|
||||
yield Note.save({ id: noteId, title: 'un mod' });
|
||||
synchronizer().testingHooks_ = ['notesRejectedByTarget'];
|
||||
yield synchronizerStart();
|
||||
synchronizer().testingHooks_ = [];
|
||||
yield synchronizerStart(); // Another sync to check that this item is now excluded from sync
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
const notes = yield Note.all();
|
||||
expect(notes.length).toBe(1);
|
||||
expect(notes[0].title).toBe('un');
|
||||
yield switchClient(1);
|
||||
disabledItems = yield BaseItem.syncDisabledItems(syncTargetId());
|
||||
expect(disabledItems.length).toBe(1);
|
||||
})));
|
||||
});
|
||||
//# sourceMappingURL=Synchronizer.basics.js.map
|
|
@ -0,0 +1,280 @@
|
|||
import time from '@joplin/lib/time';
|
||||
import shim from '@joplin/lib/shim';
|
||||
import Setting from '@joplin/lib/models/Setting';
|
||||
import BaseModel from '@joplin/lib/BaseModel';
|
||||
import { NoteEntity } from '@joplin/lib/services/database/types';
|
||||
import { allNotesFolders, remoteNotesAndFolders, remoteNotesFoldersResources, remoteResources, localNotesFoldersSameAsRemote } from './test-utils-synchronizer';
|
||||
|
||||
const { synchronizerStart, syncTargetName, allSyncTargetItemsEncrypted, tempFilePath, resourceFetcher, kvStore, revisionService, setupDatabaseAndSynchronizer, synchronizer, fileApi, sleep, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, checkThrowAsync } = require('./test-utils.js');
|
||||
const Folder = require('@joplin/lib/models/Folder.js');
|
||||
const Note = require('@joplin/lib/models/Note.js');
|
||||
const Resource = require('@joplin/lib/models/Resource.js');
|
||||
const ResourceFetcher = require('@joplin/lib/services/ResourceFetcher');
|
||||
const Tag = require('@joplin/lib/models/Tag.js');
|
||||
const MasterKey = require('@joplin/lib/models/MasterKey');
|
||||
const BaseItem = require('@joplin/lib/models/BaseItem.js');
|
||||
const Revision = require('@joplin/lib/models/Revision.js');
|
||||
const WelcomeUtils = require('@joplin/lib/WelcomeUtils');
|
||||
|
||||
let insideBeforeEach = false;
|
||||
|
||||
describe('Synchronizer.basics', function() {
|
||||
|
||||
beforeEach(async (done) => {
|
||||
insideBeforeEach = true;
|
||||
|
||||
await setupDatabaseAndSynchronizer(1);
|
||||
await setupDatabaseAndSynchronizer(2);
|
||||
await switchClient(1);
|
||||
done();
|
||||
|
||||
insideBeforeEach = false;
|
||||
});
|
||||
|
||||
it('should create remote items', (async () => {
|
||||
const folder = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'un', parent_id: folder.id });
|
||||
|
||||
const all = await allNotesFolders();
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should update remote items', (async () => {
|
||||
const folder = await Folder.save({ title: 'folder1' });
|
||||
const note = await Note.save({ title: 'un', parent_id: folder.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await Note.save({ title: 'un UPDATE', id: note.id });
|
||||
|
||||
const all = await allNotesFolders();
|
||||
await synchronizerStart();
|
||||
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should create local items', (async () => {
|
||||
const folder = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'un', parent_id: folder.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const all = await allNotesFolders();
|
||||
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should update local items', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await sleep(0.1);
|
||||
|
||||
let note2 = await Note.load(note1.id);
|
||||
note2.title = 'Updated on client 2';
|
||||
await Note.save(note2);
|
||||
note2 = await Note.load(note2.id);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const all = await allNotesFolders();
|
||||
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should delete remote notes', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await sleep(0.1);
|
||||
|
||||
await Note.delete(note1.id);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const remotes = await remoteNotesAndFolders();
|
||||
expect(remotes.length).toBe(1);
|
||||
expect(remotes[0].id).toBe(folder1.id);
|
||||
|
||||
const deletedItems = await BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
}));
|
||||
|
||||
it('should not created deleted_items entries for items deleted via sync', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await Folder.delete(folder1.id);
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
const deletedItems = await BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
}));
|
||||
|
||||
it('should delete local notes', (async () => {
|
||||
// For these tests we pass the context around for each user. This is to make sure that the "deletedItemsProcessed"
|
||||
// property of the basicDelta() function is cleared properly at the end of a sync operation. If it is not cleared
|
||||
// it means items will no longer be deleted locally via sync.
|
||||
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
const note2 = await Note.save({ title: 'deux', parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await Note.delete(note1.id);
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
const items = await allNotesFolders();
|
||||
expect(items.length).toBe(2);
|
||||
const deletedItems = await BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
await Note.delete(note2.id);
|
||||
await synchronizerStart();
|
||||
}));
|
||||
|
||||
it('should delete remote folder', (async () => {
|
||||
await Folder.save({ title: 'folder1' });
|
||||
const folder2 = await Folder.save({ title: 'folder2' });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await sleep(0.1);
|
||||
|
||||
await Folder.delete(folder2.id);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const all = await allNotesFolders();
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should delete local folder', (async () => {
|
||||
await Folder.save({ title: 'folder1' });
|
||||
const folder2 = await Folder.save({ title: 'folder2' });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await Folder.delete(folder2.id);
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
const items = await allNotesFolders();
|
||||
await localNotesFoldersSameAsRemote(items, expect);
|
||||
}));
|
||||
|
||||
it('should cross delete all folders', (async () => {
|
||||
// If client1 and 2 have two folders, client 1 deletes item 1 and client
|
||||
// 2 deletes item 2, they should both end up with no items after sync.
|
||||
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const folder2 = await Folder.save({ title: 'folder2' });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await sleep(0.1);
|
||||
await Folder.delete(folder1.id);
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await Folder.delete(folder2.id);
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
const items2 = await allNotesFolders();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
const items1 = await allNotesFolders();
|
||||
expect(items1.length).toBe(0);
|
||||
expect(items1.length).toBe(items2.length);
|
||||
}));
|
||||
|
||||
it('items should be downloaded again when user cancels in the middle of delta operation', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
synchronizer().testingHooks_ = ['cancelDeltaLoop2'];
|
||||
await synchronizerStart();
|
||||
let notes = await Note.all();
|
||||
expect(notes.length).toBe(0);
|
||||
|
||||
synchronizer().testingHooks_ = [];
|
||||
await synchronizerStart();
|
||||
notes = await Note.all();
|
||||
expect(notes.length).toBe(1);
|
||||
}));
|
||||
|
||||
it('should skip items that cannot be synced', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
const noteId = note1.id;
|
||||
await synchronizerStart();
|
||||
let disabledItems = await BaseItem.syncDisabledItems(syncTargetId());
|
||||
expect(disabledItems.length).toBe(0);
|
||||
await Note.save({ id: noteId, title: 'un mod' });
|
||||
synchronizer().testingHooks_ = ['notesRejectedByTarget'];
|
||||
await synchronizerStart();
|
||||
synchronizer().testingHooks_ = [];
|
||||
await synchronizerStart(); // Another sync to check that this item is now excluded from sync
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
const notes = await Note.all();
|
||||
expect(notes.length).toBe(1);
|
||||
expect(notes[0].title).toBe('un');
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
disabledItems = await BaseItem.syncDisabledItems(syncTargetId());
|
||||
expect(disabledItems.length).toBe(1);
|
||||
}));
|
||||
|
||||
});
|
|
@ -13,6 +13,7 @@ const time_1 = require("@joplin/lib/time");
|
|||
const shim_1 = require("@joplin/lib/shim");
|
||||
const Setting_1 = require("@joplin/lib/models/Setting");
|
||||
const BaseModel_1 = require("@joplin/lib/BaseModel");
|
||||
const test_utils_synchronizer_1 = require("./test-utils-synchronizer");
|
||||
const { synchronizerStart, syncTargetName, allSyncTargetItemsEncrypted, tempFilePath, resourceFetcher, kvStore, revisionService, setupDatabaseAndSynchronizer, synchronizer, fileApi, sleep, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, checkThrowAsync } = require('./test-utils.js');
|
||||
const Folder = require('@joplin/lib/models/Folder.js');
|
||||
const Note = require('@joplin/lib/models/Note.js');
|
||||
|
@ -23,69 +24,6 @@ const MasterKey = require('@joplin/lib/models/MasterKey');
|
|||
const BaseItem = require('@joplin/lib/models/BaseItem.js');
|
||||
const Revision = require('@joplin/lib/models/Revision.js');
|
||||
const WelcomeUtils = require('@joplin/lib/WelcomeUtils');
|
||||
function allNotesFolders() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const folders = yield Folder.all();
|
||||
const notes = yield Note.all();
|
||||
return folders.concat(notes);
|
||||
});
|
||||
}
|
||||
function remoteItemsByTypes(types) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const list = yield fileApi().list('', { includeDirs: false, syncItemsOnly: true });
|
||||
if (list.has_more)
|
||||
throw new Error('Not implemented!!!');
|
||||
const files = list.items;
|
||||
const output = [];
|
||||
for (const file of files) {
|
||||
const remoteContent = yield fileApi().get(file.path);
|
||||
const content = yield BaseItem.unserialize(remoteContent);
|
||||
if (types.indexOf(content.type_) < 0)
|
||||
continue;
|
||||
output.push(content);
|
||||
}
|
||||
return output;
|
||||
});
|
||||
}
|
||||
function remoteNotesAndFolders() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return remoteItemsByTypes([BaseModel_1.default.TYPE_NOTE, BaseModel_1.default.TYPE_FOLDER]);
|
||||
});
|
||||
}
|
||||
function remoteNotesFoldersResources() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return remoteItemsByTypes([BaseModel_1.default.TYPE_NOTE, BaseModel_1.default.TYPE_FOLDER, BaseModel_1.default.TYPE_RESOURCE]);
|
||||
});
|
||||
}
|
||||
function remoteResources() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return remoteItemsByTypes([BaseModel_1.default.TYPE_RESOURCE]);
|
||||
});
|
||||
}
|
||||
function localNotesFoldersSameAsRemote(locals, expect) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let error = null;
|
||||
try {
|
||||
const nf = yield remoteNotesAndFolders();
|
||||
expect(locals.length).toBe(nf.length);
|
||||
for (let i = 0; i < locals.length; i++) {
|
||||
const dbItem = locals[i];
|
||||
const path = BaseItem.systemPath(dbItem);
|
||||
const remote = yield fileApi().stat(path);
|
||||
expect(!!remote).toBe(true);
|
||||
if (!remote)
|
||||
continue;
|
||||
let remoteContent = yield fileApi().get(path);
|
||||
remoteContent = dbItem.type_ == BaseModel_1.default.TYPE_NOTE ? yield Note.unserialize(remoteContent) : yield Folder.unserialize(remoteContent);
|
||||
expect(remoteContent.title).toBe(dbItem.title);
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
error = e;
|
||||
}
|
||||
expect(error).toBe(null);
|
||||
});
|
||||
}
|
||||
let insideBeforeEach = false;
|
||||
describe('synchronizer', function () {
|
||||
beforeEach((done) => __awaiter(this, void 0, void 0, function* () {
|
||||
|
@ -96,48 +34,6 @@ describe('synchronizer', function () {
|
|||
done();
|
||||
insideBeforeEach = false;
|
||||
}));
|
||||
it('should create remote items', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder = yield Folder.save({ title: 'folder1' });
|
||||
yield Note.save({ title: 'un', parent_id: folder.id });
|
||||
const all = yield allNotesFolders();
|
||||
yield synchronizerStart();
|
||||
yield localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should update remote items', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder = yield Folder.save({ title: 'folder1' });
|
||||
const note = yield Note.save({ title: 'un', parent_id: folder.id });
|
||||
yield synchronizerStart();
|
||||
yield Note.save({ title: 'un UPDATE', id: note.id });
|
||||
const all = yield allNotesFolders();
|
||||
yield synchronizerStart();
|
||||
yield localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should create local items', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder = yield Folder.save({ title: 'folder1' });
|
||||
yield Note.save({ title: 'un', parent_id: folder.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
const all = yield allNotesFolders();
|
||||
yield localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should update local items', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield sleep(0.1);
|
||||
let note2 = yield Note.load(note1.id);
|
||||
note2.title = 'Updated on client 2';
|
||||
yield Note.save(note2);
|
||||
note2 = yield Note.load(note2.id);
|
||||
yield synchronizerStart();
|
||||
yield switchClient(1);
|
||||
yield synchronizerStart();
|
||||
const all = yield allNotesFolders();
|
||||
yield localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should resolve note conflicts', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
|
||||
|
@ -197,80 +93,6 @@ describe('synchronizer', function () {
|
|||
const folder1_final = yield Folder.load(folder1.id);
|
||||
expect(folder1_final.title).toBe(folder1_modRemote.title);
|
||||
})));
|
||||
it('should delete remote notes', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield sleep(0.1);
|
||||
yield Note.delete(note1.id);
|
||||
yield synchronizerStart();
|
||||
const remotes = yield remoteNotesAndFolders();
|
||||
expect(remotes.length).toBe(1);
|
||||
expect(remotes[0].id).toBe(folder1.id);
|
||||
const deletedItems = yield BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
})));
|
||||
it('should not created deleted_items entries for items deleted via sync', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
yield Note.save({ title: 'un', parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield Folder.delete(folder1.id);
|
||||
yield synchronizerStart();
|
||||
yield switchClient(1);
|
||||
yield synchronizerStart();
|
||||
const deletedItems = yield BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
})));
|
||||
it('should delete local notes', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
// For these tests we pass the context around for each user. This is to make sure that the "deletedItemsProcessed"
|
||||
// property of the basicDelta() function is cleared properly at the end of a sync operation. If it is not cleared
|
||||
// it means items will no longer be deleted locally via sync.
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const note1 = yield Note.save({ title: 'un', parent_id: folder1.id });
|
||||
const note2 = yield Note.save({ title: 'deux', parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield Note.delete(note1.id);
|
||||
yield synchronizerStart();
|
||||
yield switchClient(1);
|
||||
yield synchronizerStart();
|
||||
const items = yield allNotesFolders();
|
||||
expect(items.length).toBe(2);
|
||||
const deletedItems = yield BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
yield Note.delete(note2.id);
|
||||
yield synchronizerStart();
|
||||
})));
|
||||
it('should delete remote folder', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
yield Folder.save({ title: 'folder1' });
|
||||
const folder2 = yield Folder.save({ title: 'folder2' });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield sleep(0.1);
|
||||
yield Folder.delete(folder2.id);
|
||||
yield synchronizerStart();
|
||||
const all = yield allNotesFolders();
|
||||
yield localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should delete local folder', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
yield Folder.save({ title: 'folder1' });
|
||||
const folder2 = yield Folder.save({ title: 'folder2' });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
yield Folder.delete(folder2.id);
|
||||
yield synchronizerStart();
|
||||
yield switchClient(1);
|
||||
yield synchronizerStart();
|
||||
const items = yield allNotesFolders();
|
||||
yield localNotesFoldersSameAsRemote(items, expect);
|
||||
})));
|
||||
it('should resolve conflict if remote folder has been deleted, but note has been added to folder locally', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
yield synchronizerStart();
|
||||
|
@ -281,7 +103,7 @@ describe('synchronizer', function () {
|
|||
yield switchClient(1);
|
||||
yield Note.save({ title: 'note1', parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
const items = yield allNotesFolders();
|
||||
const items = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
expect(items.length).toBe(1);
|
||||
expect(items[0].title).toBe('note1');
|
||||
expect(items[0].is_conflict).toBe(1);
|
||||
|
@ -297,32 +119,10 @@ describe('synchronizer', function () {
|
|||
yield switchClient(1);
|
||||
yield Note.delete(note.id);
|
||||
yield synchronizerStart();
|
||||
const items = yield allNotesFolders();
|
||||
const items = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
expect(items.length).toBe(1);
|
||||
expect(items[0].title).toBe('folder');
|
||||
yield localNotesFoldersSameAsRemote(items, expect);
|
||||
})));
|
||||
it('should cross delete all folders', (() => __awaiter(this, void 0, void 0, function* () {
    // If client1 and 2 have two folders, client 1 deletes item 1 and client
    // 2 deletes item 2, they should both end up with no items after sync.
    const folder1 = yield Folder.save({ title: 'folder1' });
    const folder2 = yield Folder.save({ title: 'folder2' });
    yield synchronizerStart();
    yield switchClient(2);
    yield synchronizerStart();
    yield sleep(0.1);
    yield Folder.delete(folder1.id);
    yield switchClient(1);
    yield Folder.delete(folder2.id);
    yield synchronizerStart();
    yield switchClient(2);
    yield synchronizerStart();
    // The helpers now live in test-utils-synchronizer (see require at top of
    // file); the bare `allNotesFolders` is no longer in scope here.
    const items2 = yield test_utils_synchronizer_1.allNotesFolders();
    yield switchClient(1);
    yield synchronizerStart();
    const items1 = yield test_utils_synchronizer_1.allNotesFolders();
    expect(items1.length).toBe(0);
    expect(items1.length).toBe(items2.length);
    // Fix: previously passed `items`, which is undefined in this scope
    // (only `items1`/`items2` exist) and would throw a ReferenceError.
    yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(items1, expect);
})));
|
||||
it('should handle conflict when remote note is deleted then local note is modified', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
|
@ -358,7 +158,7 @@ describe('synchronizer', function () {
|
|||
const newTitle = 'Modified after having been deleted';
|
||||
yield Folder.save({ id: folder1.id, title: newTitle });
|
||||
yield synchronizerStart();
|
||||
const items = yield allNotesFolders();
|
||||
const items = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
expect(items.length).toBe(1);
|
||||
})));
|
||||
it('should allow duplicate folder titles', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
|
@ -509,41 +309,6 @@ describe('synchronizer', function () {
|
|||
it('should always handle conflict if local or remote are encrypted', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
yield ignorableNoteConflictTest(true);
|
||||
})));
|
||||
it('items should be downloaded again when user cancels in the middle of delta operation', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
yield Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
yield synchronizerStart();
|
||||
yield switchClient(2);
|
||||
synchronizer().testingHooks_ = ['cancelDeltaLoop2'];
|
||||
yield synchronizerStart();
|
||||
let notes = yield Note.all();
|
||||
expect(notes.length).toBe(0);
|
||||
synchronizer().testingHooks_ = [];
|
||||
yield synchronizerStart();
|
||||
notes = yield Note.all();
|
||||
expect(notes.length).toBe(1);
|
||||
})));
|
||||
it('should skip items that cannot be synced', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder1 = yield Folder.save({ title: 'folder1' });
|
||||
const note1 = yield Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
const noteId = note1.id;
|
||||
yield synchronizerStart();
|
||||
let disabledItems = yield BaseItem.syncDisabledItems(syncTargetId());
|
||||
expect(disabledItems.length).toBe(0);
|
||||
yield Note.save({ id: noteId, title: 'un mod' });
|
||||
synchronizer().testingHooks_ = ['notesRejectedByTarget'];
|
||||
yield synchronizerStart();
|
||||
synchronizer().testingHooks_ = [];
|
||||
yield synchronizerStart(); // Another sync to check that this item is now excluded from sync
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
const notes = yield Note.all();
|
||||
expect(notes.length).toBe(1);
|
||||
expect(notes[0].title).toBe('un');
|
||||
yield switchClient(1);
|
||||
disabledItems = yield BaseItem.syncDisabledItems(syncTargetId());
|
||||
expect(disabledItems.length).toBe(1);
|
||||
})));
|
||||
it('notes and folders should get encrypted when encryption is enabled', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
Setting_1.default.setValue('encryption.enabled', true);
|
||||
const masterKey = yield loadEncryptionMasterKey();
|
||||
|
@ -659,7 +424,7 @@ describe('synchronizer', function () {
|
|||
const resource1 = (yield Resource.all())[0];
|
||||
const resourcePath1 = Resource.fullPath(resource1);
|
||||
yield synchronizerStart();
|
||||
expect((yield remoteNotesFoldersResources()).length).toBe(3);
|
||||
expect((yield test_utils_synchronizer_1.remoteNotesFoldersResources()).length).toBe(3);
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
const allResources = yield Resource.all();
|
||||
|
@ -732,10 +497,10 @@ describe('synchronizer', function () {
|
|||
yield synchronizerStart();
|
||||
let allResources = yield Resource.all();
|
||||
expect(allResources.length).toBe(1);
|
||||
expect((yield remoteNotesFoldersResources()).length).toBe(3);
|
||||
expect((yield test_utils_synchronizer_1.remoteNotesFoldersResources()).length).toBe(3);
|
||||
yield Resource.delete(resource1.id);
|
||||
yield synchronizerStart();
|
||||
expect((yield remoteNotesFoldersResources()).length).toBe(2);
|
||||
expect((yield test_utils_synchronizer_1.remoteNotesFoldersResources()).length).toBe(2);
|
||||
const remoteBlob = yield fileApi().stat(`.resource/${resource1.id}`);
|
||||
expect(!remoteBlob).toBe(true);
|
||||
yield switchClient(1);
|
||||
|
@ -974,9 +739,9 @@ describe('synchronizer', function () {
|
|||
it('should create remote items with UTF-8 content', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder = yield Folder.save({ title: 'Fahrräder' });
|
||||
yield Note.save({ title: 'Fahrräder', body: 'Fahrräder', parent_id: folder.id });
|
||||
const all = yield allNotesFolders();
|
||||
const all = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
yield synchronizerStart();
|
||||
yield localNotesFoldersSameAsRemote(all, expect);
|
||||
yield test_utils_synchronizer_1.localNotesFoldersSameAsRemote(all, expect);
|
||||
})));
|
||||
it('should update remote items but not pull remote changes', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const folder = yield Folder.save({ title: 'folder1' });
|
||||
|
@ -989,7 +754,7 @@ describe('synchronizer', function () {
|
|||
yield switchClient(1);
|
||||
yield Note.save({ title: 'un UPDATE', id: note.id });
|
||||
yield synchronizerStart(null, { syncSteps: ['update_remote'] });
|
||||
const all = yield allNotesFolders();
|
||||
const all = yield test_utils_synchronizer_1.allNotesFolders();
|
||||
expect(all.length).toBe(2);
|
||||
yield switchClient(2);
|
||||
yield synchronizerStart();
|
||||
|
@ -1154,10 +919,10 @@ describe('synchronizer', function () {
|
|||
const resource = (yield Resource.all())[0];
|
||||
yield Resource.setLocalState(resource.id, { fetch_status: Resource.FETCH_STATUS_IDLE });
|
||||
yield synchronizerStart();
|
||||
expect((yield remoteResources()).length).toBe(0);
|
||||
expect((yield test_utils_synchronizer_1.remoteResources()).length).toBe(0);
|
||||
yield Resource.setLocalState(resource.id, { fetch_status: Resource.FETCH_STATUS_DONE });
|
||||
yield synchronizerStart();
|
||||
expect((yield remoteResources()).length).toBe(1);
|
||||
expect((yield test_utils_synchronizer_1.remoteResources()).length).toBe(1);
|
||||
})));
|
||||
it('should decrypt the resource metadata, but not try to decrypt the file, if it is not present', (() => __awaiter(this, void 0, void 0, function* () {
|
||||
const note1 = yield Note.save({ title: 'note' });
|
||||
|
|
|
@ -3,6 +3,7 @@ import shim from '@joplin/lib/shim';
|
|||
import Setting from '@joplin/lib/models/Setting';
|
||||
import BaseModel from '@joplin/lib/BaseModel';
|
||||
import { NoteEntity } from '@joplin/lib/services/database/types';
|
||||
import { allNotesFolders, remoteNotesAndFolders, remoteNotesFoldersResources, remoteResources, localNotesFoldersSameAsRemote } from './test-utils-synchronizer';
|
||||
|
||||
const { synchronizerStart, syncTargetName, allSyncTargetItemsEncrypted, tempFilePath, resourceFetcher, kvStore, revisionService, setupDatabaseAndSynchronizer, synchronizer, fileApi, sleep, switchClient, syncTargetId, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, checkThrowAsync } = require('./test-utils.js');
|
||||
const Folder = require('@joplin/lib/models/Folder.js');
|
||||
|
@ -15,65 +16,6 @@ const BaseItem = require('@joplin/lib/models/BaseItem.js');
|
|||
const Revision = require('@joplin/lib/models/Revision.js');
|
||||
const WelcomeUtils = require('@joplin/lib/WelcomeUtils');
|
||||
|
||||
async function allNotesFolders() {
|
||||
const folders = await Folder.all();
|
||||
const notes = await Note.all();
|
||||
return folders.concat(notes);
|
||||
}
|
||||
|
||||
async function remoteItemsByTypes(types: number[]) {
|
||||
const list = await fileApi().list('', { includeDirs: false, syncItemsOnly: true });
|
||||
if (list.has_more) throw new Error('Not implemented!!!');
|
||||
const files = list.items;
|
||||
|
||||
const output = [];
|
||||
for (const file of files) {
|
||||
const remoteContent = await fileApi().get(file.path);
|
||||
const content = await BaseItem.unserialize(remoteContent);
|
||||
if (types.indexOf(content.type_) < 0) continue;
|
||||
output.push(content);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
async function remoteNotesAndFolders() {
|
||||
return remoteItemsByTypes([BaseModel.TYPE_NOTE, BaseModel.TYPE_FOLDER]);
|
||||
}
|
||||
|
||||
async function remoteNotesFoldersResources() {
|
||||
return remoteItemsByTypes([BaseModel.TYPE_NOTE, BaseModel.TYPE_FOLDER, BaseModel.TYPE_RESOURCE]);
|
||||
}
|
||||
|
||||
async function remoteResources() {
|
||||
return remoteItemsByTypes([BaseModel.TYPE_RESOURCE]);
|
||||
}
|
||||
|
||||
async function localNotesFoldersSameAsRemote(locals: any[], expect: Function) {
|
||||
let error = null;
|
||||
try {
|
||||
const nf = await remoteNotesAndFolders();
|
||||
expect(locals.length).toBe(nf.length);
|
||||
|
||||
for (let i = 0; i < locals.length; i++) {
|
||||
const dbItem = locals[i];
|
||||
const path = BaseItem.systemPath(dbItem);
|
||||
const remote = await fileApi().stat(path);
|
||||
|
||||
expect(!!remote).toBe(true);
|
||||
if (!remote) continue;
|
||||
|
||||
let remoteContent = await fileApi().get(path);
|
||||
|
||||
remoteContent = dbItem.type_ == BaseModel.TYPE_NOTE ? await Note.unserialize(remoteContent) : await Folder.unserialize(remoteContent);
|
||||
expect(remoteContent.title).toBe(dbItem.title);
|
||||
}
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
|
||||
expect(error).toBe(null);
|
||||
}
|
||||
|
||||
let insideBeforeEach = false;
|
||||
|
||||
describe('synchronizer', function() {
|
||||
|
@ -89,71 +31,6 @@ describe('synchronizer', function() {
|
|||
insideBeforeEach = false;
|
||||
});
|
||||
|
||||
it('should create remote items', (async () => {
|
||||
const folder = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'un', parent_id: folder.id });
|
||||
|
||||
const all = await allNotesFolders();
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should update remote items', (async () => {
|
||||
const folder = await Folder.save({ title: 'folder1' });
|
||||
const note = await Note.save({ title: 'un', parent_id: folder.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await Note.save({ title: 'un UPDATE', id: note.id });
|
||||
|
||||
const all = await allNotesFolders();
|
||||
await synchronizerStart();
|
||||
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should create local items', (async () => {
|
||||
const folder = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'un', parent_id: folder.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const all = await allNotesFolders();
|
||||
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should update local items', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await sleep(0.1);
|
||||
|
||||
let note2 = await Note.load(note1.id);
|
||||
note2.title = 'Updated on client 2';
|
||||
await Note.save(note2);
|
||||
note2 = await Note.load(note2.id);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const all = await allNotesFolders();
|
||||
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should resolve note conflicts', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
|
@ -228,111 +105,6 @@ describe('synchronizer', function() {
|
|||
expect(folder1_final.title).toBe(folder1_modRemote.title);
|
||||
}));
|
||||
|
||||
it('should delete remote notes', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await sleep(0.1);
|
||||
|
||||
await Note.delete(note1.id);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const remotes = await remoteNotesAndFolders();
|
||||
expect(remotes.length).toBe(1);
|
||||
expect(remotes[0].id).toBe(folder1.id);
|
||||
|
||||
const deletedItems = await BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
}));
|
||||
|
||||
it('should not created deleted_items entries for items deleted via sync', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await Folder.delete(folder1.id);
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
const deletedItems = await BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
}));
|
||||
|
||||
it('should delete local notes', (async () => {
|
||||
// For these tests we pass the context around for each user. This is to make sure that the "deletedItemsProcessed"
|
||||
// property of the basicDelta() function is cleared properly at the end of a sync operation. If it is not cleared
|
||||
// it means items will no longer be deleted locally via sync.
|
||||
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
const note2 = await Note.save({ title: 'deux', parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await Note.delete(note1.id);
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
const items = await allNotesFolders();
|
||||
expect(items.length).toBe(2);
|
||||
const deletedItems = await BaseItem.deletedItems(syncTargetId());
|
||||
expect(deletedItems.length).toBe(0);
|
||||
await Note.delete(note2.id);
|
||||
await synchronizerStart();
|
||||
}));
|
||||
|
||||
it('should delete remote folder', (async () => {
|
||||
await Folder.save({ title: 'folder1' });
|
||||
const folder2 = await Folder.save({ title: 'folder2' });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await sleep(0.1);
|
||||
|
||||
await Folder.delete(folder2.id);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const all = await allNotesFolders();
|
||||
await localNotesFoldersSameAsRemote(all, expect);
|
||||
}));
|
||||
|
||||
it('should delete local folder', (async () => {
|
||||
await Folder.save({ title: 'folder1' });
|
||||
const folder2 = await Folder.save({ title: 'folder2' });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await Folder.delete(folder2.id);
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
const items = await allNotesFolders();
|
||||
await localNotesFoldersSameAsRemote(items, expect);
|
||||
}));
|
||||
|
||||
it('should resolve conflict if remote folder has been deleted, but note has been added to folder locally', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await synchronizerStart();
|
||||
|
@ -376,38 +148,6 @@ describe('synchronizer', function() {
|
|||
await localNotesFoldersSameAsRemote(items, expect);
|
||||
}));
|
||||
|
||||
it('should cross delete all folders', (async () => {
|
||||
// If client1 and 2 have two folders, client 1 deletes item 1 and client
|
||||
// 2 deletes item 2, they should both end up with no items after sync.
|
||||
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const folder2 = await Folder.save({ title: 'folder2' });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await sleep(0.1);
|
||||
await Folder.delete(folder1.id);
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await Folder.delete(folder2.id);
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
const items2 = await allNotesFolders();
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
const items1 = await allNotesFolders();
|
||||
expect(items1.length).toBe(0);
|
||||
expect(items1.length).toBe(items2.length);
|
||||
}));
|
||||
|
||||
it('should handle conflict when remote note is deleted then local note is modified', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
|
@ -657,50 +397,6 @@ describe('synchronizer', function() {
|
|||
await ignorableNoteConflictTest(true);
|
||||
}));
|
||||
|
||||
it('items should be downloaded again when user cancels in the middle of delta operation', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
synchronizer().testingHooks_ = ['cancelDeltaLoop2'];
|
||||
await synchronizerStart();
|
||||
let notes = await Note.all();
|
||||
expect(notes.length).toBe(0);
|
||||
|
||||
synchronizer().testingHooks_ = [];
|
||||
await synchronizerStart();
|
||||
notes = await Note.all();
|
||||
expect(notes.length).toBe(1);
|
||||
}));
|
||||
|
||||
it('should skip items that cannot be synced', (async () => {
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
const noteId = note1.id;
|
||||
await synchronizerStart();
|
||||
let disabledItems = await BaseItem.syncDisabledItems(syncTargetId());
|
||||
expect(disabledItems.length).toBe(0);
|
||||
await Note.save({ id: noteId, title: 'un mod' });
|
||||
synchronizer().testingHooks_ = ['notesRejectedByTarget'];
|
||||
await synchronizerStart();
|
||||
synchronizer().testingHooks_ = [];
|
||||
await synchronizerStart(); // Another sync to check that this item is now excluded from sync
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
const notes = await Note.all();
|
||||
expect(notes.length).toBe(1);
|
||||
expect(notes[0].title).toBe('un');
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
disabledItems = await BaseItem.syncDisabledItems(syncTargetId());
|
||||
expect(disabledItems.length).toBe(1);
|
||||
}));
|
||||
|
||||
it('notes and folders should get encrypted when encryption is enabled', (async () => {
|
||||
Setting.setValue('encryption.enabled', true);
|
||||
const masterKey = await loadEncryptionMasterKey();
|
||||
|
|
|
@ -0,0 +1,86 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.localNotesFoldersSameAsRemote = exports.remoteResources = exports.remoteNotesFoldersResources = exports.remoteNotesAndFolders = exports.allNotesFolders = void 0;
|
||||
const BaseModel_1 = require("@joplin/lib/BaseModel");
|
||||
const { fileApi } = require('./test-utils.js');
|
||||
const Folder = require('@joplin/lib/models/Folder.js');
|
||||
const Note = require('@joplin/lib/models/Note.js');
|
||||
const BaseItem = require('@joplin/lib/models/BaseItem.js');
|
||||
// Returns every folder and note in the local database as one array
// (folders first, then notes).
function allNotesFolders() {
    return __awaiter(this, void 0, void 0, function* () {
        const folders = yield Folder.all();
        const notes = yield Note.all();
        return folders.concat(notes);
    });
}
exports.allNotesFolders = allNotesFolders;
|
||||
// Downloads every sync item from the remote file API, unserializes each one,
// and returns only the items whose type_ is listed in `types`.
// Throws when the remote listing is paginated (has_more) — pagination is not
// handled by this helper.
function remoteItemsByTypes(types) {
    return __awaiter(this, void 0, void 0, function* () {
        const list = yield fileApi().list('', { includeDirs: false, syncItemsOnly: true });
        if (list.has_more)
            throw new Error('Not implemented!!!');
        const files = list.items;
        const output = [];
        for (const file of files) {
            const remoteContent = yield fileApi().get(file.path);
            const content = yield BaseItem.unserialize(remoteContent);
            // Skip items of a type the caller did not ask for.
            if (types.indexOf(content.type_) < 0)
                continue;
            output.push(content);
        }
        return output;
    });
}
|
||||
// Remote items that are notes or folders (resources excluded).
function remoteNotesAndFolders() {
    return __awaiter(this, void 0, void 0, function* () {
        return remoteItemsByTypes([BaseModel_1.default.TYPE_NOTE, BaseModel_1.default.TYPE_FOLDER]);
    });
}
exports.remoteNotesAndFolders = remoteNotesAndFolders;
|
||||
// Remote items that are notes, folders or resources.
function remoteNotesFoldersResources() {
    return __awaiter(this, void 0, void 0, function* () {
        return remoteItemsByTypes([BaseModel_1.default.TYPE_NOTE, BaseModel_1.default.TYPE_FOLDER, BaseModel_1.default.TYPE_RESOURCE]);
    });
}
exports.remoteNotesFoldersResources = remoteNotesFoldersResources;
|
||||
// Remote items that are resources only.
function remoteResources() {
    return __awaiter(this, void 0, void 0, function* () {
        return remoteItemsByTypes([BaseModel_1.default.TYPE_RESOURCE]);
    });
}
exports.remoteResources = remoteResources;
|
||||
// Asserts that `locals` (notes/folders from the local DB) matches the sync
// target: same item count, every local item present remotely, and matching
// titles. Any error thrown inside the loop is captured and surfaced through
// the final `expect(error).toBe(null)` so the calling test fails cleanly.
function localNotesFoldersSameAsRemote(locals, expect) {
    return __awaiter(this, void 0, void 0, function* () {
        let error = null;
        try {
            const nf = yield remoteNotesAndFolders();
            expect(locals.length).toBe(nf.length);
            for (let i = 0; i < locals.length; i++) {
                const dbItem = locals[i];
                const path = BaseItem.systemPath(dbItem);
                const remote = yield fileApi().stat(path);
                expect(!!remote).toBe(true);
                if (!remote)
                    continue;
                let remoteContent = yield fileApi().get(path);
                // Unserialize with the model matching the item type before comparing.
                remoteContent = dbItem.type_ == BaseModel_1.default.TYPE_NOTE ? yield Note.unserialize(remoteContent) : yield Folder.unserialize(remoteContent);
                expect(remoteContent.title).toBe(dbItem.title);
            }
        }
        catch (e) {
            error = e;
        }
        expect(error).toBe(null);
    });
}
exports.localNotesFoldersSameAsRemote = localNotesFoldersSameAsRemote;
|
||||
//# sourceMappingURL=test-utils-synchronizer.js.map
|
|
@ -0,0 +1,65 @@
|
|||
import BaseModel from '@joplin/lib/BaseModel';
|
||||
|
||||
const { fileApi } = require('./test-utils.js');
|
||||
const Folder = require('@joplin/lib/models/Folder.js');
|
||||
const Note = require('@joplin/lib/models/Note.js');
|
||||
const BaseItem = require('@joplin/lib/models/BaseItem.js');
|
||||
|
||||
export async function allNotesFolders() {
|
||||
const folders = await Folder.all();
|
||||
const notes = await Note.all();
|
||||
return folders.concat(notes);
|
||||
}
|
||||
|
||||
async function remoteItemsByTypes(types: number[]) {
|
||||
const list = await fileApi().list('', { includeDirs: false, syncItemsOnly: true });
|
||||
if (list.has_more) throw new Error('Not implemented!!!');
|
||||
const files = list.items;
|
||||
|
||||
const output = [];
|
||||
for (const file of files) {
|
||||
const remoteContent = await fileApi().get(file.path);
|
||||
const content = await BaseItem.unserialize(remoteContent);
|
||||
if (types.indexOf(content.type_) < 0) continue;
|
||||
output.push(content);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
export async function remoteNotesAndFolders() {
|
||||
return remoteItemsByTypes([BaseModel.TYPE_NOTE, BaseModel.TYPE_FOLDER]);
|
||||
}
|
||||
|
||||
export async function remoteNotesFoldersResources() {
|
||||
return remoteItemsByTypes([BaseModel.TYPE_NOTE, BaseModel.TYPE_FOLDER, BaseModel.TYPE_RESOURCE]);
|
||||
}
|
||||
|
||||
export async function remoteResources() {
|
||||
return remoteItemsByTypes([BaseModel.TYPE_RESOURCE]);
|
||||
}
|
||||
|
||||
export async function localNotesFoldersSameAsRemote(locals: any[], expect: Function) {
|
||||
let error = null;
|
||||
try {
|
||||
const nf = await remoteNotesAndFolders();
|
||||
expect(locals.length).toBe(nf.length);
|
||||
|
||||
for (let i = 0; i < locals.length; i++) {
|
||||
const dbItem = locals[i];
|
||||
const path = BaseItem.systemPath(dbItem);
|
||||
const remote = await fileApi().stat(path);
|
||||
|
||||
expect(!!remote).toBe(true);
|
||||
if (!remote) continue;
|
||||
|
||||
let remoteContent = await fileApi().get(path);
|
||||
|
||||
remoteContent = dbItem.type_ == BaseModel.TYPE_NOTE ? await Note.unserialize(remoteContent) : await Folder.unserialize(remoteContent);
|
||||
expect(remoteContent.title).toBe(dbItem.title);
|
||||
}
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
|
||||
expect(error).toBe(null);
|
||||
}
|
Loading…
Reference in New Issue