All: Allow deleting and syncing deleted resources

pull/306/head
Laurent Cozic 2018-03-15 17:46:54 +00:00
parent df7b981e5e
commit 945018b698
5 changed files with 56 additions and 6 deletions


@@ -883,6 +883,37 @@ describe('Synchronizer', function() {
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
}));
it('should delete resources', asyncTest(async () => {
while (insideBeforeEach) await time.msleep(500);
let folder1 = await Folder.save({ title: "folder1" });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
await shim.attachFileToNote(note1, __dirname + '/../tests/support/photo.jpg');
let resource1 = (await Resource.all())[0];
let resourcePath1 = Resource.fullPath(resource1);
await synchronizer().start();
await switchClient(2);
await synchronizer().start();
let allResources = await Resource.all();
expect(allResources.length).toBe(1);
let all = await fileApi().list();
expect(all.items.length).toBe(3);
await Resource.delete(resource1.id);
await synchronizer().start();
all = await fileApi().list();
expect(all.items.length).toBe(2);
await switchClient(1);
expect(await shim.fsDriver().exists(resourcePath1)).toBe(true);
await synchronizer().start();
allResources = await Resource.all();
expect(allResources.length).toBe(0);
expect(await shim.fsDriver().exists(resourcePath1)).toBe(false);
}));
it('should encrypt resources', asyncTest(async () => {
Setting.setValue('encryption.enabled', true);
const masterKey = await loadEncryptionMasterKey();


@@ -16,6 +16,7 @@ const { FileApi } = require('lib/file-api.js');
const { FileApiDriverMemory } = require('lib/file-api-driver-memory.js');
const { FileApiDriverLocal } = require('lib/file-api-driver-local.js');
const { FileApiDriverWebDav } = require('lib/file-api-driver-webdav.js');
const BaseService = require('lib/services/BaseService.js');
const { FsDriverNode } = require('lib/fs-driver-node.js');
const { time } = require('lib/time-utils.js');
const { shimInit } = require('lib/shim-init-node.js');
@@ -63,7 +64,7 @@ console.info('Testing with sync target: ' + SyncTargetRegistry.idToName(syncTarg
const logger = new Logger();
logger.addTarget('console');
logger.addTarget('file', { path: logDir + '/log.txt' });
logger.setLevel(Logger.LEVEL_WARN); // Set to INFO to display sync process in console
logger.setLevel(Logger.LEVEL_WARN); // Set to DEBUG to display sync process in console
BaseItem.loadClass('Note', Note);
BaseItem.loadClass('Folder', Folder);
@@ -75,6 +76,8 @@ BaseItem.loadClass('MasterKey', MasterKey);
Setting.setConstant('appId', 'net.cozic.joplin-cli');
Setting.setConstant('appType', 'cli');
BaseService.logger_ = logger;
Setting.autoSaveEnabled = false;
function syncTargetId() {
@@ -118,8 +121,9 @@ async function clearDatabase(id = null) {
'DELETE FROM tags',
'DELETE FROM note_tags',
'DELETE FROM master_keys',
'DELETE FROM settings',
'DELETE FROM item_changes',
'DELETE FROM note_resources',
'DELETE FROM settings',
'DELETE FROM deleted_items',
'DELETE FROM sync_items',
];


@@ -243,7 +243,9 @@ async function basicDelta(path, getDirStatFn, options) {
newContext.statsCache.sort(function(a, b) {
return a.updated_time - b.updated_time;
});
newContext.statIdsCache = newContext.statsCache.map((item) => BaseItem.pathToId(item.path));
newContext.statIdsCache = newContext.statsCache
.filter(item => BaseItem.isSystemPath(item.path))
.map(item => BaseItem.pathToId(item.path));
newContext.statIdsCache.sort(); // Items must be sorted to use binary search below
}
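
The added filter matters because the remote listing can include entries that are not item files; passing those through BaseItem.pathToId() would put bogus values into statIdsCache. Keeping only system paths means the cache holds real item IDs, which is what the sorted binary search mentioned in the comment relies on, presumably so the code further down can quickly check whether an ID it already knows about is still present in the listing. As a rough sketch of that kind of lookup (an illustration only, not the actual basicDelta code):

// Sketch only: with statIdsCache sorted, a binary search answers
// "is this ID still present remotely?" in O(log n).
function idInSortedCache(id, sortedIds) {
	let low = 0;
	let high = sortedIds.length - 1;
	while (low <= high) {
		const mid = Math.floor((low + high) / 2);
		if (sortedIds[mid] === id) return true;
		if (sortedIds[mid] < id) low = mid + 1;
		else high = mid - 1;
	}
	return false; // absent from the listing, so possibly deleted remotely
}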


@@ -436,8 +436,8 @@ class Note extends BaseItem {
// });
// }
static batchDelete(ids, options = null) {
const result = super.batchDelete(ids, options);
static async batchDelete(ids, options = null) {
const result = await super.batchDelete(ids, options);
for (let i = 0; i < ids.length; i++) {
ItemChange.add(BaseModel.TYPE_NOTE, ids[i], ItemChange.TYPE_DELETE);
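
The async/await change above is about ordering: previously super.batchDelete() was called without being awaited, so the ItemChange.TYPE_DELETE entries in the loop were queued before the rows had actually been removed, and they would have been queued even if the deletion failed. With the await, the change records are only added once the deletion has completed. From the caller's side nothing changes beyond the usual await; a hypothetical sketch with placeholder IDs:

// Placeholder IDs, for illustration only.
const idsToDelete = ['<note id 1>', '<note id 2>'];
await Note.batchDelete(idsToDelete);
// Only now are the rows gone and the ItemChange.TYPE_DELETE entries queued;
// had the deletion thrown, no change records would have been added.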


@@ -143,6 +143,19 @@ class Resource extends BaseItem {
return url.substr(2);
}
static async batchDelete(ids, options = null) {
// For resources there is no real batch deleting, since the file data has to be deleted
// too, so each one is processed individually, with the database item deleted last (the
// db call being the least likely to fail).
for (let i = 0; i < ids.length; i++) {
const id = ids[i];
const resource = await Resource.load(id);
const path = Resource.fullPath(resource);
await this.fsDriver().remove(path);
await super.batchDelete([id], options)
}
}
}
Resource.IMAGE_MAX_DIMENSION = 1920;
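
Note that the new Resource.batchDelete() removes the file data from disk first and the database row last, so a failed file removal leaves the record in place rather than orphaning the file on disk, which is the trade-off the comment describes. Assuming Resource.delete() funnels into batchDelete() (which is what the new test above relies on), a minimal usage sketch, using only calls that appear elsewhere in this commit, looks like this:

// Usage sketch mirroring the new synchronizer test.
const resource = (await Resource.all())[0];
const resourcePath = Resource.fullPath(resource);
await Resource.delete(resource.id); // assumed to go through the new batchDelete()
console.info(await shim.fsDriver().exists(resourcePath)); // false: file and row are both gone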