mirror of https://github.com/laurent22/joplin.git
Merge branch 'master' into master
commit
0a1ba511ea
|
@ -51,6 +51,8 @@ ReactNativeClient/pluginAssets/
|
|||
ReactNativeClient/lib/joplin-renderer/vendor/fountain.min.js
|
||||
ReactNativeClient/lib/joplin-renderer/assets/
|
||||
ReactNativeClient/lib/rnInjectedJs/
|
||||
Clipper/popup/config/webpack.config.js
|
||||
Clipper/popup/scripts/build.js
|
||||
|
||||
# AUTO-GENERATED - EXCLUDED TYPESCRIPT BUILD
|
||||
ElectronClient/gui/editors/PlainEditor.js
|
||||
|
|
|
@ -54,16 +54,21 @@ module.exports = {
|
|||
// This error is always a false positive so far since it detects
|
||||
// possible race conditions in contexts where we know it cannot happen.
|
||||
"require-atomic-updates": 0,
|
||||
"prefer-const": ["error"],
|
||||
"no-var": ["error"],
|
||||
|
||||
// Checks rules of Hooks
|
||||
"react-hooks/rules-of-hooks": "error",
|
||||
// Checks effect dependencies
|
||||
"react-hooks/exhaustive-deps": "warn",
|
||||
// Disable because of this: https://github.com/facebook/react/issues/16265
|
||||
// "react-hooks/exhaustive-deps": "warn",
|
||||
|
||||
// -------------------------------
|
||||
// Formatting
|
||||
// -------------------------------
|
||||
"space-in-parens": ["error", "never"],
|
||||
"space-infix-ops": ["error"],
|
||||
"curly": ["error", "multi-line", "consistent"],
|
||||
"semi": ["error", "always"],
|
||||
"eol-last": ["error", "always"],
|
||||
"quotes": ["error", "single"],
|
||||
|
@ -92,7 +97,7 @@ module.exports = {
|
|||
"multiline-comment-style": ["error", "separate-lines"],
|
||||
"space-before-blocks": "error",
|
||||
"spaced-comment": ["error", "always"],
|
||||
"keyword-spacing": ["error", { "before": true, "after": true }]
|
||||
"keyword-spacing": ["error", { "before": true, "after": true }],
|
||||
},
|
||||
"plugins": [
|
||||
"react",
|
||||
|
|
|
@ -134,7 +134,7 @@ class AppGui {
|
|||
const item = folderList.currentItem;
|
||||
|
||||
if (item === '-') {
|
||||
let newIndex = event.currentIndex + (event.previousIndex < event.currentIndex ? +1 : -1);
|
||||
const newIndex = event.currentIndex + (event.previousIndex < event.currentIndex ? +1 : -1);
|
||||
let nextItem = folderList.itemAt(newIndex);
|
||||
if (!nextItem) nextItem = folderList.itemAt(event.previousIndex);
|
||||
|
||||
|
@ -186,7 +186,7 @@ class AppGui {
|
|||
borderRightWidth: 1,
|
||||
};
|
||||
noteList.on('currentItemChange', async () => {
|
||||
let note = noteList.currentItem;
|
||||
const note = noteList.currentItem;
|
||||
this.store_.dispatch({
|
||||
type: 'NOTE_SELECT',
|
||||
id: note ? note.id : null,
|
||||
|
@ -338,7 +338,7 @@ class AppGui {
|
|||
|
||||
if (consoleWidget.isMaximized__ === doMaximize) return;
|
||||
|
||||
let constraints = {
|
||||
const constraints = {
|
||||
type: 'stretch',
|
||||
factor: !doMaximize ? 1 : 4,
|
||||
};
|
||||
|
@ -415,10 +415,10 @@ class AppGui {
|
|||
async handleModelAction(action) {
|
||||
this.logger().info('Action:', action);
|
||||
|
||||
let state = Object.assign({}, defaultState);
|
||||
const state = Object.assign({}, defaultState);
|
||||
state.notes = this.widget('noteList').items;
|
||||
|
||||
let newState = reducer(state, action);
|
||||
const newState = reducer(state, action);
|
||||
|
||||
if (newState !== state) {
|
||||
this.widget('noteList').items = newState.notes;
|
||||
|
@ -485,9 +485,9 @@ class AppGui {
|
|||
// this.logger().debug('Got command: ' + cmd);
|
||||
|
||||
try {
|
||||
let note = this.widget('noteList').currentItem;
|
||||
let folder = this.widget('folderList').currentItem;
|
||||
let args = splitCommandString(cmd);
|
||||
const note = this.widget('noteList').currentItem;
|
||||
const folder = this.widget('folderList').currentItem;
|
||||
const args = splitCommandString(cmd);
|
||||
|
||||
for (let i = 0; i < args.length; i++) {
|
||||
if (args[i] == '$n') {
|
||||
|
@ -548,7 +548,7 @@ class AppGui {
|
|||
stdout(text) {
|
||||
if (text === null || text === undefined) return;
|
||||
|
||||
let lines = text.split('\n');
|
||||
const lines = text.split('\n');
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const v = typeof lines[i] === 'object' ? JSON.stringify(lines[i]) : lines[i];
|
||||
this.widget('console').addLine(v);
|
||||
|
@ -626,7 +626,7 @@ class AppGui {
|
|||
|
||||
if (link.type === 'item') {
|
||||
const itemId = link.id;
|
||||
let item = await BaseItem.loadItemById(itemId);
|
||||
const item = await BaseItem.loadItemById(itemId);
|
||||
if (!item) throw new Error(`No item with ID ${itemId}`); // Should be nearly impossible
|
||||
|
||||
if (item.type_ === BaseModel.TYPE_RESOURCE) {
|
||||
|
@ -750,7 +750,7 @@ class AppGui {
|
|||
// -------------------------------------------------------------------------
|
||||
|
||||
const shortcutKey = this.currentShortcutKeys_.join('');
|
||||
let keymapItem = this.keymapItemByKey(shortcutKey);
|
||||
const keymapItem = this.keymapItemByKey(shortcutKey);
|
||||
|
||||
// If this command is an alias to another command, resolve to the actual command
|
||||
|
||||
|
@ -766,7 +766,7 @@ class AppGui {
|
|||
if (keymapItem.type === 'function') {
|
||||
this.processFunctionCommand(keymapItem.command);
|
||||
} else if (keymapItem.type === 'prompt') {
|
||||
let promptOptions = {};
|
||||
const promptOptions = {};
|
||||
if ('cursorPosition' in keymapItem) promptOptions.cursorPosition = keymapItem.cursorPosition;
|
||||
const commandString = await statusBar.prompt(keymapItem.command ? keymapItem.command : '', null, promptOptions);
|
||||
this.addCommandToConsole(commandString);
|
||||
|
|
|
@ -47,7 +47,7 @@ class Application extends BaseApplication {
|
|||
}
|
||||
|
||||
async loadItem(type, pattern, options = null) {
|
||||
let output = await this.loadItems(type, pattern, options);
|
||||
const output = await this.loadItems(type, pattern, options);
|
||||
|
||||
if (output.length > 1) {
|
||||
// output.sort((a, b) => { return a.user_updated_time < b.user_updated_time ? +1 : -1; });
|
||||
|
@ -144,7 +144,7 @@ class Application extends BaseApplication {
|
|||
if (options.type === 'boolean') {
|
||||
if (answer === null) return false; // Pressed ESCAPE
|
||||
if (!answer) answer = options.answers[0];
|
||||
let positiveIndex = options.booleanAnswerDefault == 'y' ? 0 : 1;
|
||||
const positiveIndex = options.booleanAnswerDefault == 'y' ? 0 : 1;
|
||||
return answer.toLowerCase() === options.answers[positiveIndex].toLowerCase();
|
||||
} else {
|
||||
return answer;
|
||||
|
@ -181,7 +181,7 @@ class Application extends BaseApplication {
|
|||
const ext = fileExtension(path);
|
||||
if (ext != 'js') return;
|
||||
|
||||
let CommandClass = require(`./${path}`);
|
||||
const CommandClass = require(`./${path}`);
|
||||
let cmd = new CommandClass();
|
||||
if (!cmd.enabled()) return;
|
||||
cmd = this.setupCommand(cmd);
|
||||
|
@ -192,8 +192,8 @@ class Application extends BaseApplication {
|
|||
}
|
||||
|
||||
if (uiType !== null) {
|
||||
let temp = [];
|
||||
for (let n in this.commands_) {
|
||||
const temp = [];
|
||||
for (const n in this.commands_) {
|
||||
if (!this.commands_.hasOwnProperty(n)) continue;
|
||||
const c = this.commands_[n];
|
||||
if (!c.supportsUi(uiType)) continue;
|
||||
|
@ -207,8 +207,8 @@ class Application extends BaseApplication {
|
|||
|
||||
async commandNames() {
|
||||
const metadata = await this.commandMetadata();
|
||||
let output = [];
|
||||
for (let n in metadata) {
|
||||
const output = [];
|
||||
for (const n in metadata) {
|
||||
if (!metadata.hasOwnProperty(n)) continue;
|
||||
output.push(n);
|
||||
}
|
||||
|
@ -227,7 +227,7 @@ class Application extends BaseApplication {
|
|||
const commands = this.commands();
|
||||
|
||||
output = {};
|
||||
for (let n in commands) {
|
||||
for (const n in commands) {
|
||||
if (!commands.hasOwnProperty(n)) continue;
|
||||
const cmd = commands[n];
|
||||
output[n] = cmd.metadata();
|
||||
|
@ -251,7 +251,7 @@ class Application extends BaseApplication {
|
|||
CommandClass = require(`${__dirname}/command-${name}.js`);
|
||||
} catch (error) {
|
||||
if (error.message && error.message.indexOf('Cannot find module') >= 0) {
|
||||
let e = new Error(_('No such command: %s', name));
|
||||
const e = new Error(_('No such command: %s', name));
|
||||
e.type = 'notFound';
|
||||
throw e;
|
||||
} else {
|
||||
|
@ -362,7 +362,7 @@ class Application extends BaseApplication {
|
|||
}
|
||||
|
||||
const output = [];
|
||||
for (let n in itemsByCommand) {
|
||||
for (const n in itemsByCommand) {
|
||||
if (!itemsByCommand.hasOwnProperty(n)) continue;
|
||||
output.push(itemsByCommand[n]);
|
||||
}
|
||||
|
|
|
@ -1,20 +1,20 @@
|
|||
var { app } = require('./app.js');
|
||||
var Note = require('lib/models/Note.js');
|
||||
var Folder = require('lib/models/Folder.js');
|
||||
var Tag = require('lib/models/Tag.js');
|
||||
var { cliUtils } = require('./cli-utils.js');
|
||||
var yargParser = require('yargs-parser');
|
||||
var fs = require('fs-extra');
|
||||
const { app } = require('./app.js');
|
||||
const Note = require('lib/models/Note.js');
|
||||
const Folder = require('lib/models/Folder.js');
|
||||
const Tag = require('lib/models/Tag.js');
|
||||
const { cliUtils } = require('./cli-utils.js');
|
||||
const yargParser = require('yargs-parser');
|
||||
const fs = require('fs-extra');
|
||||
|
||||
async function handleAutocompletionPromise(line) {
|
||||
// Auto-complete the command name
|
||||
const names = await app().commandNames();
|
||||
let words = getArguments(line);
|
||||
const words = getArguments(line);
|
||||
// If there is only one word and it is not already a command name then you
|
||||
// should look for commands it could be
|
||||
if (words.length == 1) {
|
||||
if (names.indexOf(words[0]) === -1) {
|
||||
let x = names.filter(n => n.indexOf(words[0]) === 0);
|
||||
const x = names.filter(n => n.indexOf(words[0]) === 0);
|
||||
if (x.length === 1) {
|
||||
return `${x[0]} `;
|
||||
}
|
||||
|
@ -36,8 +36,8 @@ async function handleAutocompletionPromise(line) {
|
|||
}
|
||||
|
||||
// complete an option
|
||||
let next = words.length > 1 ? words[words.length - 1] : '';
|
||||
let l = [];
|
||||
const next = words.length > 1 ? words[words.length - 1] : '';
|
||||
const l = [];
|
||||
if (next[0] === '-') {
|
||||
for (let i = 0; i < metadata.options.length; i++) {
|
||||
const options = metadata.options[i][0].split(' ');
|
||||
|
@ -60,7 +60,7 @@ async function handleAutocompletionPromise(line) {
|
|||
if (l.length === 0) {
|
||||
return line;
|
||||
}
|
||||
let ret = l.map(a => toCommandLine(a));
|
||||
const ret = l.map(a => toCommandLine(a));
|
||||
ret.prefix = `${toCommandLine(words.slice(0, -1))} `;
|
||||
return ret;
|
||||
}
|
||||
|
@ -69,7 +69,7 @@ async function handleAutocompletionPromise(line) {
|
|||
// words that don't start with a - less one for the command name
|
||||
const positionalArgs = words.filter(a => a.indexOf('-') !== 0).length - 1;
|
||||
|
||||
let cmdUsage = yargParser(metadata.usage)['_'];
|
||||
const cmdUsage = yargParser(metadata.usage)['_'];
|
||||
cmdUsage.splice(0, 1);
|
||||
|
||||
if (cmdUsage.length >= positionalArgs) {
|
||||
|
@ -95,29 +95,29 @@ async function handleAutocompletionPromise(line) {
|
|||
}
|
||||
|
||||
if (argName == 'tag') {
|
||||
let tags = await Tag.search({ titlePattern: `${next}*` });
|
||||
const tags = await Tag.search({ titlePattern: `${next}*` });
|
||||
l.push(...tags.map(n => n.title));
|
||||
}
|
||||
|
||||
if (argName == 'file') {
|
||||
let files = await fs.readdir('.');
|
||||
const files = await fs.readdir('.');
|
||||
l.push(...files);
|
||||
}
|
||||
|
||||
if (argName == 'tag-command') {
|
||||
let c = filterList(['add', 'remove', 'list', 'notetags'], next);
|
||||
const c = filterList(['add', 'remove', 'list', 'notetags'], next);
|
||||
l.push(...c);
|
||||
}
|
||||
|
||||
if (argName == 'todo-command') {
|
||||
let c = filterList(['toggle', 'clear'], next);
|
||||
const c = filterList(['toggle', 'clear'], next);
|
||||
l.push(...c);
|
||||
}
|
||||
}
|
||||
if (l.length === 1) {
|
||||
return toCommandLine([...words.slice(0, -1), l[0]]);
|
||||
} else if (l.length > 1) {
|
||||
let ret = l.map(a => toCommandLine(a));
|
||||
const ret = l.map(a => toCommandLine(a));
|
||||
ret.prefix = `${toCommandLine(words.slice(0, -1))} `;
|
||||
return ret;
|
||||
}
|
||||
|
@ -155,7 +155,7 @@ function getArguments(line) {
|
|||
let inSingleQuotes = false;
|
||||
let inDoubleQuotes = false;
|
||||
let currentWord = '';
|
||||
let parsed = [];
|
||||
const parsed = [];
|
||||
for (let i = 0; i < line.length; i++) {
|
||||
if (line[i] === '"') {
|
||||
if (inDoubleQuotes) {
|
||||
|
@ -192,7 +192,7 @@ function getArguments(line) {
|
|||
return parsed;
|
||||
}
|
||||
function filterList(list, next) {
|
||||
let output = [];
|
||||
const output = [];
|
||||
for (let i = 0; i < list.length; i++) {
|
||||
if (list[i].indexOf(next) !== 0) continue;
|
||||
output.push(list[i]);
|
||||
|
|
|
@ -50,7 +50,7 @@ class BaseCommand {
|
|||
async cancel() {}
|
||||
|
||||
name() {
|
||||
let r = this.usage().split(' ');
|
||||
const r = this.usage().split(' ');
|
||||
return r[0];
|
||||
}
|
||||
|
||||
|
|
|
@ -15,11 +15,11 @@ function wrap(text, indent) {
|
|||
}
|
||||
|
||||
function renderOptions(options) {
|
||||
let output = [];
|
||||
const output = [];
|
||||
const optionColWidth = getOptionColWidth(options);
|
||||
|
||||
for (let i = 0; i < options.length; i++) {
|
||||
let option = options[i];
|
||||
const option = options[i];
|
||||
const flag = option[0];
|
||||
const indent = INDENT + INDENT + ' '.repeat(optionColWidth + 2);
|
||||
|
||||
|
@ -33,7 +33,7 @@ function renderOptions(options) {
|
|||
}
|
||||
|
||||
function renderCommand(cmd) {
|
||||
let output = [];
|
||||
const output = [];
|
||||
output.push(INDENT + cmd.usage());
|
||||
output.push('');
|
||||
output.push(wrap(cmd.description(), INDENT + INDENT));
|
||||
|
@ -48,14 +48,14 @@ function renderCommand(cmd) {
|
|||
}
|
||||
|
||||
function getCommands() {
|
||||
let output = [];
|
||||
const output = [];
|
||||
fs.readdirSync(__dirname).forEach(path => {
|
||||
if (path.indexOf('command-') !== 0) return;
|
||||
const ext = fileExtension(path);
|
||||
if (ext != 'js') return;
|
||||
|
||||
let CommandClass = require(`./${path}`);
|
||||
let cmd = new CommandClass();
|
||||
const CommandClass = require(`./${path}`);
|
||||
const cmd = new CommandClass();
|
||||
if (!cmd.enabled()) return;
|
||||
if (cmd.hidden()) return;
|
||||
output.push(cmd);
|
||||
|
@ -73,7 +73,7 @@ function getOptionColWidth(options) {
|
|||
}
|
||||
|
||||
function getHeader() {
|
||||
let output = [];
|
||||
const output = [];
|
||||
|
||||
output.push('NAME');
|
||||
output.push('');
|
||||
|
@ -84,7 +84,7 @@ function getHeader() {
|
|||
output.push('DESCRIPTION');
|
||||
output.push('');
|
||||
|
||||
let description = [];
|
||||
const description = [];
|
||||
description.push('Joplin is a note taking and to-do application, which can handle a large number of notes organised into notebooks.');
|
||||
description.push('The notes are searchable, can be copied, tagged and modified with your own text editor.');
|
||||
description.push('\n\n');
|
||||
|
@ -98,7 +98,7 @@ function getHeader() {
|
|||
}
|
||||
|
||||
function getFooter() {
|
||||
let output = [];
|
||||
const output = [];
|
||||
|
||||
output.push('WEBSITE');
|
||||
output.push('');
|
||||
|
@ -120,10 +120,10 @@ async function main() {
|
|||
// setLocale('fr_FR');
|
||||
|
||||
const commands = getCommands();
|
||||
let commandBlocks = [];
|
||||
const commandBlocks = [];
|
||||
|
||||
for (let i = 0; i < commands.length; i++) {
|
||||
let cmd = commands[i];
|
||||
const cmd = commands[i];
|
||||
commandBlocks.push(renderCommand(cmd));
|
||||
}
|
||||
|
||||
|
|
|
@ -40,8 +40,8 @@ function createClient(id) {
|
|||
const client = createClient(1);
|
||||
|
||||
function execCommand(client, command) {
|
||||
let exePath = `node ${joplinAppPath}`;
|
||||
let cmd = `${exePath} --update-geolocation-disabled --env dev --profile ${client.profileDir} ${command}`;
|
||||
const exePath = `node ${joplinAppPath}`;
|
||||
const cmd = `${exePath} --update-geolocation-disabled --env dev --profile ${client.profileDir} ${command}`;
|
||||
logger.info(`${client.id}: ${command}`);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
|
@ -129,8 +129,8 @@ testUnits.testCat = async () => {
|
|||
await execCommand(client, 'mkbook nb1');
|
||||
await execCommand(client, 'mknote mynote');
|
||||
|
||||
let folder = await Folder.loadByTitle('nb1');
|
||||
let note = await Note.loadFolderNoteByField(folder.id, 'title', 'mynote');
|
||||
const folder = await Folder.loadByTitle('nb1');
|
||||
const note = await Note.loadFolderNoteByField(folder.id, 'title', 'mynote');
|
||||
|
||||
let r = await execCommand(client, 'cat mynote');
|
||||
assertTrue(r.indexOf('mynote') >= 0);
|
||||
|
@ -149,7 +149,7 @@ testUnits.testConfig = async () => {
|
|||
await Setting.load();
|
||||
assertEquals('subl', Setting.value('editor'));
|
||||
|
||||
let r = await execCommand(client, 'config');
|
||||
const r = await execCommand(client, 'config');
|
||||
assertTrue(r.indexOf('editor') >= 0);
|
||||
assertTrue(r.indexOf('subl') >= 0);
|
||||
};
|
||||
|
@ -161,14 +161,14 @@ testUnits.testCp = async () => {
|
|||
|
||||
await execCommand(client, 'cp n1');
|
||||
|
||||
let f1 = await Folder.loadByTitle('nb1');
|
||||
let f2 = await Folder.loadByTitle('nb2');
|
||||
const f1 = await Folder.loadByTitle('nb1');
|
||||
const f2 = await Folder.loadByTitle('nb2');
|
||||
let notes = await Note.previews(f1.id);
|
||||
|
||||
assertEquals(2, notes.length);
|
||||
|
||||
await execCommand(client, 'cp n1 nb2');
|
||||
let notesF1 = await Note.previews(f1.id);
|
||||
const notesF1 = await Note.previews(f1.id);
|
||||
assertEquals(2, notesF1.length);
|
||||
notes = await Note.previews(f2.id);
|
||||
assertEquals(1, notes.length);
|
||||
|
@ -179,7 +179,7 @@ testUnits.testLs = async () => {
|
|||
await execCommand(client, 'mkbook nb1');
|
||||
await execCommand(client, 'mknote note1');
|
||||
await execCommand(client, 'mknote note2');
|
||||
let r = await execCommand(client, 'ls');
|
||||
const r = await execCommand(client, 'ls');
|
||||
|
||||
assertTrue(r.indexOf('note1') >= 0);
|
||||
assertTrue(r.indexOf('note2') >= 0);
|
||||
|
@ -191,8 +191,8 @@ testUnits.testMv = async () => {
|
|||
await execCommand(client, 'mknote n1');
|
||||
await execCommand(client, 'mv n1 nb2');
|
||||
|
||||
let f1 = await Folder.loadByTitle('nb1');
|
||||
let f2 = await Folder.loadByTitle('nb2');
|
||||
const f1 = await Folder.loadByTitle('nb1');
|
||||
const f2 = await Folder.loadByTitle('nb2');
|
||||
let notes1 = await Note.previews(f1.id);
|
||||
let notes2 = await Note.previews(f2.id);
|
||||
|
||||
|
@ -224,12 +224,12 @@ async function main() {
|
|||
let onlyThisTest = 'testMv';
|
||||
onlyThisTest = '';
|
||||
|
||||
for (let n in testUnits) {
|
||||
for (const n in testUnits) {
|
||||
if (!testUnits.hasOwnProperty(n)) continue;
|
||||
if (onlyThisTest && n != onlyThisTest) continue;
|
||||
|
||||
await clearDatabase();
|
||||
let testName = n.substr(4).toLowerCase();
|
||||
const testName = n.substr(4).toLowerCase();
|
||||
process.stdout.write(`${testName}: `);
|
||||
await testUnits[n]();
|
||||
console.info('');
|
||||
|
|
|
@ -11,27 +11,27 @@ cliUtils.printArray = function(logFunction, rows) {
|
|||
const ALIGN_LEFT = 0;
|
||||
const ALIGN_RIGHT = 1;
|
||||
|
||||
let colWidths = [];
|
||||
let colAligns = [];
|
||||
const colWidths = [];
|
||||
const colAligns = [];
|
||||
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
let row = rows[i];
|
||||
const row = rows[i];
|
||||
|
||||
for (let j = 0; j < row.length; j++) {
|
||||
let item = row[j];
|
||||
let width = item ? item.toString().length : 0;
|
||||
let align = typeof item == 'number' ? ALIGN_RIGHT : ALIGN_LEFT;
|
||||
const item = row[j];
|
||||
const width = item ? item.toString().length : 0;
|
||||
const align = typeof item == 'number' ? ALIGN_RIGHT : ALIGN_LEFT;
|
||||
if (!colWidths[j] || colWidths[j] < width) colWidths[j] = width;
|
||||
if (colAligns.length <= j) colAligns[j] = align;
|
||||
}
|
||||
}
|
||||
|
||||
for (let row = 0; row < rows.length; row++) {
|
||||
let line = [];
|
||||
const line = [];
|
||||
for (let col = 0; col < colWidths.length; col++) {
|
||||
let item = rows[row][col];
|
||||
let width = colWidths[col];
|
||||
let dir = colAligns[col] == ALIGN_LEFT ? stringPadding.RIGHT : stringPadding.LEFT;
|
||||
const item = rows[row][col];
|
||||
const width = colWidths[col];
|
||||
const dir = colAligns[col] == ALIGN_LEFT ? stringPadding.RIGHT : stringPadding.LEFT;
|
||||
line.push(stringPadding(item, width, ' ', dir));
|
||||
}
|
||||
logFunction(line.join(' '));
|
||||
|
@ -39,7 +39,7 @@ cliUtils.printArray = function(logFunction, rows) {
|
|||
};
|
||||
|
||||
cliUtils.parseFlags = function(flags) {
|
||||
let output = {};
|
||||
const output = {};
|
||||
flags = flags.split(',');
|
||||
for (let i = 0; i < flags.length; i++) {
|
||||
let f = flags[i].trim();
|
||||
|
@ -76,11 +76,11 @@ cliUtils.parseCommandArg = function(arg) {
|
|||
cliUtils.makeCommandArgs = function(cmd, argv) {
|
||||
let cmdUsage = cmd.usage();
|
||||
cmdUsage = yargParser(cmdUsage);
|
||||
let output = {};
|
||||
const output = {};
|
||||
|
||||
let options = cmd.options();
|
||||
let booleanFlags = [];
|
||||
let aliases = {};
|
||||
const options = cmd.options();
|
||||
const booleanFlags = [];
|
||||
const aliases = {};
|
||||
for (let i = 0; i < options.length; i++) {
|
||||
if (options[i].length != 2) throw new Error(`Invalid options: ${options[i]}`);
|
||||
let flags = options[i][0];
|
||||
|
@ -97,7 +97,7 @@ cliUtils.makeCommandArgs = function(cmd, argv) {
|
|||
}
|
||||
}
|
||||
|
||||
let args = yargParser(argv, {
|
||||
const args = yargParser(argv, {
|
||||
boolean: booleanFlags,
|
||||
alias: aliases,
|
||||
string: ['_'],
|
||||
|
@ -113,8 +113,8 @@ cliUtils.makeCommandArgs = function(cmd, argv) {
|
|||
}
|
||||
}
|
||||
|
||||
let argOptions = {};
|
||||
for (let key in args) {
|
||||
const argOptions = {};
|
||||
for (const key in args) {
|
||||
if (!args.hasOwnProperty(key)) continue;
|
||||
if (key == '_') continue;
|
||||
argOptions[key] = args[key];
|
||||
|
@ -134,7 +134,7 @@ cliUtils.promptMcq = function(message, answers) {
|
|||
});
|
||||
|
||||
message += '\n\n';
|
||||
for (let n in answers) {
|
||||
for (const n in answers) {
|
||||
if (!answers.hasOwnProperty(n)) continue;
|
||||
message += `${_('%s: %s', n, answers[n])}\n`;
|
||||
}
|
||||
|
|
|
@ -56,7 +56,6 @@ class Command extends BaseCommand {
|
|||
lines.push('# Joplin API');
|
||||
lines.push('');
|
||||
|
||||
lines.push('When the Web Clipper service is enabled, Joplin exposes a [REST API](https://en.wikipedia.org/wiki/Representational_state_transfer) which allows third-party applications to access Joplin\'s data and to create, modify or delete notes, notebooks, resources or tags.');
|
||||
lines.push('');
|
||||
lines.push('In order to use it, you\'ll first need to find on which port the service is running. To do so, open the Web Clipper Options in Joplin and if the service is running it should tell you on which port. Normally it runs on port **41184**. If you want to find it programmatically, you may follow this kind of algorithm:');
|
||||
lines.push('');
|
||||
|
|
|
@ -14,9 +14,9 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let title = args['note'];
|
||||
const title = args['note'];
|
||||
|
||||
let note = await app().loadItem(BaseModel.TYPE_NOTE, title, { parent: app().currentFolder() });
|
||||
const note = await app().loadItem(BaseModel.TYPE_NOTE, title, { parent: app().currentFolder() });
|
||||
this.encryptionCheck(note);
|
||||
if (!note) throw new Error(_('Cannot find "%s".', title));
|
||||
|
||||
|
|
|
@ -18,9 +18,9 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let title = args['note'];
|
||||
const title = args['note'];
|
||||
|
||||
let item = await app().loadItem(BaseModel.TYPE_NOTE, title, { parent: app().currentFolder() });
|
||||
const item = await app().loadItem(BaseModel.TYPE_NOTE, title, { parent: app().currentFolder() });
|
||||
if (!item) throw new Error(_('Cannot find "%s".', title));
|
||||
|
||||
const content = args.options.verbose ? await Note.serialize(item) : await Note.serializeForEdit(item);
|
||||
|
|
|
@ -35,7 +35,7 @@ class Command extends BaseCommand {
|
|||
});
|
||||
|
||||
inputStream.on('end', () => {
|
||||
let json = chunks.join('');
|
||||
const json = chunks.join('');
|
||||
let settingsObj;
|
||||
try {
|
||||
settingsObj = JSON.parse(json);
|
||||
|
@ -83,7 +83,7 @@ class Command extends BaseCommand {
|
|||
};
|
||||
|
||||
if (isExport || (!isImport && !args.value)) {
|
||||
let keys = Setting.keys(!verbose, 'cli');
|
||||
const keys = Setting.keys(!verbose, 'cli');
|
||||
keys.sort();
|
||||
|
||||
if (isExport) {
|
||||
|
|
|
@ -18,15 +18,15 @@ class Command extends BaseCommand {
|
|||
|
||||
async action() {
|
||||
let items = [];
|
||||
let folders = await Folder.all();
|
||||
const folders = await Folder.all();
|
||||
for (let i = 0; i < folders.length; i++) {
|
||||
let folder = folders[i];
|
||||
let notes = await Note.previews(folder.id);
|
||||
const folder = folders[i];
|
||||
const notes = await Note.previews(folder.id);
|
||||
items.push(folder);
|
||||
items = items.concat(notes);
|
||||
}
|
||||
|
||||
let tags = await Tag.all();
|
||||
const tags = await Tag.all();
|
||||
for (let i = 0; i < tags.length; i++) {
|
||||
tags[i].notes_ = await Tag.noteIds(tags[i].id);
|
||||
}
|
||||
|
|
|
@ -138,7 +138,7 @@ class Command extends BaseCommand {
|
|||
if (!targetPath) throw new Error('Please specify the sync target path.');
|
||||
|
||||
const dirPaths = function(targetPath) {
|
||||
let paths = [];
|
||||
const paths = [];
|
||||
fs.readdirSync(targetPath).forEach(path => {
|
||||
paths.push(path);
|
||||
});
|
||||
|
@ -151,10 +151,10 @@ class Command extends BaseCommand {
|
|||
let encryptedResourceCount = 0;
|
||||
let otherItemCount = 0;
|
||||
|
||||
let encryptedPaths = [];
|
||||
let decryptedPaths = [];
|
||||
const encryptedPaths = [];
|
||||
const decryptedPaths = [];
|
||||
|
||||
let paths = dirPaths(targetPath);
|
||||
const paths = dirPaths(targetPath);
|
||||
|
||||
for (let i = 0; i < paths.length; i++) {
|
||||
const path = paths[i];
|
||||
|
@ -164,7 +164,7 @@ class Command extends BaseCommand {
|
|||
// this.stdout(fullPath);
|
||||
|
||||
if (path === '.resource') {
|
||||
let resourcePaths = dirPaths(fullPath);
|
||||
const resourcePaths = dirPaths(fullPath);
|
||||
for (let j = 0; j < resourcePaths.length; j++) {
|
||||
const resourcePath = resourcePaths[j];
|
||||
resourceCount++;
|
||||
|
|
|
@ -35,7 +35,7 @@ class Command extends BaseCommand {
|
|||
// Load note or create it if it doesn't exist
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
let title = args['note'];
|
||||
const title = args['note'];
|
||||
|
||||
if (!app().currentFolder()) throw new Error(_('No active notebook.'));
|
||||
let note = await app().loadItem(BaseModel.TYPE_NOTE, title);
|
||||
|
@ -91,7 +91,7 @@ class Command extends BaseCommand {
|
|||
|
||||
const updatedContent = await fs.readFile(tempFilePath, 'utf8');
|
||||
if (updatedContent !== originalContent) {
|
||||
let updatedNote = await Note.unserializeForEdit(updatedContent);
|
||||
const updatedNote = await Note.unserializeForEdit(updatedContent);
|
||||
updatedNote.id = note.id;
|
||||
await Note.save(updatedNote);
|
||||
this.stdout(_('Note has been saved.'));
|
||||
|
|
|
@ -24,7 +24,7 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let exportOptions = {};
|
||||
const exportOptions = {};
|
||||
exportOptions.path = args.path;
|
||||
|
||||
exportOptions.format = args.options.format ? args.options.format : 'jex';
|
||||
|
|
|
@ -14,9 +14,9 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let title = args['note'];
|
||||
const title = args['note'];
|
||||
|
||||
let item = await app().loadItem(BaseModel.TYPE_NOTE, title, { parent: app().currentFolder() });
|
||||
const item = await app().loadItem(BaseModel.TYPE_NOTE, title, { parent: app().currentFolder() });
|
||||
if (!item) throw new Error(_('Cannot find "%s".', title));
|
||||
const url = Note.geolocationUrl(item);
|
||||
this.stdout(url);
|
||||
|
|
|
@ -15,8 +15,8 @@ class Command extends BaseCommand {
|
|||
|
||||
allCommands() {
|
||||
const commands = app().commands(app().uiType());
|
||||
let output = [];
|
||||
for (let n in commands) {
|
||||
const output = [];
|
||||
for (const n in commands) {
|
||||
if (!commands.hasOwnProperty(n)) continue;
|
||||
const command = commands[n];
|
||||
if (command.hidden()) continue;
|
||||
|
@ -48,7 +48,7 @@ class Command extends BaseCommand {
|
|||
.gui()
|
||||
.keymap();
|
||||
|
||||
let rows = [];
|
||||
const rows = [];
|
||||
|
||||
for (let i = 0; i < keymap.length; i++) {
|
||||
const item = keymap[i];
|
||||
|
|
|
@ -25,7 +25,7 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let folder = await app().loadItem(BaseModel.TYPE_FOLDER, args.notebook);
|
||||
const folder = await app().loadItem(BaseModel.TYPE_FOLDER, args.notebook);
|
||||
|
||||
if (args.notebook && !folder) throw new Error(_('Cannot find "%s".', args.notebook));
|
||||
|
||||
|
@ -39,7 +39,7 @@ class Command extends BaseCommand {
|
|||
// onProgress/onError supported by Enex import only
|
||||
|
||||
importOptions.onProgress = progressState => {
|
||||
let line = [];
|
||||
const line = [];
|
||||
line.push(_('Found: %d.', progressState.loaded));
|
||||
line.push(_('Created: %d.', progressState.created));
|
||||
if (progressState.updated) line.push(_('Updated: %d.', progressState.updated));
|
||||
|
@ -51,7 +51,7 @@ class Command extends BaseCommand {
|
|||
};
|
||||
|
||||
importOptions.onError = error => {
|
||||
let s = error.trace ? error.trace : error.toString();
|
||||
const s = error.trace ? error.trace : error.toString();
|
||||
this.stdout(s);
|
||||
};
|
||||
|
||||
|
|
|
@ -34,11 +34,11 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let pattern = args['note-pattern'];
|
||||
const pattern = args['note-pattern'];
|
||||
let items = [];
|
||||
let options = args.options;
|
||||
const options = args.options;
|
||||
|
||||
let queryOptions = {};
|
||||
const queryOptions = {};
|
||||
if (options.limit) queryOptions.limit = options.limit;
|
||||
if (options.sort) {
|
||||
queryOptions.orderBy = options.sort;
|
||||
|
@ -70,19 +70,19 @@ class Command extends BaseCommand {
|
|||
} else {
|
||||
let hasTodos = false;
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
let item = items[i];
|
||||
const item = items[i];
|
||||
if (item.is_todo) {
|
||||
hasTodos = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let seenTitles = [];
|
||||
let rows = [];
|
||||
const seenTitles = [];
|
||||
const rows = [];
|
||||
let shortIdShown = false;
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
let item = items[i];
|
||||
let row = [];
|
||||
const item = items[i];
|
||||
const row = [];
|
||||
|
||||
if (options.long) {
|
||||
row.push(BaseModel.shortId(item.id));
|
||||
|
|
|
@ -13,7 +13,7 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let folder = await Folder.save({ title: args['new-notebook'] }, { userSideValidation: true });
|
||||
const folder = await Folder.save({ title: args['new-notebook'] }, { userSideValidation: true });
|
||||
app().switchCurrentFolder(folder);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,7 +26,7 @@ class Command extends BaseCommand {
|
|||
|
||||
const ok = force ? true : await this.prompt(notes.length > 1 ? _('%d notes match this pattern. Delete them?', notes.length) : _('Delete note?'), { booleanAnswerDefault: 'n' });
|
||||
if (!ok) return;
|
||||
let ids = notes.map(n => n.id);
|
||||
const ids = notes.map(n => n.id);
|
||||
await Note.batchDelete(ids);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,8 +18,8 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let pattern = args['pattern'];
|
||||
let folderTitle = args['notebook'];
|
||||
const pattern = args['pattern'];
|
||||
const folderTitle = args['notebook'];
|
||||
|
||||
let folder = null;
|
||||
if (folderTitle) {
|
||||
|
|
|
@ -23,18 +23,18 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let title = args['note'];
|
||||
let propName = args['name'];
|
||||
const title = args['note'];
|
||||
const propName = args['name'];
|
||||
let propValue = args['value'];
|
||||
if (!propValue) propValue = '';
|
||||
|
||||
let notes = await app().loadItems(BaseModel.TYPE_NOTE, title);
|
||||
const notes = await app().loadItems(BaseModel.TYPE_NOTE, title);
|
||||
if (!notes.length) throw new Error(_('Cannot find "%s".', title));
|
||||
|
||||
for (let i = 0; i < notes.length; i++) {
|
||||
this.encryptionCheck(notes[i]);
|
||||
|
||||
let newNote = {
|
||||
const newNote = {
|
||||
id: notes[i].id,
|
||||
type_: notes[i].type_,
|
||||
};
|
||||
|
|
|
@ -14,20 +14,20 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action() {
|
||||
let service = new ReportService();
|
||||
let report = await service.status(Setting.value('sync.target'));
|
||||
const service = new ReportService();
|
||||
const report = await service.status(Setting.value('sync.target'));
|
||||
|
||||
for (let i = 0; i < report.length; i++) {
|
||||
let section = report[i];
|
||||
const section = report[i];
|
||||
|
||||
if (i > 0) this.stdout('');
|
||||
|
||||
this.stdout(`# ${section.title}`);
|
||||
this.stdout('');
|
||||
|
||||
for (let n in section.body) {
|
||||
for (const n in section.body) {
|
||||
if (!section.body.hasOwnProperty(n)) continue;
|
||||
let line = section.body[n];
|
||||
const line = section.body[n];
|
||||
this.stdout(line);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -161,9 +161,9 @@ class Command extends BaseCommand {
|
|||
|
||||
const sync = await syncTarget.synchronizer();
|
||||
|
||||
let options = {
|
||||
const options = {
|
||||
onProgress: report => {
|
||||
let lines = Synchronizer.reportToLines(report);
|
||||
const lines = Synchronizer.reportToLines(report);
|
||||
if (lines.length) cliUtils.redraw(lines.join(' '));
|
||||
},
|
||||
onMessage: msg => {
|
||||
|
@ -185,7 +185,7 @@ class Command extends BaseCommand {
|
|||
options.context = context;
|
||||
|
||||
try {
|
||||
let newContext = await sync.start(options);
|
||||
const newContext = await sync.start(options);
|
||||
Setting.setValue(contextKey, JSON.stringify(newContext));
|
||||
} catch (error) {
|
||||
if (error.code == 'alreadyStarted') {
|
||||
|
|
|
@ -20,7 +20,7 @@ class Command extends BaseCommand {
|
|||
|
||||
async action(args) {
|
||||
let tag = null;
|
||||
let options = args.options;
|
||||
const options = args.options;
|
||||
|
||||
if (args.tag) tag = await app().loadItem(BaseModel.TYPE_TAG, args.tag);
|
||||
let notes = [];
|
||||
|
@ -46,7 +46,7 @@ class Command extends BaseCommand {
|
|||
}
|
||||
} else if (command == 'list') {
|
||||
if (tag) {
|
||||
let notes = await Tag.notes(tag.id);
|
||||
const notes = await Tag.notes(tag.id);
|
||||
notes.map(note => {
|
||||
let line = '';
|
||||
if (options.long) {
|
||||
|
@ -70,7 +70,7 @@ class Command extends BaseCommand {
|
|||
this.stdout(line);
|
||||
});
|
||||
} else {
|
||||
let tags = await Tag.all();
|
||||
const tags = await Tag.all();
|
||||
tags.map(tag => {
|
||||
this.stdout(tag.title);
|
||||
});
|
||||
|
|
|
@ -17,7 +17,7 @@ class Command extends BaseCommand {
|
|||
}
|
||||
|
||||
async action(args) {
|
||||
let folder = await app().loadItem(BaseModel.TYPE_FOLDER, args['notebook']);
|
||||
const folder = await app().loadItem(BaseModel.TYPE_FOLDER, args['notebook']);
|
||||
if (!folder) throw new Error(_('Cannot find "%s".', args['notebook']));
|
||||
app().switchCurrentFolder(folder);
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ const fs = require('fs-extra');
|
|||
const baseDir = `${dirname(__dirname)}/tests/fuzzing`;
|
||||
const syncDir = `${baseDir}/sync`;
|
||||
const joplinAppPath = `${__dirname}/main.js`;
|
||||
let syncDurations = [];
|
||||
const syncDurations = [];
|
||||
|
||||
const fsDriver = new FsDriverNode();
|
||||
Logger.fsDriver_ = fsDriver;
|
||||
|
@ -34,10 +34,10 @@ function createClient(id) {
|
|||
}
|
||||
|
||||
async function createClients() {
|
||||
let output = [];
|
||||
let promises = [];
|
||||
const output = [];
|
||||
const promises = [];
|
||||
for (let clientId = 0; clientId < 2; clientId++) {
|
||||
let client = createClient(clientId);
|
||||
const client = createClient(clientId);
|
||||
promises.push(fs.remove(client.profileDir));
|
||||
promises.push(
|
||||
execCommand(client, 'config sync.target 2').then(() => {
|
||||
|
@ -2064,8 +2064,8 @@ function randomWord() {
|
|||
}
|
||||
|
||||
function execCommand(client, command, options = {}) {
|
||||
let exePath = `node ${joplinAppPath}`;
|
||||
let cmd = `${exePath} --update-geolocation-disabled --env dev --log-level debug --profile ${client.profileDir} ${command}`;
|
||||
const exePath = `node ${joplinAppPath}`;
|
||||
const cmd = `${exePath} --update-geolocation-disabled --env dev --log-level debug --profile ${client.profileDir} ${command}`;
|
||||
logger.info(`${client.id}: ${command}`);
|
||||
|
||||
if (options.killAfter) {
|
||||
|
@ -2073,7 +2073,7 @@ function execCommand(client, command, options = {}) {
|
|||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let childProcess = exec(cmd, (error, stdout, stderr) => {
|
||||
const childProcess = exec(cmd, (error, stdout, stderr) => {
|
||||
if (error) {
|
||||
if (error.signal == 'SIGTERM') {
|
||||
resolve('Process was killed');
|
||||
|
@ -2096,7 +2096,7 @@ function execCommand(client, command, options = {}) {
|
|||
}
|
||||
|
||||
async function clientItems(client) {
|
||||
let itemsJson = await execCommand(client, 'dump');
|
||||
const itemsJson = await execCommand(client, 'dump');
|
||||
try {
|
||||
return JSON.parse(itemsJson);
|
||||
} catch (error) {
|
||||
|
@ -2105,7 +2105,7 @@ async function clientItems(client) {
|
|||
}
|
||||
|
||||
function randomTag(items) {
|
||||
let tags = [];
|
||||
const tags = [];
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
if (items[i].type_ != 5) continue;
|
||||
tags.push(items[i]);
|
||||
|
@ -2115,7 +2115,7 @@ function randomTag(items) {
|
|||
}
|
||||
|
||||
function randomNote(items) {
|
||||
let notes = [];
|
||||
const notes = [];
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
if (items[i].type_ != 1) continue;
|
||||
notes.push(items[i]);
|
||||
|
@ -2125,14 +2125,14 @@ function randomNote(items) {
|
|||
}
|
||||
|
||||
async function execRandomCommand(client) {
|
||||
let possibleCommands = [
|
||||
const possibleCommands = [
|
||||
['mkbook {word}', 40], // CREATE FOLDER
|
||||
['mknote {word}', 70], // CREATE NOTE
|
||||
[
|
||||
async () => {
|
||||
// DELETE RANDOM ITEM
|
||||
let items = await clientItems(client);
|
||||
let item = randomElement(items);
|
||||
const items = await clientItems(client);
|
||||
const item = randomElement(items);
|
||||
if (!item) return;
|
||||
|
||||
if (item.type_ == 1) {
|
||||
|
@ -2150,8 +2150,8 @@ async function execRandomCommand(client) {
|
|||
[
|
||||
async () => {
|
||||
// SYNC
|
||||
let avgSyncDuration = averageSyncDuration();
|
||||
let options = {};
|
||||
const avgSyncDuration = averageSyncDuration();
|
||||
const options = {};
|
||||
if (!isNaN(avgSyncDuration)) {
|
||||
if (Math.random() >= 0.5) {
|
||||
options.killAfter = avgSyncDuration * Math.random();
|
||||
|
@ -2164,8 +2164,8 @@ async function execRandomCommand(client) {
|
|||
[
|
||||
async () => {
|
||||
// UPDATE RANDOM ITEM
|
||||
let items = await clientItems(client);
|
||||
let item = randomNote(items);
|
||||
const items = await clientItems(client);
|
||||
const item = randomNote(items);
|
||||
if (!item) return;
|
||||
|
||||
return execCommand(client, `set ${item.id} title "${randomWord()}"`);
|
||||
|
@ -2175,12 +2175,12 @@ async function execRandomCommand(client) {
|
|||
[
|
||||
async () => {
|
||||
// ADD TAG
|
||||
let items = await clientItems(client);
|
||||
let note = randomNote(items);
|
||||
const items = await clientItems(client);
|
||||
const note = randomNote(items);
|
||||
if (!note) return;
|
||||
|
||||
let tag = randomTag(items);
|
||||
let tagTitle = !tag || Math.random() >= 0.9 ? `tag-${randomWord()}` : tag.title;
|
||||
const tag = randomTag(items);
|
||||
const tagTitle = !tag || Math.random() >= 0.9 ? `tag-${randomWord()}` : tag.title;
|
||||
|
||||
return execCommand(client, `tag add ${tagTitle} ${note.id}`);
|
||||
},
|
||||
|
@ -2191,7 +2191,7 @@ async function execRandomCommand(client) {
|
|||
let cmd = null;
|
||||
while (true) {
|
||||
cmd = randomElement(possibleCommands);
|
||||
let r = 1 + Math.floor(Math.random() * 100);
|
||||
const r = 1 + Math.floor(Math.random() * 100);
|
||||
if (r <= cmd[1]) break;
|
||||
}
|
||||
|
||||
|
@ -2210,7 +2210,7 @@ function averageSyncDuration() {
|
|||
}
|
||||
|
||||
function randomNextCheckTime() {
|
||||
let output = time.unixMs() + 1000 + Math.random() * 1000 * 120;
|
||||
const output = time.unixMs() + 1000 + Math.random() * 1000 * 120;
|
||||
logger.info(`Next sync check: ${time.unixMsToIso(output)} (${Math.round((output - time.unixMs()) / 1000)} sec.)`);
|
||||
return output;
|
||||
}
|
||||
|
@ -2223,11 +2223,11 @@ function findItem(items, itemId) {
|
|||
}
|
||||
|
||||
function compareItems(item1, item2) {
|
||||
let output = [];
|
||||
for (let n in item1) {
|
||||
const output = [];
|
||||
for (const n in item1) {
|
||||
if (!item1.hasOwnProperty(n)) continue;
|
||||
let p1 = item1[n];
|
||||
let p2 = item2[n];
|
||||
const p1 = item1[n];
|
||||
const p2 = item2[n];
|
||||
|
||||
if (n == 'notes_') {
|
||||
p1.sort();
|
||||
|
@ -2243,13 +2243,13 @@ function compareItems(item1, item2) {
|
|||
}
|
||||
|
||||
function findMissingItems_(items1, items2) {
|
||||
let output = [];
|
||||
const output = [];
|
||||
|
||||
for (let i = 0; i < items1.length; i++) {
|
||||
let item1 = items1[i];
|
||||
const item1 = items1[i];
|
||||
let found = false;
|
||||
for (let j = 0; j < items2.length; j++) {
|
||||
let item2 = items2[j];
|
||||
const item2 = items2[j];
|
||||
if (item1.id == item2.id) {
|
||||
found = true;
|
||||
break;
|
||||
|
@ -2269,33 +2269,33 @@ function findMissingItems(items1, items2) {
|
|||
}
|
||||
|
||||
async function compareClientItems(clientItems) {
|
||||
let itemCounts = [];
|
||||
const itemCounts = [];
|
||||
for (let i = 0; i < clientItems.length; i++) {
|
||||
let items = clientItems[i];
|
||||
const items = clientItems[i];
|
||||
itemCounts.push(items.length);
|
||||
}
|
||||
logger.info(`Item count: ${itemCounts.join(', ')}`);
|
||||
|
||||
let missingItems = findMissingItems(clientItems[0], clientItems[1]);
|
||||
const missingItems = findMissingItems(clientItems[0], clientItems[1]);
|
||||
if (missingItems[0].length || missingItems[1].length) {
|
||||
logger.error('Items are different');
|
||||
logger.error(missingItems);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
let differences = [];
|
||||
let items = clientItems[0];
|
||||
const differences = [];
|
||||
const items = clientItems[0];
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
let item1 = items[i];
|
||||
const item1 = items[i];
|
||||
for (let clientId = 1; clientId < clientItems.length; clientId++) {
|
||||
let item2 = findItem(clientItems[clientId], item1.id);
|
||||
const item2 = findItem(clientItems[clientId], item1.id);
|
||||
if (!item2) {
|
||||
logger.error(`Item not found on client ${clientId}:`);
|
||||
logger.error(item1);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
let diff = compareItems(item1, item2);
|
||||
const diff = compareItems(item1, item2);
|
||||
if (diff.length) {
|
||||
differences.push({
|
||||
item1: JSON.stringify(item1),
|
||||
|
@ -2315,7 +2315,7 @@ async function compareClientItems(clientItems) {
|
|||
async function main() {
|
||||
await fs.remove(syncDir);
|
||||
|
||||
let clients = await createClients();
|
||||
const clients = await createClients();
|
||||
let clientId = 0;
|
||||
|
||||
for (let i = 0; i < clients.length; i++) {
|
||||
|
@ -2348,7 +2348,7 @@ async function main() {
|
|||
|
||||
if (state == 'syncCheck') {
|
||||
state = 'waitForSyncCheck';
|
||||
let clientItems = [];
|
||||
const clientItems = [];
|
||||
// Up to 3 sync operations must be performed by each clients in order for them
|
||||
// to be perfectly in sync - in order for each items to send their changes
|
||||
// and get those from the other clients, and to also get changes that are
|
||||
|
@ -2356,12 +2356,12 @@ async function main() {
|
|||
// with another one).
|
||||
for (let loopCount = 0; loopCount < 3; loopCount++) {
|
||||
for (let i = 0; i < clients.length; i++) {
|
||||
let beforeTime = time.unixMs();
|
||||
const beforeTime = time.unixMs();
|
||||
await execCommand(clients[i], 'sync');
|
||||
syncDurations.push(time.unixMs() - beforeTime);
|
||||
if (syncDurations.length > 20) syncDurations.splice(0, 1);
|
||||
if (loopCount === 2) {
|
||||
let dump = await execCommand(clients[i], 'dump');
|
||||
const dump = await execCommand(clients[i], 'dump');
|
||||
clientItems[i] = JSON.parse(dump);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@ class FolderListWidget extends ListWidget {
|
|||
this.trimItemTitle = false;
|
||||
|
||||
this.itemRenderer = item => {
|
||||
let output = [];
|
||||
const output = [];
|
||||
if (item === '-') {
|
||||
output.push('-'.repeat(this.innerWidth));
|
||||
} else if (item.type_ === Folder.modelType()) {
|
||||
|
@ -121,7 +121,7 @@ class FolderListWidget extends ListWidget {
|
|||
|
||||
folderHasChildren_(folders, folderId) {
|
||||
for (let i = 0; i < folders.length; i++) {
|
||||
let folder = folders[i];
|
||||
const folder = folders[i];
|
||||
if (folder.parent_id === folderId) return true;
|
||||
}
|
||||
return false;
|
||||
|
|
|
@ -106,7 +106,7 @@ class StatusBarWidget extends BaseWidget {
|
|||
|
||||
const isSecurePrompt = !!this.promptState_.secure;
|
||||
|
||||
let options = {
|
||||
const options = {
|
||||
cancelable: true,
|
||||
history: this.history,
|
||||
default: this.promptState_.initialText,
|
||||
|
|
|
@ -6,11 +6,11 @@ const MAX_WIDTH = 78;
|
|||
const INDENT = ' ';
|
||||
|
||||
function renderTwoColumnData(options, baseIndent, width) {
|
||||
let output = [];
|
||||
const output = [];
|
||||
const optionColWidth = getOptionColWidth(options);
|
||||
|
||||
for (let i = 0; i < options.length; i++) {
|
||||
let option = options[i];
|
||||
const option = options[i];
|
||||
const flag = option[0];
|
||||
const indent = baseIndent + INDENT + ' '.repeat(optionColWidth + 2);
|
||||
|
||||
|
@ -28,7 +28,7 @@ function renderCommandHelp(cmd, width = null) {
|
|||
|
||||
const baseIndent = '';
|
||||
|
||||
let output = [];
|
||||
const output = [];
|
||||
output.push(baseIndent + cmd.usage());
|
||||
output.push('');
|
||||
output.push(wrap(cmd.description(), baseIndent + INDENT, width));
|
||||
|
@ -42,7 +42,7 @@ function renderCommandHelp(cmd, width = null) {
|
|||
|
||||
if (cmd.name() === 'config') {
|
||||
const renderMetadata = md => {
|
||||
let desc = [];
|
||||
const desc = [];
|
||||
|
||||
if (md.label) {
|
||||
let label = md.label();
|
||||
|
@ -77,7 +77,7 @@ function renderCommandHelp(cmd, width = null) {
|
|||
output.push(_('Possible keys/values:'));
|
||||
output.push('');
|
||||
|
||||
let keysValues = [];
|
||||
const keysValues = [];
|
||||
const keys = Setting.keys(true, 'cli');
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
if (keysValues.length) keysValues.push(['', '']);
|
||||
|
|
|
@ -54,7 +54,7 @@ shimInit();
|
|||
const application = app();
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
var rl = require('readline').createInterface({
|
||||
const rl = require('readline').createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
});
|
||||
|
|
|
@ -1048,11 +1048,11 @@ msgstr ""
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/app.js:1028
|
||||
msgid "Zoom In"
|
||||
msgstr ""
|
||||
msgstr "Зумирај"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/app.js:1034
|
||||
msgid "Zoom Out"
|
||||
msgstr ""
|
||||
msgstr "Одзумирај"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/app.js:1042
|
||||
msgid "&Tools"
|
||||
|
@ -1245,7 +1245,7 @@ msgstr ""
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ConfigScreen.min.js:82
|
||||
msgid "This will open a new screen. Save your current changes?"
|
||||
msgstr ""
|
||||
msgstr "Ово ће отворити нови екран. Сачувај своје промене?"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ConfigScreen.min.js:139
|
||||
#, javascript-format
|
||||
|
@ -1286,7 +1286,7 @@ msgstr "Прикажи све"
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ConfigScreen.min.js:219
|
||||
msgid "Joplin Nextcloud App status:"
|
||||
msgstr ""
|
||||
msgstr "Статус Joplin Nextcloud апликације"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ConfigScreen.min.js:233
|
||||
#, fuzzy
|
||||
|
@ -1452,23 +1452,23 @@ msgstr "Шифровање је:"
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ExtensionBadge.min.js:10
|
||||
msgid "Firefox Extension"
|
||||
msgstr ""
|
||||
msgstr "Firefox екстензија"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ExtensionBadge.min.js:17
|
||||
msgid "Chrome Web Store"
|
||||
msgstr ""
|
||||
msgstr "Chrome Web продавница"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ExtensionBadge.min.js:44
|
||||
msgid "Get it now:"
|
||||
msgstr ""
|
||||
msgstr "Набави их сада"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/FolderPropertiesDialog.min.js:22
|
||||
msgid "Name"
|
||||
msgstr ""
|
||||
msgstr "Име"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/FolderPropertiesDialog.min.js:23
|
||||
msgid "Icon"
|
||||
msgstr ""
|
||||
msgstr "Икона"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/FolderPropertiesDialog.min.js:272
|
||||
#, fuzzy
|
||||
|
@ -1516,7 +1516,7 @@ msgstr "Подеси аларм:"
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/MainScreen.min.js:368
|
||||
msgid "Template file:"
|
||||
msgstr ""
|
||||
msgstr "Шаблонска датотека:"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/MainScreen.min.js:547
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteText.min.js:1043
|
||||
|
@ -1545,19 +1545,19 @@ msgstr "Постави лозинку"
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteContentPropertiesDialog.js:31
|
||||
msgid "Words"
|
||||
msgstr ""
|
||||
msgstr "Речи"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteContentPropertiesDialog.js:32
|
||||
msgid "Characters"
|
||||
msgstr ""
|
||||
msgstr "Карактери"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteContentPropertiesDialog.js:33
|
||||
msgid "Characters excluding spaces"
|
||||
msgstr ""
|
||||
msgstr "Карактери искључујући празне"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteContentPropertiesDialog.js:34
|
||||
msgid "Lines"
|
||||
msgstr ""
|
||||
msgstr "Линије"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteContentPropertiesDialog.js:56
|
||||
#, fuzzy
|
||||
|
@ -1567,7 +1567,7 @@ msgstr "Својства белешке"
|
|||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteContentPropertiesDialog.js:58
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ShareNoteDialog.js:180
|
||||
msgid "Close"
|
||||
msgstr ""
|
||||
msgstr "Затвори"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteList.min.js:451
|
||||
msgid "No notes in here. Create one by clicking on \"New note\"."
|
||||
|
@ -1599,7 +1599,7 @@ msgstr "Историја о белешци"
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NotePropertiesDialog.min.js:33
|
||||
msgid "Markup"
|
||||
msgstr ""
|
||||
msgstr "Означавање"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NotePropertiesDialog.min.js:304
|
||||
msgid "Previous versions of this note"
|
||||
|
@ -1661,7 +1661,7 @@ msgstr "Копирај адресу везе"
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteText.min.js:818
|
||||
msgid "There was an error downloading this attachment:"
|
||||
msgstr ""
|
||||
msgstr "Дошло је до грешке приликом преузимања овог прилога"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/NoteText.min.js:820
|
||||
#, fuzzy
|
||||
|
@ -1778,7 +1778,7 @@ msgstr "назив"
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ResourceScreen.js:32
|
||||
msgid "Size"
|
||||
msgstr ""
|
||||
msgstr "Величина"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ResourceScreen.js:36
|
||||
#, fuzzy
|
||||
|
@ -1802,12 +1802,12 @@ msgstr "Проверавам... Молимо вас да сачекате."
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ResourceScreen.js:128
|
||||
msgid "No resources!"
|
||||
msgstr ""
|
||||
msgstr "Без ресурса"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ResourceScreen.js:130
|
||||
#, javascript-format
|
||||
msgid "Warning: not all resources shown for performance reasons (limit: %s)."
|
||||
msgstr ""
|
||||
msgstr "Упозорење: нису сви ресурси приказани због перформанси (лимит: %s)."
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/Root.min.js:89
|
||||
msgid "OneDrive Login"
|
||||
|
@ -1842,6 +1842,7 @@ msgstr[2] "Токен је копиран у клипборд!"
|
|||
msgid ""
|
||||
"Note: When a note is shared, it will no longer be encrypted on the server."
|
||||
msgstr ""
|
||||
"Белешка: Када је белешка дељена, не може се више шифровати на серверу."
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/ShareNoteDialog.js:175
|
||||
#, fuzzy
|
||||
|
@ -1858,7 +1859,7 @@ msgstr[2] "Подели"
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/SideBar.min.js:282
|
||||
msgid "Remove"
|
||||
msgstr ""
|
||||
msgstr "Уклони"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ElectronClient/gui/SideBar.min.js:285
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/components/side-menu-content.js:148
|
||||
|
@ -2211,7 +2212,7 @@ msgstr "Не могу да преместим бележницу у \"%s\" бе
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Resource.js:286
|
||||
msgid "Not downloaded"
|
||||
msgstr ""
|
||||
msgstr "Није преузето"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Resource.js:287
|
||||
#, fuzzy
|
||||
|
@ -2220,7 +2221,7 @@ msgstr "Преузимам ресурсе..."
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Resource.js:288
|
||||
msgid "Downloaded"
|
||||
msgstr ""
|
||||
msgstr "Преузето"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:27
|
||||
#, javascript-format
|
||||
|
@ -2236,7 +2237,7 @@ msgstr ""
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:45
|
||||
msgid "Keyboard Mode"
|
||||
msgstr ""
|
||||
msgstr "Режим тастатуре"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:48
|
||||
#, fuzzy
|
||||
|
@ -2245,11 +2246,11 @@ msgstr "Подразумевано: %s"
|
|||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:49
|
||||
msgid "Emacs"
|
||||
msgstr ""
|
||||
msgstr "Емакс"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:50
|
||||
msgid "Vim"
|
||||
msgstr ""
|
||||
msgstr "Вим"
|
||||
|
||||
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:60
|
||||
msgid "Synchronisation target"
|
||||
|
@ -2348,19 +2349,19 @@ msgstr "Тамна"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:246
msgid "Dracula"
msgstr ""
msgstr "Дракула"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:247
msgid "Solarised Light"
msgstr ""
msgstr "Соларизовано светло"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:248
msgid "Solarised Dark"
msgstr ""
msgstr "Соларизовано тамно"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:249
msgid "Nord"
msgstr ""
msgstr "Норд"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:251
#, fuzzy

@ -2390,19 +2391,19 @@ msgstr "&Приказ"
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:266
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:267
msgid "Split View"
msgstr ""
msgstr "Раздвојени преглед"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:264
#, javascript-format
msgid "%s / %s / %s"
msgstr ""
msgstr "%s / %s / %s"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:265
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:266
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:267
#, javascript-format
msgid "%s / %s"
msgstr ""
msgstr "%s / %s"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:270
msgid "Uncompleted to-dos on top"

@ -2418,7 +2419,7 @@ msgstr "Сортирај белешке по"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:296
msgid "Auto-pair braces, parenthesis, quotations, etc."
msgstr ""
msgstr "Ауто-упари заграде, цитате, итд."

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:298
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:316

@ -2559,7 +2560,7 @@ msgstr ""

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:471
msgid "Custom stylesheet for Joplin-wide app styles"
msgstr ""
msgstr "Прилагођена таблица стилова за стилове Џоплин програма"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:477
msgid "Automatically update the application"
@ -2611,43 +2612,43 @@ msgstr ""

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:505
msgid "Page size for PDF export"
msgstr ""
msgstr "Величина странице за извоз у ПДФ формат"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:507
msgid "A4"
msgstr ""
msgstr "А4"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:508
msgid "Letter"
msgstr ""
msgstr "Писмо"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:509
msgid "A3"
msgstr ""
msgstr "А3"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:510
msgid "A5"
msgstr ""
msgstr "А5"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:511
msgid "Tabloid"
msgstr ""
msgstr "Таблоид"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:512
msgid "Legal"
msgstr ""
msgstr "Правно"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:515
msgid "Page orientation for PDF export"
msgstr ""
msgstr "Орјентација странице за извоз у ПДФ"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:517
msgid "Portrait"
msgstr ""
msgstr "Усправно"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:518
msgid "Landscape"
msgstr ""
msgstr "Положено"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/models/Setting.js:533
msgid "Custom TLS certificates"

@ -2866,7 +2867,7 @@ msgstr "Приложи фајл"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/services/report.js:172
msgid "Downloaded and decrypted"
msgstr ""
msgstr "Преузето и дешифровано"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/services/report.js:172
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/services/report.js:173

@ -2877,7 +2878,7 @@ msgstr "Укупно: %d/%d"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/services/report.js:173
msgid "Downloaded and encrypted"
msgstr ""
msgstr "Преузето и шифровано"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/services/report.js:186
#, fuzzy

@ -3063,7 +3064,7 @@ msgstr "Унесите нове ознаке или одаберите са ли

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/components/screens/config.js:51
msgid "Warning"
msgstr ""
msgstr "Упозорење"

#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/components/screens/config.js:51
#: /Users/tessus/data/work/joplin/Tools/../ReactNativeClient/lib/components/screens/config.js:152
@ -2121,8 +2121,7 @@
"ansi-regex": {
"version": "2.1.1",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"aproba": {
"version": "1.2.0",

@ -2143,14 +2142,12 @@
"balanced-match": {
"version": "1.0.0",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"brace-expansion": {
"version": "1.1.11",
"bundled": true,
"dev": true,
"optional": true,
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"

@ -2165,20 +2162,17 @@
"code-point-at": {
"version": "1.1.0",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"concat-map": {
"version": "0.0.1",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"console-control-strings": {
"version": "1.1.0",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"core-util-is": {
"version": "1.0.2",

@ -2295,8 +2289,7 @@
"inherits": {
"version": "2.0.4",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"ini": {
"version": "1.3.5",

@ -2308,7 +2301,6 @@
"version": "1.0.0",
"bundled": true,
"dev": true,
"optional": true,
"requires": {
"number-is-nan": "^1.0.0"
}

@ -2323,7 +2315,6 @@
"version": "3.0.4",
"bundled": true,
"dev": true,
"optional": true,
"requires": {
"brace-expansion": "^1.1.7"
}

@ -2331,14 +2322,12 @@
"minimist": {
"version": "0.0.8",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"minipass": {
"version": "2.9.0",
"bundled": true,
"dev": true,
"optional": true,
"requires": {
"safe-buffer": "^5.1.2",
"yallist": "^3.0.0"

@ -2357,7 +2346,6 @@
"version": "0.5.1",
"bundled": true,
"dev": true,
"optional": true,
"requires": {
"minimist": "0.0.8"
}

@ -2447,8 +2435,7 @@
"number-is-nan": {
"version": "1.0.1",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"object-assign": {
"version": "4.1.1",

@ -2460,7 +2447,6 @@
"version": "1.4.0",
"bundled": true,
"dev": true,
"optional": true,
"requires": {
"wrappy": "1"
}

@ -2546,8 +2532,7 @@
"safe-buffer": {
"version": "5.1.2",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"safer-buffer": {
"version": "2.1.2",

@ -2583,7 +2568,6 @@
"version": "1.0.2",
"bundled": true,
"dev": true,
"optional": true,
"requires": {
"code-point-at": "^1.0.0",
"is-fullwidth-code-point": "^1.0.0",

@ -2603,7 +2587,6 @@
"version": "3.0.1",
"bundled": true,
"dev": true,
"optional": true,
"requires": {
"ansi-regex": "^2.0.0"
}

@ -2647,14 +2630,12 @@
"wrappy": {
"version": "1.0.2",
"bundled": true,
"dev": true,
"optional": true
"dev": true
},
"yallist": {
"version": "3.1.1",
"bundled": true,
"dev": true,
"optional": true
"dev": true
}
}
},

@ -6595,7 +6576,7 @@
"requires": {
"chalk": "^2.1.0",
"emphasize": "^1.5.0",
"node-emoji": "git+https://github.com/laurent22/node-emoji.git",
"node-emoji": "git+https://github.com/laurent22/node-emoji.git#9fa01eac463e94dde1316ef8c53089eeef4973b5",
"slice-ansi": "^1.0.0",
"string-width": "^2.1.1",
"terminal-kit": "^1.13.11",
@ -14,14 +14,14 @@ describe('InteropService_Importer_Md: importLocalImages', function() {
it('should import linked files and modify tags appropriately', async function() {
const tagNonExistentFile = '';
const note = await importer.importFile(`${__dirname}/md_to_md/sample.md`, 'notebook');
let items = await Note.linkedItems(note.body);
const items = await Note.linkedItems(note.body);
expect(items.length).toBe(2);
const inexistentLinkUnchanged = note.body.includes(tagNonExistentFile);
expect(inexistentLinkUnchanged).toBe(true);
});
it('should only create 1 resource for duplicate links, all tags should be updated', async function() {
const note = await importer.importFile(`${__dirname}/md_to_md/sample-duplicate-links.md`, 'notebook');
let items = await Note.linkedItems(note.body);
const items = await Note.linkedItems(note.body);
expect(items.length).toBe(1);
const reg = new RegExp(items[0].id, 'g');
const matched = note.body.match(reg);

@ -29,12 +29,12 @@ describe('InteropService_Importer_Md: importLocalImages', function() {
});
it('should import linked files and modify tags appropriately when link is also in alt text', async function() {
const note = await importer.importFile(`${__dirname}/md_to_md/sample-link-in-alt-text.md`, 'notebook');
let items = await Note.linkedItems(note.body);
const items = await Note.linkedItems(note.body);
expect(items.length).toBe(1);
});
it('should passthrough unchanged if no links present', async function() {
const note = await importer.importFile(`${__dirname}/md_to_md/sample-no-links.md`, 'notebook');
let items = await Note.linkedItems(note.body);
const items = await Note.linkedItems(note.body);
expect(items.length).toBe(0);
expect(note.body).toContain('Unidentified vessel travelling at sub warp speed, bearing 235.7. Fluctuations in energy readings from it, Captain. All transporters off.');
});
@ -44,19 +44,19 @@ describe('integration_ShowAllNotes', function() {

it('should show all notes', asyncTest(async () => {
// setup
let folders = await createNTestFolders(3);
const folders = await createNTestFolders(3);
Folder.moveToFolder(id(folders[2]), id(folders[1])); // subfolder
await time.msleep(100);
let notes0 = await createNTestNotes(3, folders[0]);
let notes1 = await createNTestNotes(3, folders[1]);
let notes2 = await createNTestNotes(3, folders[2]);
const notes0 = await createNTestNotes(3, folders[0]);
const notes1 = await createNTestNotes(3, folders[1]);
const notes2 = await createNTestNotes(3, folders[2]);

// TEST ACTION: View all-notes
testApp.dispatch({ type: 'SMART_FILTER_SELECT', id: ALL_NOTES_FILTER_ID });
await time.msleep(100);

// check: all the notes are shown
let state = testApp.store().getState();
const state = testApp.store().getState();
expect(state.notesParentType).toEqual('SmartFilter');
expect(state.selectedSmartFilterId).toEqual(ALL_NOTES_FILTER_ID);
expect(sortedIds(state.notes)).toEqual(sortedIds(notes0.concat(notes1).concat(notes2)));

@ -64,9 +64,9 @@ describe('integration_ShowAllNotes', function() {

it('should show retain note selection when going from a folder to all-notes', asyncTest(async () => {
// setup
let folders = await createNTestFolders(2);
let notes0 = await createNTestNotes(3, folders[0]);
let notes1 = await createNTestNotes(3, folders[1]);
const folders = await createNTestFolders(2);
const notes0 = await createNTestNotes(3, folders[0]);
const notes1 = await createNTestNotes(3, folders[1]);
testApp.dispatch({ type: 'FOLDER_SELECT', id: id(folders[1]) });
await time.msleep(100);
testApp.dispatch({ type: 'NOTE_SELECT', id: id(notes1[1]) });
|
|||
const { time } = require('lib/time-utils.js');
|
||||
|
||||
async function createNTestFolders(n) {
|
||||
let folders = [];
|
||||
const folders = [];
|
||||
for (let i = 0; i < n; i++) {
|
||||
let folder = await Folder.save({ title: 'folder' });
|
||||
const folder = await Folder.save({ title: 'folder' });
|
||||
folders.push(folder);
|
||||
}
|
||||
return folders;
|
||||
}
|
||||
|
||||
async function createNTestNotes(n, folder) {
|
||||
let notes = [];
|
||||
const notes = [];
|
||||
for (let i = 0; i < n; i++) {
|
||||
let note = await Note.save({ title: 'note', parent_id: folder.id, is_conflict: 0 });
|
||||
const note = await Note.save({ title: 'note', parent_id: folder.id, is_conflict: 0 });
|
||||
notes.push(note);
|
||||
}
|
||||
return notes;
|
||||
}
|
||||
|
||||
async function createNTestTags(n) {
|
||||
let tags = [];
|
||||
const tags = [];
|
||||
for (let i = 0; i < n; i++) {
|
||||
let tag = await Tag.save({ title: 'tag' });
|
||||
const tag = await Tag.save({ title: 'tag' });
|
||||
tags.push(tag);
|
||||
}
|
||||
return tags;
|
||||
|
@ -58,9 +58,9 @@ describe('integration_TagList', function() {
|
|||
// the tag list should be cleared if the next note has no tags
|
||||
it('should clear tag list when a note is deleted', asyncTest(async () => {
|
||||
// setup and select the note
|
||||
let folders = await createNTestFolders(1);
|
||||
let notes = await createNTestNotes(5, folders[0]);
|
||||
let tags = await createNTestTags(3);
|
||||
const folders = await createNTestFolders(1);
|
||||
const notes = await createNTestNotes(5, folders[0]);
|
||||
const tags = await createNTestTags(3);
|
||||
|
||||
await Tag.addNote(tags[2].id, notes[2].id);
|
||||
|
||||
|
@ -96,9 +96,9 @@ describe('integration_TagList', function() {
|
|||
// the tag list should be updated if the next note has tags
|
||||
it('should update tag list when a note is deleted', asyncTest(async () => {
|
||||
// set up and select the note
|
||||
let folders = await createNTestFolders(1);
|
||||
let notes = await createNTestNotes(5, folders[0]);
|
||||
let tags = await createNTestTags(3);
|
||||
const folders = await createNTestFolders(1);
|
||||
const notes = await createNTestNotes(5, folders[0]);
|
||||
const tags = await createNTestTags(3);
|
||||
|
||||
await Tag.addNote(tags[1].id, notes[1].id);
|
||||
await Tag.addNote(tags[0].id, notes[0].id);
|
||||
|
@ -130,8 +130,8 @@ describe('integration_TagList', function() {
|
|||
|
||||
// check the tag list is updated
|
||||
state = testApp.store().getState();
|
||||
let tagIds = state.selectedNoteTags.map(n => n.id).sort();
|
||||
let expectedTagIds = [tags[0].id, tags[2].id].sort();
|
||||
const tagIds = state.selectedNoteTags.map(n => n.id).sort();
|
||||
const expectedTagIds = [tags[0].id, tags[2].id].sort();
|
||||
expect(state.selectedNoteTags.length).toEqual(2);
|
||||
expect(tagIds).toEqual(expectedTagIds);
|
||||
}));
|
||||
|
|
|
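Later hunks in this commit call createNTestNotes() with two extra arguments (for example createNTestNotes(noOfNotes, f1, null, 'note1')), so the shared test-utils helper presumably also accepts an optional tag list and a title prefix. A minimal sketch of what such an extended helper could look like, using only calls that already appear in these tests (Note.save, Tag.addNote); the index-suffixed title and the tagIds loop are assumptions, not taken from this diff:

const Note = require('lib/models/Note.js');
const Tag = require('lib/models/Tag.js');

async function createNTestNotes(n, folder, tagIds = null, title = 'note') {
	const notes = [];
	for (let i = 0; i < n; i++) {
		// Assumed: the title prefix is suffixed with the index so each note is distinguishable.
		const note = await Note.save({ title: title + i, parent_id: folder.id, is_conflict: 0 });
		if (tagIds) {
			for (const tagId of tagIds) await Tag.addNote(tagId, note.id);
		}
		notes.push(note);
	}
	return notes;
}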
@ -16,8 +16,8 @@ process.on('unhandledRejection', (reason, p) => {
});

async function allItems() {
let folders = await Folder.all();
let notes = await Note.all();
const folders = await Folder.all();
const notes = await Note.all();
return folders.concat(notes);
}

@ -32,27 +32,27 @@ describe('models_BaseItem', function() {
// This is to handle the case where a property is removed from a BaseItem table - in that case files in
// the sync target will still have the old property but we don't need it locally.
it('should ignore properties that are present in sync file but not in database when serialising', asyncTest(async () => {
let folder = await Folder.save({ title: 'folder1' });
const folder = await Folder.save({ title: 'folder1' });

let serialized = await Folder.serialize(folder);
serialized += '\nignore_me: true';

let unserialized = await Folder.unserialize(serialized);
const unserialized = await Folder.unserialize(serialized);

expect('ignore_me' in unserialized).toBe(false);
}));

it('should not modify title when unserializing', asyncTest(async () => {
let folder1 = await Folder.save({ title: '' });
let folder2 = await Folder.save({ title: 'folder1' });
const folder1 = await Folder.save({ title: '' });
const folder2 = await Folder.save({ title: 'folder1' });

let serialized1 = await Folder.serialize(folder1);
let unserialized1 = await Folder.unserialize(serialized1);
const serialized1 = await Folder.serialize(folder1);
const unserialized1 = await Folder.unserialize(serialized1);

expect(unserialized1.title).toBe(folder1.title);

let serialized2 = await Folder.serialize(folder2);
let unserialized2 = await Folder.unserialize(serialized2);
const serialized2 = await Folder.serialize(folder2);
const unserialized2 = await Folder.unserialize(serialized2);

expect(unserialized2.title).toBe(folder2.title);
}));
@ -3,7 +3,7 @@
require('app-module-path').addPath(__dirname);

const { time } = require('lib/time-utils.js');
const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
const { createNTestNotes, asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
const Folder = require('lib/models/Folder.js');
const Note = require('lib/models/Note.js');
const BaseModel = require('lib/BaseModel.js');

@ -14,8 +14,8 @@ process.on('unhandledRejection', (reason, p) => {
});

async function allItems() {
let folders = await Folder.all();
let notes = await Note.all();
const folders = await Folder.all();
const notes = await Note.all();
return folders.concat(notes);
}

@ -28,10 +28,10 @@ describe('models_Folder', function() {
});

it('should tell if a notebook can be nested under another one', asyncTest(async () => {
let f1 = await Folder.save({ title: 'folder1' });
let f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
let f3 = await Folder.save({ title: 'folder3', parent_id: f2.id });
let f4 = await Folder.save({ title: 'folder4' });
const f1 = await Folder.save({ title: 'folder1' });
const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id });
const f4 = await Folder.save({ title: 'folder4' });

expect(await Folder.canNestUnder(f1.id, f2.id)).toBe(false);
expect(await Folder.canNestUnder(f2.id, f2.id)).toBe(false);

@ -44,9 +44,16 @@ describe('models_Folder', function() {
}));

it('should recursively delete notes and sub-notebooks', asyncTest(async () => {
let f1 = await Folder.save({ title: 'folder1' });
let f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
let n1 = await Note.save({ title: 'note1', parent_id: f2.id });
const f1 = await Folder.save({ title: 'folder1' });
const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id });
const f4 = await Folder.save({ title: 'folder4', parent_id: f1.id });

const noOfNotes = 20;
await createNTestNotes(noOfNotes, f1, null, 'note1');
await createNTestNotes(noOfNotes, f2, null, 'note2');
await createNTestNotes(noOfNotes, f3, null, 'note3');
await createNTestNotes(noOfNotes, f4, null, 'note4');

await Folder.delete(f1.id);

@ -57,10 +64,10 @@ describe('models_Folder', function() {
it('should sort by last modified, based on content', asyncTest(async () => {
let folders;

let f1 = await Folder.save({ title: 'folder1' }); await sleep(0.1);
let f2 = await Folder.save({ title: 'folder2' }); await sleep(0.1);
let f3 = await Folder.save({ title: 'folder3' }); await sleep(0.1);
let n1 = await Note.save({ title: 'note1', parent_id: f2.id });
const f1 = await Folder.save({ title: 'folder1' }); await sleep(0.1);
const f2 = await Folder.save({ title: 'folder2' }); await sleep(0.1);
const f3 = await Folder.save({ title: 'folder3' }); await sleep(0.1);
const n1 = await Note.save({ title: 'note1', parent_id: f2.id });

folders = await Folder.orderByLastModified(await Folder.all(), 'desc');
expect(folders.length).toBe(3);

@ -68,7 +75,7 @@ describe('models_Folder', function() {
expect(folders[1].id).toBe(f3.id);
expect(folders[2].id).toBe(f1.id);

let n2 = await Note.save({ title: 'note1', parent_id: f1.id });
const n2 = await Note.save({ title: 'note1', parent_id: f1.id });

folders = await Folder.orderByLastModified(await Folder.all(), 'desc');
expect(folders[0].id).toBe(f1.id);

@ -91,10 +98,10 @@ describe('models_Folder', function() {
it('should sort by last modified, based on content (sub-folders too)', asyncTest(async () => {
let folders;

let f1 = await Folder.save({ title: 'folder1' }); await sleep(0.1);
let f2 = await Folder.save({ title: 'folder2' }); await sleep(0.1);
let f3 = await Folder.save({ title: 'folder3', parent_id: f1.id }); await sleep(0.1);
let n1 = await Note.save({ title: 'note1', parent_id: f3.id });
const f1 = await Folder.save({ title: 'folder1' }); await sleep(0.1);
const f2 = await Folder.save({ title: 'folder2' }); await sleep(0.1);
const f3 = await Folder.save({ title: 'folder3', parent_id: f1.id }); await sleep(0.1);
const n1 = await Note.save({ title: 'note1', parent_id: f3.id });

folders = await Folder.orderByLastModified(await Folder.all(), 'desc');
expect(folders.length).toBe(3);

@ -102,7 +109,7 @@ describe('models_Folder', function() {
expect(folders[1].id).toBe(f3.id);
expect(folders[2].id).toBe(f2.id);

let n2 = await Note.save({ title: 'note2', parent_id: f2.id });
const n2 = await Note.save({ title: 'note2', parent_id: f2.id });
folders = await Folder.orderByLastModified(await Folder.all(), 'desc');

expect(folders[0].id).toBe(f2.id);

@ -116,8 +123,8 @@ describe('models_Folder', function() {
expect(folders[1].id).toBe(f3.id);
expect(folders[2].id).toBe(f2.id);

let f4 = await Folder.save({ title: 'folder4', parent_id: f1.id }); await sleep(0.1);
let n3 = await Note.save({ title: 'note3', parent_id: f4.id });
const f4 = await Folder.save({ title: 'folder4', parent_id: f1.id }); await sleep(0.1);
const n3 = await Note.save({ title: 'note3', parent_id: f4.id });

folders = await Folder.orderByLastModified(await Folder.all(), 'desc');
expect(folders.length).toBe(4);

@ -128,14 +135,14 @@ describe('models_Folder', function() {
}));

it('should add node counts', asyncTest(async () => {
let f1 = await Folder.save({ title: 'folder1' });
let f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
let f3 = await Folder.save({ title: 'folder3', parent_id: f2.id });
let f4 = await Folder.save({ title: 'folder4' });
const f1 = await Folder.save({ title: 'folder1' });
const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id });
const f4 = await Folder.save({ title: 'folder4' });

let n1 = await Note.save({ title: 'note1', parent_id: f3.id });
let n2 = await Note.save({ title: 'note1', parent_id: f3.id });
let n3 = await Note.save({ title: 'note1', parent_id: f1.id });
const n1 = await Note.save({ title: 'note1', parent_id: f3.id });
const n2 = await Note.save({ title: 'note1', parent_id: f3.id });
const n3 = await Note.save({ title: 'note1', parent_id: f1.id });

const folders = await Folder.all();
await Folder.addNoteCounts(folders);

@ -152,17 +159,17 @@ describe('models_Folder', function() {

it('should not count completed to-dos', asyncTest(async () => {

let f1 = await Folder.save({ title: 'folder1' });
let f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
let f3 = await Folder.save({ title: 'folder3', parent_id: f2.id });
let f4 = await Folder.save({ title: 'folder4' });
const f1 = await Folder.save({ title: 'folder1' });
const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id });
const f4 = await Folder.save({ title: 'folder4' });

let n1 = await Note.save({ title: 'note1', parent_id: f3.id });
let n2 = await Note.save({ title: 'note2', parent_id: f3.id });
let n3 = await Note.save({ title: 'note3', parent_id: f1.id });
let n4 = await Note.save({ title: 'note4', parent_id: f3.id, is_todo: true, todo_completed: 0 });
let n5 = await Note.save({ title: 'note5', parent_id: f3.id, is_todo: true, todo_completed: 999 });
let n6 = await Note.save({ title: 'note6', parent_id: f3.id, is_todo: true, todo_completed: 999 });
const n1 = await Note.save({ title: 'note1', parent_id: f3.id });
const n2 = await Note.save({ title: 'note2', parent_id: f3.id });
const n3 = await Note.save({ title: 'note3', parent_id: f1.id });
const n4 = await Note.save({ title: 'note4', parent_id: f3.id, is_todo: true, todo_completed: 0 });
const n5 = await Note.save({ title: 'note5', parent_id: f3.id, is_todo: true, todo_completed: 999 });
const n6 = await Note.save({ title: 'note6', parent_id: f3.id, is_todo: true, todo_completed: 999 });

const folders = await Folder.all();
await Folder.addNoteCounts(folders, false);

@ -177,4 +184,19 @@ describe('models_Folder', function() {
expect(foldersById[f4.id].note_count).toBe(0);
}));

it('should recursively find folder path', asyncTest(async () => {

const f1 = await Folder.save({ title: 'folder1' });
const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id });

const folders = await Folder.all();
const folderPath = await Folder.folderPath(folders, f3.id);

expect(folderPath.length).toBe(3);
expect(folderPath[0].id).toBe(f1.id);
expect(folderPath[1].id).toBe(f2.id);
expect(folderPath[2].id).toBe(f3.id);
}));

});
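The new "should recursively find folder path" test above exercises Folder.folderPath(), which appears to return the ancestor chain ordered from the root notebook down to the requested one. A small usage sketch under that assumption; the breadcrumb formatting is illustrative and not part of this commit:

const folders = await Folder.all();
const folderPath = await Folder.folderPath(folders, f3.id);
// Ordered root-first, e.g. folder1 -> folder2 -> folder3 for the nesting created in the test.
const breadcrumb = folderPath.map(f => f.title).join(' / ');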
@ -3,7 +3,7 @@
require('app-module-path').addPath(__dirname);

const { time } = require('lib/time-utils.js');
const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
const { sortedIds, createNTestNotes, asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
const Folder = require('lib/models/Folder.js');
const Note = require('lib/models/Note.js');
const BaseModel = require('lib/BaseModel.js');

@ -14,8 +14,13 @@ process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});

describe('models_Note', function() {
async function allItems() {
const folders = await Folder.all();
const notes = await Note.all();
return folders.concat(notes);
}

describe('models_Note', function() {
beforeEach(async (done) => {
await setupDatabaseAndSynchronizer(1);
await switchClient(1);

@ -23,8 +28,8 @@ describe('models_Note', function() {
});

it('should find resource and note IDs', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
let note2 = await Note.save({ title: 'ma deuxième note', body: `Lien vers première note : ${Note.markdownTag(note1)}`, parent_id: folder1.id });

let items = await Note.linkedItems(note2.body);

@ -69,7 +74,7 @@ describe('models_Note', function() {
}));

it('should change the type of notes', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
const folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
note1 = await Note.load(note1.id);

@ -90,7 +95,7 @@ describe('models_Note', function() {
}));

it('should serialize and unserialize without modifying data', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
const folder1 = await Folder.save({ title: 'folder1' });
const testCases = [
[{ title: '', body: 'Body and no title\nSecond line\nThird Line', parent_id: folder1.id },
'', 'Body and no title\nSecond line\nThird Line'],

@ -107,9 +112,9 @@ describe('models_Note', function() {
const expectedTitle = t[1];
const expectedBody = t[1];

let note1 = await Note.save(input);
let serialized = await Note.serialize(note1);
let unserialized = await Note.unserialize(serialized);
const note1 = await Note.save(input);
const serialized = await Note.serialize(note1);
const unserialized = await Note.unserialize(serialized);

expect(unserialized.title).toBe(input.title);
expect(unserialized.body).toBe(input.body);

@ -117,10 +122,10 @@ describe('models_Note', function() {
}));

it('should reset fields for a duplicate', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'note', parent_id: folder1.id });

let duplicatedNote = await Note.duplicate(note1.id);
const duplicatedNote = await Note.duplicate(note1.id);

expect(duplicatedNote !== note1).toBe(true);
expect(duplicatedNote.created_time !== note1.created_time).toBe(true);

@ -129,4 +134,65 @@ describe('models_Note', function() {
expect(duplicatedNote.user_updated_time !== note1.user_updated_time).toBe(true);
}));

it('should delete a set of notes', asyncTest(async () => {
const folder1 = await Folder.save({ title: 'folder1' });
const noOfNotes = 20;
await createNTestNotes(noOfNotes, folder1);

const noteIds = await Folder.noteIds(folder1.id);
await Note.batchDelete(noteIds);

const all = await allItems();
expect(all.length).toBe(1);
expect(all[0].id).toBe(folder1.id);
}));

it('should delete only the selected notes', asyncTest(async () => {
const f1 = await Folder.save({ title: 'folder1' });
const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });

const noOfNotes = 20;
await createNTestNotes(noOfNotes, f1, null, 'note1');
await createNTestNotes(noOfNotes, f2, null, 'note1');

const allBeforeDelete = await allItems();

const notesInFolder1IDs = await Folder.noteIds(f1.id);
const notesInFolder2IDs = await Folder.noteIds(f2.id);

const notesToRemoveFromFolder1 = notesInFolder1IDs.slice(0, 6);
const notesToRemoveFromFolder2 = notesInFolder2IDs.slice(11, 14);

await Note.batchDelete(notesToRemoveFromFolder1);
await Note.batchDelete(notesToRemoveFromFolder2);

const allAfterDelete = await allItems();

const expectedLength = allBeforeDelete.length - notesToRemoveFromFolder1.length - notesToRemoveFromFolder2.length;
expect(allAfterDelete.length).toBe(expectedLength);

// Common elements between the to-be-deleted notes and the notes and folders remaining after the delete
const intersection = [...notesToRemoveFromFolder1, ...notesToRemoveFromFolder2].filter(x => allAfterDelete.includes(x));
// Should be empty
expect(intersection.length).toBe(0);
}));

it('should delete nothing', asyncTest(async () => {
const f1 = await Folder.save({ title: 'folder1' });
const f2 = await Folder.save({ title: 'folder2', parent_id: f1.id });
const f3 = await Folder.save({ title: 'folder3', parent_id: f2.id });
const f4 = await Folder.save({ title: 'folder4', parent_id: f1.id });

const noOfNotes = 20;
await createNTestNotes(noOfNotes, f1, null, 'note1');
await createNTestNotes(noOfNotes, f2, null, 'note2');
await createNTestNotes(noOfNotes, f3, null, 'note3');
await createNTestNotes(noOfNotes, f4, null, 'note4');

const beforeDelete = await allItems();
await Note.batchDelete([]);
const afterDelete = await allItems();

expect(sortedIds(afterDelete)).toEqual(sortedIds(beforeDelete));
}));
});
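The batch-delete tests above combine Folder.noteIds() with Note.batchDelete(). A condensed sketch of that flow, assuming the same folder1 fixture as in the tests; the slice is illustrative:

const noteIds = await Folder.noteIds(folder1.id);
await Note.batchDelete(noteIds.slice(0, 6)); // removes only the listed notes
await Note.batchDelete([]); // a no-op, as the 'should delete nothing' test asserts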
@ -27,30 +27,30 @@ describe('models_Resource', function() {
});

it('should have a "done" fetch_status when created locally', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
await shim.attachFileToNote(note1, testImagePath);
let resource1 = (await Resource.all())[0];
let ls = await Resource.localState(resource1);
const resource1 = (await Resource.all())[0];
const ls = await Resource.localState(resource1);
expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_DONE);
}));

it('should have a default local state', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
await shim.attachFileToNote(note1, testImagePath);
let resource1 = (await Resource.all())[0];
let ls = await Resource.localState(resource1);
const resource1 = (await Resource.all())[0];
const ls = await Resource.localState(resource1);
expect(!ls.id).toBe(true);
expect(ls.resource_id).toBe(resource1.id);
expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_DONE);
}));

it('should save and delete local state', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
await shim.attachFileToNote(note1, testImagePath);
let resource1 = (await Resource.all())[0];
const resource1 = (await Resource.all())[0];
await Resource.setLocalState(resource1, { fetch_status: Resource.FETCH_STATUS_IDLE });

let ls = await Resource.localState(resource1);

@ -63,13 +63,13 @@ describe('models_Resource', function() {
}));

it('should resize the resource if the image is below the required dimensions', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const previousMax = Resource.IMAGE_MAX_DIMENSION;
Resource.IMAGE_MAX_DIMENSION = 5;
await shim.attachFileToNote(note1, testImagePath);
Resource.IMAGE_MAX_DIMENSION = previousMax;
let resource1 = (await Resource.all())[0];
const resource1 = (await Resource.all())[0];

const originalStat = await shim.fsDriver().stat(testImagePath);
const newStat = await shim.fsDriver().stat(Resource.fullPath(resource1));

@ -78,10 +78,10 @@ describe('models_Resource', function() {
}));

it('should not resize the resource if the image is below the required dimensions', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
await shim.attachFileToNote(note1, testImagePath);
let resource1 = (await Resource.all())[0];
const resource1 = (await Resource.all())[0];

const originalStat = await shim.fsDriver().stat(testImagePath);
const newStat = await shim.fsDriver().stat(Resource.fullPath(resource1));

@ -92,7 +92,7 @@ describe('models_Revision', function() {
- How to view a note history%0A%0AWhile all the apps
+%C2%A0How does it work?%0A%0AAll the apps save a version of the modified notes every 10 minutes.
%0A%0A# `,
expected: [-(19+27+2), 17+67+4],
expected: [-(19 + 27 + 2), 17 + 67 + 4],
},
];

@ -24,8 +24,8 @@ describe('models_Tag', function() {
});

it('should add tags by title', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });

await Tag.setNoteTagsByTitles(note1.id, ['un', 'deux']);

@ -34,8 +34,8 @@ describe('models_Tag', function() {
}));

it('should not allow renaming tag to existing tag names', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });

await Tag.setNoteTagsByTitles(note1.id, ['un', 'deux']);

@ -46,8 +46,8 @@ describe('models_Tag', function() {
}));

it('should not return tags without notes', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
await Tag.setNoteTagsByTitles(note1.id, ['un']);

let tags = await Tag.allWithNotes();

@ -60,9 +60,9 @@ describe('models_Tag', function() {
}));

it('should return tags with note counts', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
let note2 = await Note.save({ title: 'ma 2nd note', parent_id: folder1.id });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const note2 = await Note.save({ title: 'ma 2nd note', parent_id: folder1.id });
await Tag.setNoteTagsByTitles(note1.id, ['un']);
await Tag.setNoteTagsByTitles(note2.id, ['un']);

@ -83,10 +83,10 @@ describe('models_Tag', function() {
}));

it('should load individual tags with note count', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
let note2 = await Note.save({ title: 'ma 2nd note', parent_id: folder1.id });
let tag = await Tag.save({ title: 'mytag' });
const folder1 = await Folder.save({ title: 'folder1' });
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
const note2 = await Note.save({ title: 'ma 2nd note', parent_id: folder1.id });
const tag = await Tag.save({ title: 'mytag' });
await Tag.addNote(tag.id, note1.id);

let tagWithCount = await Tag.loadWithCount(tag.id);

@ -98,16 +98,16 @@ describe('models_Tag', function() {
}));

it('should get common tags for set of notes', asyncTest(async () => {
let folder1 = await Folder.save({ title: 'folder1' });
let taga = await Tag.save({ title: 'mytaga' });
let tagb = await Tag.save({ title: 'mytagb' });
let tagc = await Tag.save({ title: 'mytagc' });
let tagd = await Tag.save({ title: 'mytagd' });
const folder1 = await Folder.save({ title: 'folder1' });
const taga = await Tag.save({ title: 'mytaga' });
const tagb = await Tag.save({ title: 'mytagb' });
const tagc = await Tag.save({ title: 'mytagc' });
const tagd = await Tag.save({ title: 'mytagd' });

let note0 = await Note.save({ title: 'ma note 0', parent_id: folder1.id });
let note1 = await Note.save({ title: 'ma note 1', parent_id: folder1.id });
let note2 = await Note.save({ title: 'ma note 2', parent_id: folder1.id });
let note3 = await Note.save({ title: 'ma note 3', parent_id: folder1.id });
const note0 = await Note.save({ title: 'ma note 0', parent_id: folder1.id });
const note1 = await Note.save({ title: 'ma note 1', parent_id: folder1.id });
const note2 = await Note.save({ title: 'ma note 2', parent_id: folder1.id });
const note3 = await Note.save({ title: 'ma note 3', parent_id: folder1.id });

await Tag.addNote(taga.id, note1.id);

@ -7,7 +7,7 @@ const Note = require('lib/models/Note.js');
const Tag = require('lib/models/Tag.js');
const { reducer, defaultState, stateUtils } = require('lib/reducer.js');

function initTestState(folders, selectedFolderIndex, notes, selectedNoteIndexes, tags=null, selectedTagIndex=null) {
function initTestState(folders, selectedFolderIndex, notes, selectedNoteIndexes, tags = null, selectedTagIndex = null) {
let state = defaultState;

if (selectedFolderIndex != null) {

@ -20,7 +20,7 @@ function initTestState(folders, selectedFolderIndex, notes, selectedNoteIndexes,
state = reducer(state, { type: 'NOTE_UPDATE_ALL', notes: notes, noteSource: 'test' });
}
if (selectedNoteIndexes != null) {
let selectedIds = [];
const selectedIds = [];
for (let i = 0; i < selectedNoteIndexes.length; i++) {
selectedIds.push(notes[selectedNoteIndexes[i]].id);
}

@ -37,7 +37,7 @@ function initTestState(folders, selectedFolderIndex, notes, selectedNoteIndexes,
}

function createExpectedState(items, keepIndexes, selectedIndexes) {
let expected = { items: [], selectedIds: [] };
const expected = { items: [], selectedIds: [] };

for (let i = 0; i < selectedIndexes.length; i++) {
expected.selectedIds.push(items[selectedIndexes[i]].id);

@ -48,8 +48,8 @@ function createExpectedState(items, keepIndexes, selectedIndexes) {
return expected;
}

function getIds(items, indexes=null) {
let ids = [];
function getIds(items, indexes = null) {
const ids = [];
for (let i = 0; i < items.length; i++) {
if (indexes == null || i in indexes) {
ids.push(items[i].id);

@ -76,9 +76,9 @@ describe('Reducer', function() {
// tests for NOTE_DELETE
it('should delete selected note', asyncTest(async () => {
// create 1 folder
let folders = await createNTestFolders(1);
const folders = await createNTestFolders(1);
// create 5 notes
let notes = await createNTestNotes(5, folders[0]);
const notes = await createNTestNotes(5, folders[0]);
// select the 1st folder and the 3rd note
let state = initTestState(folders, 0, notes, [2]);

@ -87,7 +87,7 @@ describe('Reducer', function() {
state = reducer(state, { type: 'NOTE_DELETE', id: notes[2].id });

// expect that the third note is missing, and the 4th note is now selected
let expected = createExpectedState(notes, [0,1,3,4], [3]);
const expected = createExpectedState(notes, [0,1,3,4], [3]);

// check the ids of all the remaining notes
expect(getIds(state.notes)).toEqual(getIds(expected.items));

@ -96,136 +96,136 @@ describe('Reducer', function() {
}));

it('should delete selected note at top', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 0, notes, [1]);

// test action
state = reducer(state, { type: 'NOTE_DELETE', id: notes[0].id });

let expected = createExpectedState(notes, [1,2,3,4], [1]);
const expected = createExpectedState(notes, [1,2,3,4], [1]);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);
}));

it('should delete last remaining note', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(1, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(1, folders[0]);
let state = initTestState(folders, 0, notes, [0]);

// test action
state = reducer(state, { type: 'NOTE_DELETE', id: notes[0].id });

let expected = createExpectedState(notes, [], []);
const expected = createExpectedState(notes, [], []);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);
}));

it('should delete selected note at bottom', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 0, notes, [4]);

// test action
state = reducer(state, { type: 'NOTE_DELETE', id: notes[4].id });

let expected = createExpectedState(notes, [0,1,2,3], [3]);
const expected = createExpectedState(notes, [0,1,2,3], [3]);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);
}));

it('should delete note when a note below is selected', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 0, notes, [3]);

// test action
state = reducer(state, { type: 'NOTE_DELETE', id: notes[1].id });

let expected = createExpectedState(notes, [0,2,3,4], [3]);
const expected = createExpectedState(notes, [0,2,3,4], [3]);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);
}));

it('should delete note when a note above is selected', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 0, notes, [1]);

// test action
state = reducer(state, { type: 'NOTE_DELETE', id: notes[3].id });

let expected = createExpectedState(notes, [0,1,2,4], [1]);
const expected = createExpectedState(notes, [0,1,2,4], [1]);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);
}));

it('should delete selected notes', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 0, notes, [1,2]);

// test action
state = reducer(state, { type: 'NOTE_DELETE', id: notes[1].id });
state = reducer(state, { type: 'NOTE_DELETE', id: notes[2].id });

let expected = createExpectedState(notes, [0,3,4], [3]);
const expected = createExpectedState(notes, [0,3,4], [3]);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);
}));

it('should delete note when a notes below it are selected', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 0, notes, [3,4]);

// test action
state = reducer(state, { type: 'NOTE_DELETE', id: notes[1].id });

let expected = createExpectedState(notes, [0,2,3,4], [3,4]);
const expected = createExpectedState(notes, [0,2,3,4], [3,4]);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);
}));

it('should delete note when a notes above it are selected', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 0, notes, [1,2]);

// test action
state = reducer(state, { type: 'NOTE_DELETE', id: notes[3].id });

let expected = createExpectedState(notes, [0,1,2,4], [1,2]);
const expected = createExpectedState(notes, [0,1,2,4], [1,2]);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);
}));

it('should delete notes at end', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 0, notes, [3,4]);

// test action
state = reducer(state, { type: 'NOTE_DELETE', id: notes[3].id });
state = reducer(state, { type: 'NOTE_DELETE', id: notes[4].id });

let expected = createExpectedState(notes, [0,1,2], [2]);
const expected = createExpectedState(notes, [0,1,2], [2]);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);
}));

it('should delete notes when non-contiguous selection', asyncTest(async () => {
let folders = await createNTestFolders(1);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(1);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 0, notes, [0,2,4]);

// test action

@ -233,7 +233,7 @@ describe('Reducer', function() {
state = reducer(state, { type: 'NOTE_DELETE', id: notes[2].id });
state = reducer(state, { type: 'NOTE_DELETE', id: notes[4].id });

let expected = createExpectedState(notes, [1,3], [1]);
const expected = createExpectedState(notes, [1,3], [1]);

expect(getIds(state.notes)).toEqual(getIds(expected.items));
expect(state.selectedNoteIds).toEqual(expected.selectedIds);

@ -241,42 +241,42 @@ describe('Reducer', function() {

// tests for FOLDER_DELETE
it('should delete selected notebook', asyncTest(async () => {
let folders = await createNTestFolders(5);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(5);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 2, notes, [2]);

// test action
state = reducer(state, { type: 'FOLDER_DELETE', id: folders[2].id });

let expected = createExpectedState(folders, [0,1,3,4], [3]);
const expected = createExpectedState(folders, [0,1,3,4], [3]);

expect(getIds(state.folders)).toEqual(getIds(expected.items));
expect(state.selectedFolderId).toEqual(expected.selectedIds[0]);
}));

it('should delete notebook when a book above is selected', asyncTest(async () => {
let folders = await createNTestFolders(5);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(5);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 1, notes, [2]);

// test action
state = reducer(state, { type: 'FOLDER_DELETE', id: folders[2].id });

let expected = createExpectedState(folders, [0,1,3,4], [1]);
const expected = createExpectedState(folders, [0,1,3,4], [1]);

expect(getIds(state.folders)).toEqual(getIds(expected.items));
expect(state.selectedFolderId).toEqual(expected.selectedIds[0]);
}));

it('should delete notebook when a book below is selected', asyncTest(async () => {
let folders = await createNTestFolders(5);
let notes = await createNTestNotes(5, folders[0]);
const folders = await createNTestFolders(5);
const notes = await createNTestNotes(5, folders[0]);
let state = initTestState(folders, 4, notes, [2]);

// test action
state = reducer(state, { type: 'FOLDER_DELETE', id: folders[2].id });

let expected = createExpectedState(folders, [0,1,3,4], [4]);
const expected = createExpectedState(folders, [0,1,3,4], [4]);

expect(getIds(state.folders)).toEqual(getIds(expected.items));
expect(state.selectedFolderId).toEqual(expected.selectedIds[0]);

@ -284,47 +284,47 @@ describe('Reducer', function() {

// tests for TAG_DELETE
it('should delete selected tag', asyncTest(async () => {
let tags = await createNTestTags(5);
const tags = await createNTestTags(5);
let state = initTestState(null, null, null, null, tags, [2]);

// test action
state = reducer(state, { type: 'TAG_DELETE', id: tags[2].id });

let expected = createExpectedState(tags, [0,1,3,4], [3]);
const expected = createExpectedState(tags, [0,1,3,4], [3]);

expect(getIds(state.tags)).toEqual(getIds(expected.items));
expect(state.selectedTagId).toEqual(expected.selectedIds[0]);
}));

it('should delete tag when a tag above is selected', asyncTest(async () => {
let tags = await createNTestTags(5);
const tags = await createNTestTags(5);
let state = initTestState(null, null, null, null, tags, [2]);

// test action
state = reducer(state, { type: 'TAG_DELETE', id: tags[4].id });

let expected = createExpectedState(tags, [0,1,2,3], [2]);
const expected = createExpectedState(tags, [0,1,2,3], [2]);

expect(getIds(state.tags)).toEqual(getIds(expected.items));
expect(state.selectedTagId).toEqual(expected.selectedIds[0]);
}));

it('should delete tag when a tag below is selected', asyncTest(async () => {
let tags = await createNTestTags(5);
const tags = await createNTestTags(5);
let state = initTestState(null, null, null, null, tags, [2]);

// test action
state = reducer(state, { type: 'TAG_DELETE', id: tags[0].id });

let expected = createExpectedState(tags, [1,2,3,4], [2]);
const expected = createExpectedState(tags, [1,2,3,4], [2]);

expect(getIds(state.tags)).toEqual(getIds(expected.items));
expect(state.selectedTagId).toEqual(expected.selectedIds[0]);
|
||||
}));
|
||||
|
||||
it('should select all notes', asyncTest(async () => {
|
||||
let folders = await createNTestFolders(2);
|
||||
let notes = [];
|
||||
const folders = await createNTestFolders(2);
|
||||
const notes = [];
|
||||
for (let i = 0; i < folders.length; i++) {
|
||||
notes.push(...await createNTestNotes(3, folders[i]));
|
||||
}
|
||||
|
|
|
@@ -23,7 +23,7 @@ jasmine.DEFAULT_TIMEOUT_INTERVAL = 15000; // The first test is slow because the

 let service = null;

-describe('Encryption', function() {
+describe('services_EncryptionService', function() {

 	beforeEach(async (done) => {
 		await setupDatabaseAndSynchronizer(1);
@@ -49,7 +49,6 @@ describe('Encryption', function() {

 	it('should generate and decrypt a master key', asyncTest(async () => {
 		const masterKey = await service.generateMasterKey('123456');
 		expect(!!masterKey.checksum).toBe(true);
 		expect(!!masterKey.content).toBe(true);

 		let hasThrown = false;
@@ -65,6 +64,91 @@ describe('Encryption', function() {
 		expect(decryptedMasterKey.length).toBe(512);
 	}));

+	it('should upgrade a master key', asyncTest(async () => {
+		// Create an old style master key
+		let masterKey = await service.generateMasterKey('123456', {
+			encryptionMethod: EncryptionService.METHOD_SJCL_2,
+		});
+		masterKey = await MasterKey.save(masterKey);
+
+		let upgradedMasterKey = await service.upgradeMasterKey(masterKey, '123456');
+		upgradedMasterKey = await MasterKey.save(upgradedMasterKey);
+
+		// Check that master key has been upgraded (different ciphertext)
+		expect(masterKey.content).not.toBe(upgradedMasterKey.content);
+
+		// Check that master key plain text is still the same
+		const plainTextOld = await service.decryptMasterKey_(masterKey, '123456');
+		const plainTextNew = await service.decryptMasterKey_(upgradedMasterKey, '123456');
+		expect(plainTextOld.content).toBe(plainTextNew.content);
+
+		// Check that old content can be decrypted with new master key
+		await service.loadMasterKey_(masterKey, '123456', true);
+		const cipherText = await service.encryptString('some secret');
+		const plainTextFromOld = await service.decryptString(cipherText);
+
+		await service.loadMasterKey_(upgradedMasterKey, '123456', true);
+		const plainTextFromNew = await service.decryptString(cipherText);
+
+		expect(plainTextFromOld).toBe(plainTextFromNew);
+	}));
+
+	it('should not upgrade master key if invalid password', asyncTest(async () => {
+		const masterKey = await service.generateMasterKey('123456', {
+			encryptionMethod: EncryptionService.METHOD_SJCL_2,
+		});
+
+		const hasThrown = await checkThrowAsync(async () => await service.upgradeMasterKey(masterKey, '777'));
+	}));
+
+	it('should require a checksum only for old master keys', asyncTest(async () => {
+		const masterKey = await service.generateMasterKey('123456', {
+			encryptionMethod: EncryptionService.METHOD_SJCL_2,
+		});
+
+		expect(!!masterKey.checksum).toBe(true);
+		expect(!!masterKey.content).toBe(true);
+	}));
+
+	it('should not require a checksum for new master keys', asyncTest(async () => {
+		const masterKey = await service.generateMasterKey('123456', {
+			encryptionMethod: EncryptionService.METHOD_SJCL_4,
+		});
+
+		expect(!masterKey.checksum).toBe(true);
+		expect(!!masterKey.content).toBe(true);
+
+		const decryptedMasterKey = await service.decryptMasterKey_(masterKey, '123456');
+		expect(decryptedMasterKey.length).toBe(512);
+	}));
+
+	it('should throw an error if master key decryption fails', asyncTest(async () => {
+		const masterKey = await service.generateMasterKey('123456', {
+			encryptionMethod: EncryptionService.METHOD_SJCL_4,
+		});
+
+		const hasThrown = await checkThrowAsync(async () => await service.decryptMasterKey_(masterKey, 'wrong'));
+
+		expect(hasThrown).toBe(true);
+	}));
+
+	it('should return the master keys that need an upgrade', asyncTest(async () => {
+		const masterKey1 = await MasterKey.save(await service.generateMasterKey('123456', {
+			encryptionMethod: EncryptionService.METHOD_SJCL_2,
+		}));
+
+		const masterKey2 = await MasterKey.save(await service.generateMasterKey('123456', {
+			encryptionMethod: EncryptionService.METHOD_SJCL,
+		}));
+
+		const masterKey3 = await MasterKey.save(await service.generateMasterKey('123456'));
+
+		const needUpgrade = service.masterKeysThatNeedUpgrading(await MasterKey.all());
+
+		expect(needUpgrade.length).toBe(2);
+		expect(needUpgrade.map(k => k.id).sort()).toEqual([masterKey1.id, masterKey2.id].sort());
+	}));
+
 	it('should encrypt and decrypt with a master key', asyncTest(async () => {
 		let masterKey = await service.generateMasterKey('123456');
 		masterKey = await MasterKey.save(masterKey);
@@ -123,7 +207,7 @@ describe('Encryption', function() {

 		await service.unloadMasterKey(masterKey);

-		let hasThrown = await checkThrowAsync(async () => await service.decryptString(cipherText));
+		const hasThrown = await checkThrowAsync(async () => await service.decryptString(cipherText));

 		expect(hasThrown).toBe(true);
 	}));
@@ -138,7 +222,7 @@ describe('Encryption', function() {
 		let cipherText = await service.encryptString('some secret');
 		cipherText += 'ABCDEFGHIJ';

-		let hasThrown = await checkThrowAsync(async () => await service.decryptString(cipherText));
+		const hasThrown = await checkThrowAsync(async () => await service.decryptString(cipherText));

 		expect(hasThrown).toBe(true);
 	}));
@@ -148,10 +232,10 @@ describe('Encryption', function() {
 		masterKey = await MasterKey.save(masterKey);
 		await service.loadMasterKey_(masterKey, '123456', true);

-		let folder = await Folder.save({ title: 'folder' });
-		let note = await Note.save({ title: 'encrypted note', body: 'something', parent_id: folder.id });
-		let serialized = await Note.serializeForSync(note);
-		let deserialized = Note.filter(await Note.unserialize(serialized));
+		const folder = await Folder.save({ title: 'folder' });
+		const note = await Note.save({ title: 'encrypted note', body: 'something', parent_id: folder.id });
+		const serialized = await Note.serializeForSync(note);
+		const deserialized = Note.filter(await Note.unserialize(serialized));

 		// Check that required properties are not encrypted
 		expect(deserialized.id).toBe(note.id);

@@ -59,7 +59,7 @@ describe('services_InteropService', function() {
 		// Check that a new folder, with a new ID, has been created

 		expect(await Folder.count()).toBe(1);
-		let folder2 = (await Folder.all())[0];
+		const folder2 = (await Folder.all())[0];
 		expect(folder2.id).not.toBe(folder1.id);
 		expect(folder2.title).toBe(folder1.title);

@@ -68,7 +68,7 @@ describe('services_InteropService', function() {
 		// As there was already a folder with the same title, check that the new one has been renamed

 		await Folder.delete(folder2.id);
-		let folder3 = (await Folder.all())[0];
+		const folder3 = (await Folder.all())[0];
 		expect(await Folder.count()).toBe(1);
 		expect(folder3.title).not.toBe(folder2.title);

@@ -81,7 +81,7 @@ describe('services_InteropService', function() {

 	it('should export and import folders and notes', asyncTest(async () => {
 		const service = new InteropService();
-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		note1 = await Note.load(note1.id);
 		const filePath = `${exportDir()}/test.jex`;
@@ -95,7 +95,7 @@ describe('services_InteropService', function() {

 		expect(await Note.count()).toBe(1);
 		let note2 = (await Note.all())[0];
-		let folder2 = (await Folder.all())[0];
+		const folder2 = (await Folder.all())[0];

 		expect(note1.parent_id).not.toBe(note2.parent_id);
 		expect(note1.id).not.toBe(note2.id);
@@ -110,7 +110,7 @@ describe('services_InteropService', function() {
 		await service.import({ path: filePath });

 		note2 = (await Note.all())[0];
-		let note3 = (await Note.all())[1];
+		const note3 = (await Note.all())[1];

 		expect(note2.id).not.toBe(note3.id);
 		expect(note2.parent_id).not.toBe(note3.parent_id);
@@ -120,7 +120,7 @@ describe('services_InteropService', function() {

 	it('should export and import notes to specific folder', asyncTest(async () => {
 		const service = new InteropService();
-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		note1 = await Note.load(note1.id);
 		const filePath = `${exportDir()}/test.jex`;
@@ -140,8 +140,8 @@ describe('services_InteropService', function() {
 	it('should export and import tags', asyncTest(async () => {
 		const service = new InteropService();
 		const filePath = `${exportDir()}/test.jex`;
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		let tag1 = await Tag.save({ title: 'mon tag' });
 		tag1 = await Tag.load(tag1.id);
 		await Tag.addNote(tag1.id, note1.id);
@@ -155,8 +155,8 @@ describe('services_InteropService', function() {
 		await service.import({ path: filePath });

 		expect(await Tag.count()).toBe(1);
-		let tag2 = (await Tag.all())[0];
-		let note2 = (await Note.all())[0];
+		const tag2 = (await Tag.all())[0];
+		const note2 = (await Note.all())[0];
 		expect(tag1.id).not.toBe(tag2.id);

 		let fieldNames = Note.fieldNames();
@@ -180,12 +180,12 @@ describe('services_InteropService', function() {
 	it('should export and import resources', asyncTest(async () => {
 		const service = new InteropService();
 		const filePath = `${exportDir()}/test.jex`;
-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
 		note1 = await Note.load(note1.id);
 		let resourceIds = await Note.linkedResourceIds(note1.body);
-		let resource1 = await Resource.load(resourceIds[0]);
+		const resource1 = await Resource.load(resourceIds[0]);

 		await service.export({ path: filePath });

@@ -195,11 +195,11 @@ describe('services_InteropService', function() {

 		expect(await Resource.count()).toBe(2);

-		let note2 = (await Note.all())[0];
+		const note2 = (await Note.all())[0];
 		expect(note2.body).not.toBe(note1.body);
 		resourceIds = await Note.linkedResourceIds(note2.body);
 		expect(resourceIds.length).toBe(1);
-		let resource2 = await Resource.load(resourceIds[0]);
+		const resource2 = await Resource.load(resourceIds[0]);
 		expect(resource2.id).not.toBe(resource1.id);

 		let fieldNames = Note.fieldNames();
@@ -216,8 +216,8 @@ describe('services_InteropService', function() {
 	it('should export and import single notes', asyncTest(async () => {
 		const service = new InteropService();
 		const filePath = `${exportDir()}/test.jex`;
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });

 		await service.export({ path: filePath, sourceNoteIds: [note1.id] });

@@ -229,15 +229,15 @@ describe('services_InteropService', function() {
 		expect(await Note.count()).toBe(1);
 		expect(await Folder.count()).toBe(1);

-		let folder2 = (await Folder.all())[0];
+		const folder2 = (await Folder.all())[0];
 		expect(folder2.title).toBe('test');
 	}));

 	it('should export and import single folders', asyncTest(async () => {
 		const service = new InteropService();
 		const filePath = `${exportDir()}/test.jex`;
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });

 		await service.export({ path: filePath, sourceFolderIds: [folder1.id] });

@@ -249,7 +249,7 @@ describe('services_InteropService', function() {
 		expect(await Note.count()).toBe(1);
 		expect(await Folder.count()).toBe(1);

-		let folder2 = (await Folder.all())[0];
+		const folder2 = (await Folder.all())[0];
 		expect(folder2.title).toBe('folder1');
 	}));

@@ -257,11 +257,11 @@ describe('services_InteropService', function() {

 		const service = new InteropService();
 		const filePath = `${exportDir()}/test.jex`;
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
-		let folder3 = await Folder.save({ title: 'folder3', parent_id: folder2.id });
-		let folder4 = await Folder.save({ title: 'folder4', parent_id: folder2.id });
-		let note1 = await Note.save({ title: 'ma note', parent_id: folder4.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
+		const folder3 = await Folder.save({ title: 'folder3', parent_id: folder2.id });
+		const folder4 = await Folder.save({ title: 'folder4', parent_id: folder2.id });
+		const note1 = await Note.save({ title: 'ma note', parent_id: folder4.id });

 		await service.export({ path: filePath, sourceFolderIds: [folder1.id] });

@@ -276,11 +276,11 @@ describe('services_InteropService', function() {
 		expect(await Note.count()).toBe(1);
 		expect(await Folder.count()).toBe(4);

-		let folder1_2 = await Folder.loadByTitle('folder1');
-		let folder2_2 = await Folder.loadByTitle('folder2');
-		let folder3_2 = await Folder.loadByTitle('folder3');
-		let folder4_2 = await Folder.loadByTitle('folder4');
-		let note1_2 = await Note.loadByTitle('ma note');
+		const folder1_2 = await Folder.loadByTitle('folder1');
+		const folder2_2 = await Folder.loadByTitle('folder2');
+		const folder3_2 = await Folder.loadByTitle('folder3');
+		const folder4_2 = await Folder.loadByTitle('folder4');
+		const note1_2 = await Note.loadByTitle('ma note');

 		expect(folder2_2.parent_id).toBe(folder1_2.id);
 		expect(folder3_2.parent_id).toBe(folder2_2.id);
@@ -291,9 +291,9 @@ describe('services_InteropService', function() {
 	it('should export and import links to notes', asyncTest(async () => {
 		const service = new InteropService();
 		const filePath = `${exportDir()}/test.jex`;
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
-		let note2 = await Note.save({ title: 'ma deuxième note', body: `Lien vers première note : ${Note.markdownTag(note1)}`, parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
+		const note2 = await Note.save({ title: 'ma deuxième note', body: `Lien vers première note : ${Note.markdownTag(note1)}`, parent_id: folder1.id });

 		await service.export({ path: filePath, sourceFolderIds: [folder1.id] });

@@ -306,15 +306,15 @@ describe('services_InteropService', function() {
 		expect(await Note.count()).toBe(2);
 		expect(await Folder.count()).toBe(1);

-		let note1_2 = await Note.loadByTitle('ma note');
-		let note2_2 = await Note.loadByTitle('ma deuxième note');
+		const note1_2 = await Note.loadByTitle('ma note');
+		const note2_2 = await Note.loadByTitle('ma deuxième note');

 		expect(note2_2.body.indexOf(note1_2.id) >= 0).toBe(true);
 	}));

 	it('should export into json format', asyncTest(async () => {
 		const service = new InteropService();
-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		note1 = await Note.load(note1.id);
 		const filePath = exportDir();
@@ -325,8 +325,8 @@ describe('services_InteropService', function() {
 		const items = [folder1, note1];
 		for (let i = 0; i < items.length; i++) {
 			const jsonFile = `${filePath}/${items[i].id}.json`;
-			let json = await fs.readFile(jsonFile, 'utf-8');
-			let obj = JSON.parse(json);
+			const json = await fs.readFile(jsonFile, 'utf-8');
+			const obj = JSON.parse(json);
 			expect(obj.id).toBe(items[i].id);
 			expect(obj.type_).toBe(items[i].type_);
 			expect(obj.title).toBe(items[i].title);
@@ -336,7 +336,7 @@ describe('services_InteropService', function() {

 	it('should export selected notes in md format', asyncTest(async () => {
 		const service = new InteropService();
-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note11 = await Note.save({ title: 'title note11', parent_id: folder1.id });
 		note11 = await Note.load(note11.id);
 		let note12 = await Note.save({ title: 'title note12', parent_id: folder1.id });
@@ -365,15 +365,15 @@ describe('services_InteropService', function() {

 	it('should export MD with unicode filenames', asyncTest(async () => {
 		const service = new InteropService();
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let folder2 = await Folder.save({ title: 'ジョプリン' });
-		let note1 = await Note.save({ title: '生活', parent_id: folder1.id });
-		let note2 = await Note.save({ title: '生活', parent_id: folder1.id });
-		let note2b = await Note.save({ title: '生活', parent_id: folder1.id });
-		let note3 = await Note.save({ title: '', parent_id: folder1.id });
-		let note4 = await Note.save({ title: '', parent_id: folder1.id });
-		let note5 = await Note.save({ title: 'salut, ça roule ?', parent_id: folder1.id });
-		let note6 = await Note.save({ title: 'ジョプリン', parent_id: folder2.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const folder2 = await Folder.save({ title: 'ジョプリン' });
+		const note1 = await Note.save({ title: '生活', parent_id: folder1.id });
+		const note2 = await Note.save({ title: '生活', parent_id: folder1.id });
+		const note2b = await Note.save({ title: '生活', parent_id: folder1.id });
+		const note3 = await Note.save({ title: '', parent_id: folder1.id });
+		const note4 = await Note.save({ title: '', parent_id: folder1.id });
+		const note5 = await Note.save({ title: 'salut, ça roule ?', parent_id: folder1.id });
+		const note6 = await Note.save({ title: 'ジョプリン', parent_id: folder2.id });

 		const outDir = exportDir();

@@ -49,9 +49,9 @@ describe('services_InteropService_Exporter_Md', function() {
 			});
 		};

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'note1', parent_id: folder1.id });
-		let note2 = await Note.save({ title: 'note2', parent_id: folder1.id });
+		const note2 = await Note.save({ title: 'note2', parent_id: folder1.id });
 		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
 		note1 = await Note.load(note1.id);
 		queueExportItem(BaseModel.TYPE_FOLDER, folder1.id);
@@ -59,7 +59,7 @@ describe('services_InteropService_Exporter_Md', function() {
 		queueExportItem(BaseModel.TYPE_NOTE, note2);
 		queueExportItem(BaseModel.TYPE_RESOURCE, (await Note.linkedResourceIds(note1.body))[0]);

-		let folder2 = await Folder.save({ title: 'folder2' });
+		const folder2 = await Folder.save({ title: 'folder2' });
 		let note3 = await Note.save({ title: 'note3', parent_id: folder2.id });
 		await shim.attachFileToNote(note3, `${__dirname}/../tests/support/photo.jpg`);
 		note3 = await Note.load(note3.id);
@@ -91,9 +91,9 @@ describe('services_InteropService_Exporter_Md', function() {
 			});
 		};

-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'note1', parent_id: folder1.id });
-		let note1_2 = await Note.save({ title: 'note1', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'note1', parent_id: folder1.id });
+		const note1_2 = await Note.save({ title: 'note1', parent_id: folder1.id });
 		queueExportItem(BaseModel.TYPE_FOLDER, folder1.id);
 		queueExportItem(BaseModel.TYPE_NOTE, note1);
 		queueExportItem(BaseModel.TYPE_NOTE, note1_2);
@@ -118,8 +118,8 @@ describe('services_InteropService_Exporter_Md', function() {
 			});
 		};

-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'note1', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'note1', parent_id: folder1.id });
 		queueExportItem(BaseModel.TYPE_FOLDER, folder1.id);
 		queueExportItem(BaseModel.TYPE_NOTE, note1);

@@ -145,23 +145,23 @@ describe('services_InteropService_Exporter_Md', function() {
 			});
 		};

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'note1', parent_id: folder1.id });
 		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
 		note1 = await Note.load(note1.id);
 		queueExportItem(BaseModel.TYPE_FOLDER, folder1.id);
 		queueExportItem(BaseModel.TYPE_NOTE, note1);
 		queueExportItem(BaseModel.TYPE_RESOURCE, (await Note.linkedResourceIds(note1.body))[0]);
-		let resource1 = await Resource.load(itemsToExport[2].itemOrId);
+		const resource1 = await Resource.load(itemsToExport[2].itemOrId);

-		let folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
+		const folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
 		let note2 = await Note.save({ title: 'note2', parent_id: folder2.id });
 		await shim.attachFileToNote(note2, `${__dirname}/../tests/support/photo.jpg`);
 		note2 = await Note.load(note2.id);
 		queueExportItem(BaseModel.TYPE_FOLDER, folder2.id);
 		queueExportItem(BaseModel.TYPE_NOTE, note2);
 		queueExportItem(BaseModel.TYPE_RESOURCE, (await Note.linkedResourceIds(note2.body))[0]);
-		let resource2 = await Resource.load(itemsToExport[5].itemOrId);
+		const resource2 = await Resource.load(itemsToExport[5].itemOrId);

 		await exporter.processResource(resource1, Resource.fullPath(resource1));
 		await exporter.processResource(resource2, Resource.fullPath(resource2));
@@ -182,13 +182,13 @@ describe('services_InteropService_Exporter_Md', function() {
 			});
 		};

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });

-		let folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
-		let note2 = await Note.save({ title: 'note2', parent_id: folder2.id });
+		const folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
+		const note2 = await Note.save({ title: 'note2', parent_id: folder2.id });
 		queueExportItem(BaseModel.TYPE_NOTE, note2);

-		let folder3 = await Folder.save({ title: 'folder3', parent_id: folder1.id });
+		const folder3 = await Folder.save({ title: 'folder3', parent_id: folder1.id });
 		queueExportItem(BaseModel.TYPE_FOLDER, folder3.id);

 		await exporter.processItem(Folder, folder2);
@@ -213,18 +213,18 @@ describe('services_InteropService_Exporter_Md', function() {
 			});
 		};

-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'note1', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'note1', parent_id: folder1.id });
 		queueExportItem(BaseModel.TYPE_FOLDER, folder1.id);
 		queueExportItem(BaseModel.TYPE_NOTE, note1);

-		let folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
-		let note2 = await Note.save({ title: 'note2', parent_id: folder2.id });
+		const folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
+		const note2 = await Note.save({ title: 'note2', parent_id: folder2.id });
 		queueExportItem(BaseModel.TYPE_FOLDER, folder2.id);
 		queueExportItem(BaseModel.TYPE_NOTE, note2);

-		let folder3 = await Folder.save({ title: 'folder3' });
-		let note3 = await Note.save({ title: 'note3', parent_id: folder3.id });
+		const folder3 = await Folder.save({ title: 'folder3' });
+		const note3 = await Note.save({ title: 'note3', parent_id: folder3.id });
 		queueExportItem(BaseModel.TYPE_FOLDER, folder3.id);
 		queueExportItem(BaseModel.TYPE_NOTE, note3);

@@ -250,24 +250,24 @@ describe('services_InteropService_Exporter_Md', function() {
 			});
 		};

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'note1', parent_id: folder1.id });
 		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
 		note1 = await Note.load(note1.id);
 		queueExportItem(BaseModel.TYPE_NOTE, note1);
-		let resource1 = await Resource.load((await Note.linkedResourceIds(note1.body))[0]);
+		const resource1 = await Resource.load((await Note.linkedResourceIds(note1.body))[0]);

-		let folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
+		const folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
 		let note2 = await Note.save({ title: 'note2', parent_id: folder2.id });
 		await shim.attachFileToNote(note2, `${__dirname}/../tests/support/photo.jpg`);
 		note2 = await Note.load(note2.id);
 		queueExportItem(BaseModel.TYPE_NOTE, note2);
-		let resource2 = await Resource.load((await Note.linkedResourceIds(note2.body))[0]);
+		const resource2 = await Resource.load((await Note.linkedResourceIds(note2.body))[0]);

 		await exporter.processItem(Folder, folder1);
 		await exporter.processItem(Folder, folder2);
 		await exporter.prepareForProcessingItemType(BaseModel.TYPE_NOTE, itemsToExport);
-		let context = {
+		const context = {
 			resourcePaths: {},
 		};
 		context.resourcePaths[resource1.id] = 'resource1.jpg';
@@ -276,8 +276,8 @@ describe('services_InteropService_Exporter_Md', function() {
 		await exporter.processItem(Note, note1);
 		await exporter.processItem(Note, note2);

-		let note1_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note1.id]}`);
-		let note2_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note2.id]}`);
+		const note1_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note1.id]}`);
+		const note2_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note2.id]}`);

 		expect(note1_body).toContain('](../_resources/resource1.jpg)', 'Resource id should be replaced with a relative path.');
 		expect(note2_body).toContain('](../../_resources/resource2.jpg)', 'Resource id should be replaced with a relative path.');
@@ -301,13 +301,13 @@ describe('services_InteropService_Exporter_Md', function() {
 			return await Note.load(note.id);
 		};

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'note1', parent_id: folder1.id });

-		let folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
+		const folder2 = await Folder.save({ title: 'folder2', parent_id: folder1.id });
 		let note2 = await Note.save({ title: 'note2', parent_id: folder2.id });

-		let folder3 = await Folder.save({ title: 'folder3' });
+		const folder3 = await Folder.save({ title: 'folder3' });
 		let note3 = await Note.save({ title: 'note3', parent_id: folder3.id });

 		note1 = await changeNoteBodyAndReload(note1, `# Some text \n\n [A link to note3](:/${note3.id})`);
@@ -325,9 +325,9 @@ describe('services_InteropService_Exporter_Md', function() {
 		await exporter.processItem(Note, note2);
 		await exporter.processItem(Note, note3);

-		let note1_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note1.id]}`);
-		let note2_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note2.id]}`);
-		let note3_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note3.id]}`);
+		const note1_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note1.id]}`);
+		const note2_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note2.id]}`);
+		const note3_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note3.id]}`);

 		expect(note1_body).toContain('](../folder3/note3.md)', 'Note id should be replaced with a relative path.');
 		expect(note2_body).toContain('](../../folder3/note3.md)', 'Resource id should be replaced with a relative path.');
@@ -347,9 +347,9 @@ describe('services_InteropService_Exporter_Md', function() {
 			});
 		};

-		let folder1 = await Folder.save({ title: 'folder with space1' });
-		let note1 = await Note.save({ title: 'note1 name with space', parent_id: folder1.id });
-		let note2 = await Note.save({ title: 'note2', parent_id: folder1.id, body: `[link](:/${note1.id})` });
+		const folder1 = await Folder.save({ title: 'folder with space1' });
+		const note1 = await Note.save({ title: 'note1 name with space', parent_id: folder1.id });
+		const note2 = await Note.save({ title: 'note2', parent_id: folder1.id, body: `[link](:/${note1.id})` });
 		queueExportItem(BaseModel.TYPE_NOTE, note1);
 		queueExportItem(BaseModel.TYPE_NOTE, note2);

@@ -358,7 +358,7 @@ describe('services_InteropService_Exporter_Md', function() {
 		await exporter.processItem(Note, note1);
 		await exporter.processItem(Note, note2);

-		let note2_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note2.id]}`);
+		const note2_body = await shim.fsDriver().readFile(`${exportDir}/${exporter.context().notePaths[note2.id]}`);
 		expect(note2_body).toContain('[link](../folder%20with%20space1/note1%20name%20with%20space.md)', 'Whitespace in URL should be encoded');
 	}));
 });

@@ -48,10 +48,10 @@ describe('services_ResourceService', function() {
 	it('should delete orphaned resources', asyncTest(async () => {
 		const service = new ResourceService();

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		note1 = await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
-		let resource1 = (await Resource.all())[0];
+		const resource1 = (await Resource.all())[0];
 		const resourcePath = Resource.fullPath(resource1);

 		await service.indexNoteResources();
@@ -79,11 +79,11 @@ describe('services_ResourceService', function() {
 	it('should not delete resource if still associated with at least one note', asyncTest(async () => {
 		const service = new ResourceService();

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
-		let note2 = await Note.save({ title: 'ma deuxième note', parent_id: folder1.id });
+		const note2 = await Note.save({ title: 'ma deuxième note', parent_id: folder1.id });
 		note1 = await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
-		let resource1 = (await Resource.all())[0];
+		const resource1 = (await Resource.all())[0];

 		await service.indexNoteResources();

@@ -113,10 +113,10 @@ describe('services_ResourceService', function() {
 	it('should not delete resource if it is used in an IMG tag', asyncTest(async () => {
 		const service = new ResourceService();

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		note1 = await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
-		let resource1 = (await Resource.all())[0];
+		const resource1 = (await Resource.all())[0];

 		await service.indexNoteResources();

@@ -132,10 +132,10 @@ describe('services_ResourceService', function() {
 	it('should not process twice the same change', asyncTest(async () => {
 		const service = new ResourceService();

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		note1 = await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
-		let resource1 = (await Resource.all())[0];
+		const resource1 = (await Resource.all())[0];

 		await service.indexNoteResources();

@@ -169,8 +169,8 @@ describe('services_ResourceService', function() {
 		const masterKey = await loadEncryptionMasterKey();
 		await encryptionService().enableEncryption(masterKey, '123456');
 		await encryptionService().loadMasterKeysFromSettings();
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`); // R1
 		await resourceService().indexNoteResources();
 		await synchronizer().start();
@@ -199,7 +199,7 @@ describe('services_ResourceService', function() {
 	it('should double-check if the resource is still linked before deleting it', asyncTest(async () => {
 		SearchEngine.instance().setDb(db()); // /!\ Note that we use the global search engine here, which we shouldn't but will work for now

-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
 		note1 = await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
 		await resourceService().indexNoteResources();

@@ -1,4 +1,5 @@
 /* eslint-disable no-unused-vars */
+/* eslint prefer-const: 0*/

 require('app-module-path').addPath(__dirname);


@@ -8,6 +8,7 @@ const Folder = require('lib/models/Folder');
 const Resource = require('lib/models/Resource');
 const Note = require('lib/models/Note');
 const Tag = require('lib/models/Tag');
+const NoteTag = require('lib/models/NoteTag');
 const { shim } = require('lib/shim');

 jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
@@ -36,26 +37,26 @@ describe('services_rest_Api', function() {
 	}));

 	it('should get folders', asyncTest(async () => {
-		let f1 = await Folder.save({ title: 'mon carnet' });
+		const f1 = await Folder.save({ title: 'mon carnet' });
 		const response = await api.route('GET', 'folders');
 		expect(response.length).toBe(1);
 	}));

 	it('should update folders', asyncTest(async () => {
-		let f1 = await Folder.save({ title: 'mon carnet' });
+		const f1 = await Folder.save({ title: 'mon carnet' });
 		const response = await api.route('PUT', `folders/${f1.id}`, null, JSON.stringify({
 			title: 'modifié',
 		}));

-		let f1b = await Folder.load(f1.id);
+		const f1b = await Folder.load(f1.id);
 		expect(f1b.title).toBe('modifié');
 	}));

 	it('should delete folders', asyncTest(async () => {
-		let f1 = await Folder.save({ title: 'mon carnet' });
+		const f1 = await Folder.save({ title: 'mon carnet' });
 		await api.route('DELETE', `folders/${f1.id}`);

-		let f1b = await Folder.load(f1.id);
+		const f1b = await Folder.load(f1.id);
 		expect(!f1b).toBe(true);
 	}));

@@ -66,13 +67,13 @@ describe('services_rest_Api', function() {

 		expect(!!response.id).toBe(true);

-		let f = await Folder.all();
+		const f = await Folder.all();
 		expect(f.length).toBe(1);
 		expect(f[0].title).toBe('from api');
 	}));

 	it('should get one folder', asyncTest(async () => {
-		let f1 = await Folder.save({ title: 'mon carnet' });
+		const f1 = await Folder.save({ title: 'mon carnet' });
 		const response = await api.route('GET', `folders/${f1.id}`);
 		expect(response.id).toBe(f1.id);

@@ -81,7 +82,7 @@ describe('services_rest_Api', function() {
 	}));

 	it('should get the folder notes', asyncTest(async () => {
-		let f1 = await Folder.save({ title: 'mon carnet' });
+		const f1 = await Folder.save({ title: 'mon carnet' });
 		const response2 = await api.route('GET', `folders/${f1.id}/notes`);
 		expect(response2.length).toBe(0);

@@ -326,4 +327,84 @@ describe('services_rest_Api', function() {
 		expect(response3.length).toBe(2);
 	}));

+	it('should update tags when updating notes', asyncTest(async () => {
+		const tag1 = await Tag.save({ title: 'mon étiquette 1' });
+		const tag2 = await Tag.save({ title: 'mon étiquette 2' });
+		const tag3 = await Tag.save({ title: 'mon étiquette 3' });
+
+		const note = await Note.save({
+			title: 'ma note un',
+		});
+		Tag.addNote(tag1.id, note.id);
+		Tag.addNote(tag2.id, note.id);
+
+		const response = await api.route('PUT', `notes/${note.id}`, null, JSON.stringify({
+			tags: `${tag1.title},${tag3.title}`,
+		}));
+		const tagIds = await NoteTag.tagIdsByNoteId(note.id);
+		expect(response.tags === `${tag1.title},${tag3.title}`).toBe(true);
+		expect(tagIds.length === 2).toBe(true);
+		expect(tagIds.includes(tag1.id)).toBe(true);
+		expect(tagIds.includes(tag3.id)).toBe(true);
+	}));
+
+	it('should create and update tags when updating notes', asyncTest(async () => {
+		const tag1 = await Tag.save({ title: 'mon étiquette 1' });
+		const tag2 = await Tag.save({ title: 'mon étiquette 2' });
+		const newTagTitle = 'mon étiquette 3';
+
+		const note = await Note.save({
+			title: 'ma note un',
+		});
+		Tag.addNote(tag1.id, note.id);
+		Tag.addNote(tag2.id, note.id);
+
+		const response = await api.route('PUT', `notes/${note.id}`, null, JSON.stringify({
+			tags: `${tag1.title},${newTagTitle}`,
+		}));
+		const newTag = await Tag.loadByTitle(newTagTitle);
+		const tagIds = await NoteTag.tagIdsByNoteId(note.id);
+		expect(response.tags === `${tag1.title},${newTag.title}`).toBe(true);
+		expect(tagIds.length === 2).toBe(true);
+		expect(tagIds.includes(tag1.id)).toBe(true);
+		expect(tagIds.includes(newTag.id)).toBe(true);
+	}));
+
+	it('should not update tags if tags is not mentioned when updating', asyncTest(async () => {
+		const tag1 = await Tag.save({ title: 'mon étiquette 1' });
+		const tag2 = await Tag.save({ title: 'mon étiquette 2' });
+
+		const note = await Note.save({
+			title: 'ma note un',
+		});
+		Tag.addNote(tag1.id, note.id);
+		Tag.addNote(tag2.id, note.id);
+
+		const response = await api.route('PUT', `notes/${note.id}`, null, JSON.stringify({
+			title: 'Some other title',
+		}));
+		const tagIds = await NoteTag.tagIdsByNoteId(note.id);
+		expect(response.tags === undefined).toBe(true);
+		expect(tagIds.length === 2).toBe(true);
+		expect(tagIds.includes(tag1.id)).toBe(true);
+		expect(tagIds.includes(tag2.id)).toBe(true);
+	}));
+
+	it('should remove tags from note if tags is set to empty string when updating', asyncTest(async () => {
+		const tag1 = await Tag.save({ title: 'mon étiquette 1' });
+		const tag2 = await Tag.save({ title: 'mon étiquette 2' });
+
+		const note = await Note.save({
+			title: 'ma note un',
+		});
+		Tag.addNote(tag1.id, note.id);
+		Tag.addNote(tag2.id, note.id);
+
+		const response = await api.route('PUT', `notes/${note.id}`, null, JSON.stringify({
+			tags: '',
+		}));
+		const tagIds = await NoteTag.tagIdsByNoteId(note.id);
+		expect(response.tags === '').toBe(true);
+		expect(tagIds.length === 0).toBe(true);
+	}));
 });

@@ -27,8 +27,8 @@ process.on('unhandledRejection', (reason, p) => {
 jasmine.DEFAULT_TIMEOUT_INTERVAL = 60000 + 30000; // The first test is slow because the database needs to be built

 async function allNotesFolders() {
-	let folders = await Folder.all();
-	let notes = await Note.all();
+	const folders = await Folder.all();
+	const notes = await Note.all();
 	return folders.concat(notes);
 }

@@ -66,9 +66,9 @@ async function localNotesFoldersSameAsRemote(locals, expect) {
 	expect(locals.length).toBe(nf.length);

 	for (let i = 0; i < locals.length; i++) {
-		let dbItem = locals[i];
-		let path = BaseItem.systemPath(dbItem);
-		let remote = await fileApi().stat(path);
+		const dbItem = locals[i];
+		const path = BaseItem.systemPath(dbItem);
+		const remote = await fileApi().stat(path);

 		expect(!!remote).toBe(true);
 		if (!remote) continue;
@@ -101,10 +101,10 @@ describe('synchronizer', function() {
 	});

 	it('should create remote items', asyncTest(async () => {
-		let folder = await Folder.save({ title: 'folder1' });
+		const folder = await Folder.save({ title: 'folder1' });
 		await Note.save({ title: 'un', parent_id: folder.id });

-		let all = await allNotesFolders();
+		const all = await allNotesFolders();

 		await synchronizer().start();

@@ -112,20 +112,20 @@ describe('synchronizer', function() {
 	}));

 	it('should update remote items', asyncTest(async () => {
-		let folder = await Folder.save({ title: 'folder1' });
-		let note = await Note.save({ title: 'un', parent_id: folder.id });
+		const folder = await Folder.save({ title: 'folder1' });
+		const note = await Note.save({ title: 'un', parent_id: folder.id });
 		await synchronizer().start();

 		await Note.save({ title: 'un UPDATE', id: note.id });

-		let all = await allNotesFolders();
+		const all = await allNotesFolders();
 		await synchronizer().start();

 		await localNotesFoldersSameAsRemote(all, expect);
 	}));

 	it('should create local items', asyncTest(async () => {
-		let folder = await Folder.save({ title: 'folder1' });
+		const folder = await Folder.save({ title: 'folder1' });
 		await Note.save({ title: 'un', parent_id: folder.id });
 		await synchronizer().start();

@@ -133,14 +133,14 @@ describe('synchronizer', function() {

 		await synchronizer().start();

-		let all = await allNotesFolders();
+		const all = await allNotesFolders();

 		await localNotesFoldersSameAsRemote(all, expect);
 	}));

 	it('should update local items', asyncTest(async () => {
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'un', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
 		await synchronizer().start();

 		await switchClient(2);
@@ -160,14 +160,14 @@ describe('synchronizer', function() {

 		await synchronizer().start();

-		let all = await allNotesFolders();
+		const all = await allNotesFolders();

 		await localNotesFoldersSameAsRemote(all, expect);
 	}));

 	it('should resolve note conflicts', asyncTest(async () => {
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'un', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
 		await synchronizer().start();

 		await switchClient(2);
@@ -186,29 +186,29 @@ describe('synchronizer', function() {
 		await Note.save(note2conf);
 		note2conf = await Note.load(note1.id);
 		await synchronizer().start();
-		let conflictedNotes = await Note.conflictedNotes();
+		const conflictedNotes = await Note.conflictedNotes();
 		expect(conflictedNotes.length).toBe(1);

 		// Other than the id (since the conflicted note is a duplicate), and the is_conflict property
 		// the conflicted and original note must be the same in every way, to make sure no data has been lost.
-		let conflictedNote = conflictedNotes[0];
+		const conflictedNote = conflictedNotes[0];
 		expect(conflictedNote.id == note2conf.id).toBe(false);
-		for (let n in conflictedNote) {
+		for (const n in conflictedNote) {
 			if (!conflictedNote.hasOwnProperty(n)) continue;
 			if (n == 'id' || n == 'is_conflict') continue;
 			expect(conflictedNote[n]).toBe(note2conf[n], `Property: ${n}`);
 		}

-		let noteUpdatedFromRemote = await Note.load(note1.id);
-		for (let n in noteUpdatedFromRemote) {
+		const noteUpdatedFromRemote = await Note.load(note1.id);
+		for (const n in noteUpdatedFromRemote) {
 			if (!noteUpdatedFromRemote.hasOwnProperty(n)) continue;
 			expect(noteUpdatedFromRemote[n]).toBe(note2[n], `Property: ${n}`);
 		}
 	}));

 	it('should resolve folders conflicts', asyncTest(async () => {
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'un', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
 		await synchronizer().start();

 		await switchClient(2); // ----------------------------------
@@ -235,13 +235,13 @@ describe('synchronizer', function() {

 		await synchronizer().start();

-		let folder1_final = await Folder.load(folder1.id);
+		const folder1_final = await Folder.load(folder1.id);
 		expect(folder1_final.title).toBe(folder1_modRemote.title);
 	}));

 	it('should delete remote notes', asyncTest(async () => {
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'un', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
 		await synchronizer().start();

 		await switchClient(2);
@@ -258,13 +258,13 @@ describe('synchronizer', function() {
 		expect(remotes.length).toBe(1);
 		expect(remotes[0].id).toBe(folder1.id);

-		let deletedItems = await BaseItem.deletedItems(syncTargetId());
+		const deletedItems = await BaseItem.deletedItems(syncTargetId());
 		expect(deletedItems.length).toBe(0);
 	}));

 	it('should not created deleted_items entries for items deleted via sync', asyncTest(async () => {
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'un', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
 		await synchronizer().start();

 		await switchClient(2);
@@ -276,7 +276,7 @@ describe('synchronizer', function() {
 		await switchClient(1);

 		await synchronizer().start();
-		let deletedItems = await BaseItem.deletedItems(syncTargetId());
+		const deletedItems = await BaseItem.deletedItems(syncTargetId());
 		expect(deletedItems.length).toBe(0);
 	}));

@@ -285,9 +285,9 @@ describe('synchronizer', function() {
 		// property of the basicDelta() function is cleared properly at the end of a sync operation. If it is not cleared
 		// it means items will no longer be deleted locally via sync.

-		let folder1 = await Folder.save({ title: 'folder1' });
-		let note1 = await Note.save({ title: 'un', parent_id: folder1.id });
-		let note2 = await Note.save({ title: 'deux', parent_id: folder1.id });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
+		const note2 = await Note.save({ title: 'deux', parent_id: folder1.id });
 		let context1 = await synchronizer().start();

 		await switchClient(2);
@@ -299,17 +299,17 @@ describe('synchronizer', function() {
 		await switchClient(1);

 		context1 = await synchronizer().start({ context: context1 });
-		let items = await allNotesFolders();
+		const items = await allNotesFolders();
 		expect(items.length).toBe(2);
-		let deletedItems = await BaseItem.deletedItems(syncTargetId());
+		const deletedItems = await BaseItem.deletedItems(syncTargetId());
 		expect(deletedItems.length).toBe(0);
 		await Note.delete(note2.id);
 		context1 = await synchronizer().start({ context: context1 });
 	}));

 	it('should delete remote folder', asyncTest(async () => {
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let folder2 = await Folder.save({ title: 'folder2' });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const folder2 = await Folder.save({ title: 'folder2' });
 		await synchronizer().start();

 		await switchClient(2);
@@ -322,30 +322,30 @@ describe('synchronizer', function() {

 		await synchronizer().start();

-		let all = await allNotesFolders();
+		const all = await allNotesFolders();
 		await localNotesFoldersSameAsRemote(all, expect);
 	}));

 	it('should delete local folder', asyncTest(async () => {
-		let folder1 = await Folder.save({ title: 'folder1' });
-		let folder2 = await Folder.save({ title: 'folder2' });
-		let context1 = await synchronizer().start();
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const folder2 = await Folder.save({ title: 'folder2' });
+		const context1 = await synchronizer().start();

 		await switchClient(2);

-		let context2 = await synchronizer().start();
+		const context2 = await synchronizer().start();
 		await Folder.delete(folder2.id);
 		await synchronizer().start({ context: context2 });

 		await switchClient(1);

 		await synchronizer().start({ context: context1 });
-		let items = await allNotesFolders();
+		const items = await allNotesFolders();
 		await localNotesFoldersSameAsRemote(items, expect);
 	}));

 	it('should resolve conflict if remote folder has been deleted, but note has been added to folder locally', asyncTest(async () => {
-		let folder1 = await Folder.save({ title: 'folder1' });
+		const folder1 = await Folder.save({ title: 'folder1' });
 		await synchronizer().start();

 		await switchClient(2);
@@ -356,17 +356,17 @@ describe('synchronizer', function() {

 		await switchClient(1);

-		let note = await Note.save({ title: 'note1', parent_id: folder1.id });
+		const note = await Note.save({ title: 'note1', parent_id: folder1.id });
 		await synchronizer().start();
-		let items = await allNotesFolders();
+		const items = await allNotesFolders();
 		expect(items.length).toBe(1);
 		expect(items[0].title).toBe('note1');
 		expect(items[0].is_conflict).toBe(1);
 	}));

 	it('should resolve conflict if note has been deleted remotely and locally', asyncTest(async () => {
-		let folder = await Folder.save({ title: 'folder' });
-		let note = await Note.save({ title: 'note', parent_id: folder.title });
+		const folder = await Folder.save({ title: 'folder' });
+		const note = await Note.save({ title: 'note', parent_id: folder.title });
 		await synchronizer().start();

 		await switchClient(2);
@@ -380,7 +380,7 @@ describe('synchronizer', function() {
 		await Note.delete(note.id);
 		await synchronizer().start();

-		let items = await allNotesFolders();
+		const items = await allNotesFolders();
 		expect(items.length).toBe(1);
 		expect(items[0].title).toBe('folder');

@@ -391,8 +391,8 @@ describe('synchronizer', function() {
 		// If client1 and 2 have two folders, client 1 deletes item 1 and client
 		// 2 deletes item 2, they should both end up with no items after sync.

-		let folder1 = await Folder.save({ title: 'folder1' });
-		let folder2 = await Folder.save({ title: 'folder2' });
+		const folder1 = await Folder.save({ title: 'folder1' });
+		const folder2 = await Folder.save({ title: 'folder2' });
 		await synchronizer().start();

 		await switchClient(2);
@@ -413,21 +413,21 @@ describe('synchronizer', function() {

 		await synchronizer().start();

-		let items2 = await allNotesFolders();
+		const items2 = await allNotesFolders();

 		await switchClient(1);

 		await synchronizer().start();

-		let items1 = await allNotesFolders();
+		const items1 = await allNotesFolders();

 		expect(items1.length).toBe(0);
 		expect(items1.length).toBe(items2.length);
|
||||
}));
|
||||
|
||||
it('should handle conflict when remote note is deleted then local note is modified', asyncTest(async () => {
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -442,25 +442,25 @@ describe('synchronizer', function() {
|
|||
|
||||
await switchClient(1);
|
||||
|
||||
let newTitle = 'Modified after having been deleted';
|
||||
const newTitle = 'Modified after having been deleted';
|
||||
await Note.save({ id: note1.id, title: newTitle });
|
||||
|
||||
await synchronizer().start();
|
||||
|
||||
let conflictedNotes = await Note.conflictedNotes();
|
||||
const conflictedNotes = await Note.conflictedNotes();
|
||||
|
||||
expect(conflictedNotes.length).toBe(1);
|
||||
expect(conflictedNotes[0].title).toBe(newTitle);
|
||||
|
||||
let unconflictedNotes = await Note.unconflictedNotes();
|
||||
const unconflictedNotes = await Note.unconflictedNotes();
|
||||
|
||||
expect(unconflictedNotes.length).toBe(0);
|
||||
}));
|
||||
|
||||
it('should handle conflict when remote folder is deleted then local folder is renamed', asyncTest(async () => {
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let folder2 = await Folder.save({ title: 'folder2' });
|
||||
let note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const folder2 = await Folder.save({ title: 'folder2' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -477,18 +477,18 @@ describe('synchronizer', function() {
|
|||
|
||||
await sleep(0.1);
|
||||
|
||||
let newTitle = 'Modified after having been deleted';
|
||||
const newTitle = 'Modified after having been deleted';
|
||||
await Folder.save({ id: folder1.id, title: newTitle });
|
||||
|
||||
await synchronizer().start();
|
||||
|
||||
let items = await allNotesFolders();
|
||||
const items = await allNotesFolders();
|
||||
|
||||
expect(items.length).toBe(1);
|
||||
}));
|
||||
|
||||
it('should allow duplicate folder titles', asyncTest(async () => {
|
||||
let localF1 = await Folder.save({ title: 'folder' });
|
||||
const localF1 = await Folder.save({ title: 'folder' });
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
|
@ -501,7 +501,7 @@ describe('synchronizer', function() {
|
|||
|
||||
await synchronizer().start();
|
||||
|
||||
let localF2 = await Folder.load(remoteF2.id);
|
||||
const localF2 = await Folder.load(remoteF2.id);
|
||||
|
||||
expect(localF2.title == remoteF2.title).toBe(true);
|
||||
|
||||
|
@ -528,10 +528,10 @@ describe('synchronizer', function() {
|
|||
masterKey = await loadEncryptionMasterKey();
|
||||
}
|
||||
|
||||
let f1 = await Folder.save({ title: 'folder' });
|
||||
let n1 = await Note.save({ title: 'mynote' });
|
||||
let n2 = await Note.save({ title: 'mynote2' });
|
||||
let tag = await Tag.save({ title: 'mytag' });
|
||||
const f1 = await Folder.save({ title: 'folder' });
|
||||
const n1 = await Note.save({ title: 'mynote' });
|
||||
const n2 = await Note.save({ title: 'mynote2' });
|
||||
const tag = await Tag.save({ title: 'mytag' });
|
||||
let context1 = await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -540,10 +540,10 @@ describe('synchronizer', function() {
|
|||
if (withEncryption) {
|
||||
const masterKey_2 = await MasterKey.load(masterKey.id);
|
||||
await encryptionService().loadMasterKey_(masterKey_2, '123456', true);
|
||||
let t = await Tag.load(tag.id);
|
||||
const t = await Tag.load(tag.id);
|
||||
await Tag.decrypt(t);
|
||||
}
|
||||
let remoteTag = await Tag.loadByTitle(tag.title);
|
||||
const remoteTag = await Tag.loadByTitle(tag.title);
|
||||
expect(!!remoteTag).toBe(true);
|
||||
expect(remoteTag.id).toBe(tag.id);
|
||||
await Tag.addNote(remoteTag.id, n1.id);
|
||||
|
@ -579,22 +579,22 @@ describe('synchronizer', function() {
|
|||
}));
|
||||
|
||||
it('should not sync notes with conflicts', asyncTest(async () => {
|
||||
let f1 = await Folder.save({ title: 'folder' });
|
||||
let n1 = await Note.save({ title: 'mynote', parent_id: f1.id, is_conflict: 1 });
|
||||
const f1 = await Folder.save({ title: 'folder' });
|
||||
const n1 = await Note.save({ title: 'mynote', parent_id: f1.id, is_conflict: 1 });
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizer().start();
|
||||
let notes = await Note.all();
|
||||
let folders = await Folder.all();
|
||||
const notes = await Note.all();
|
||||
const folders = await Folder.all();
|
||||
expect(notes.length).toBe(0);
|
||||
expect(folders.length).toBe(1);
|
||||
}));
|
||||
|
||||
it('should not try to delete on remote conflicted notes that have been deleted', asyncTest(async () => {
|
||||
let f1 = await Folder.save({ title: 'folder' });
|
||||
let n1 = await Note.save({ title: 'mynote', parent_id: f1.id });
|
||||
const f1 = await Folder.save({ title: 'folder' });
|
||||
const n1 = await Note.save({ title: 'mynote', parent_id: f1.id });
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -613,8 +613,8 @@ describe('synchronizer', function() {
|
|||
await loadEncryptionMasterKey();
|
||||
}
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -625,7 +625,7 @@ describe('synchronizer', function() {
|
|||
await decryptionWorker().start();
|
||||
}
|
||||
let note2 = await Note.load(note1.id);
|
||||
note2.todo_completed = time.unixMs()-1;
|
||||
note2.todo_completed = time.unixMs() - 1;
|
||||
await Note.save(note2);
|
||||
note2 = await Note.load(note2.id);
|
||||
await synchronizer().start();
|
||||
|
@ -646,10 +646,10 @@ describe('synchronizer', function() {
// but in practice it doesn't matter, we can just take the date when the
// todo was marked as "done" the first time.

let conflictedNotes = await Note.conflictedNotes();
const conflictedNotes = await Note.conflictedNotes();
expect(conflictedNotes.length).toBe(0);

let notes = await Note.all();
const notes = await Note.all();
expect(notes.length).toBe(1);
expect(notes[0].id).toBe(note1.id);
expect(notes[0].todo_completed).toBe(note2.todo_completed);
@ -658,10 +658,10 @@ describe('synchronizer', function() {
// smart conflict resolving since we don't know the content, so in that
// case it's handled as a regular conflict.

let conflictedNotes = await Note.conflictedNotes();
const conflictedNotes = await Note.conflictedNotes();
expect(conflictedNotes.length).toBe(1);

let notes = await Note.all();
const notes = await Note.all();
expect(notes.length).toBe(2);
}
}
@ -675,14 +675,14 @@ describe('synchronizer', function() {
}));
|
||||
|
||||
it('items should be downloaded again when user cancels in the middle of delta operation', asyncTest(async () => {
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
synchronizer().testingHooks_ = ['cancelDeltaLoop2'];
|
||||
let context = await synchronizer().start();
|
||||
const context = await synchronizer().start();
|
||||
let notes = await Note.all();
|
||||
expect(notes.length).toBe(0);
|
||||
|
||||
|
@ -693,8 +693,8 @@ describe('synchronizer', function() {
|
|||
}));
|
||||
|
||||
it('should skip items that cannot be synced', asyncTest(async () => {
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', is_todo: 1, parent_id: folder1.id });
|
||||
const noteId = note1.id;
|
||||
await synchronizer().start();
|
||||
let disabledItems = await BaseItem.syncDisabledItems(syncTargetId());
|
||||
|
@ -708,7 +708,7 @@ describe('synchronizer', function() {
|
|||
await switchClient(2);
|
||||
|
||||
await synchronizer().start();
|
||||
let notes = await Note.all();
|
||||
const notes = await Note.all();
|
||||
expect(notes.length).toBe(1);
|
||||
expect(notes[0].title).toBe('un');
|
||||
|
||||
|
@ -721,7 +721,7 @@ describe('synchronizer', function() {
|
|||
it('notes and folders should get encrypted when encryption is enabled', asyncTest(async () => {
|
||||
Setting.setValue('encryption.enabled', true);
|
||||
const masterKey = await loadEncryptionMasterKey();
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'un', body: 'to be encrypted', parent_id: folder1.id });
|
||||
await synchronizer().start();
|
||||
// After synchronisation, remote items should be encrypted but local ones remain plain text
|
||||
|
@ -733,7 +733,7 @@ describe('synchronizer', function() {
|
|||
await synchronizer().start();
|
||||
let folder1_2 = await Folder.load(folder1.id);
|
||||
let note1_2 = await Note.load(note1.id);
|
||||
let masterKey_2 = await MasterKey.load(masterKey.id);
|
||||
const masterKey_2 = await MasterKey.load(masterKey.id);
|
||||
// On this side however it should be received encrypted
|
||||
expect(!note1_2.title).toBe(true);
|
||||
expect(!folder1_2.title).toBe(true);
|
||||
|
@ -820,7 +820,7 @@ describe('synchronizer', function() {
|
|||
|
||||
it('should encrypt existing notes too when enabling E2EE', asyncTest(async () => {
|
||||
// First create a folder, without encryption enabled, and sync it
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await synchronizer().start();
|
||||
let files = await fileApi().list();
|
||||
let content = await fileApi().get(files.items[0].path);
|
||||
|
@ -848,18 +848,18 @@ describe('synchronizer', function() {
|
|||
it('should sync resources', asyncTest(async () => {
|
||||
while (insideBeforeEach) await time.msleep(500);
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
||||
let resource1 = (await Resource.all())[0];
|
||||
let resourcePath1 = Resource.fullPath(resource1);
|
||||
const resource1 = (await Resource.all())[0];
|
||||
const resourcePath1 = Resource.fullPath(resource1);
|
||||
await synchronizer().start();
|
||||
expect((await remoteNotesFoldersResources()).length).toBe(3);
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizer().start();
|
||||
let allResources = await Resource.all();
|
||||
const allResources = await Resource.all();
|
||||
expect(allResources.length).toBe(1);
|
||||
let resource1_2 = allResources[0];
|
||||
let ls = await Resource.localState(resource1_2);
|
||||
|
@ -874,18 +874,18 @@ describe('synchronizer', function() {
|
|||
ls = await Resource.localState(resource1_2);
|
||||
expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_DONE);
|
||||
|
||||
let resourcePath1_2 = Resource.fullPath(resource1_2);
|
||||
const resourcePath1_2 = Resource.fullPath(resource1_2);
|
||||
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
|
||||
}));
|
||||
|
||||
it('should handle resource download errors', asyncTest(async () => {
|
||||
while (insideBeforeEach) await time.msleep(500);
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
||||
let resource1 = (await Resource.all())[0];
|
||||
let resourcePath1 = Resource.fullPath(resource1);
|
||||
const resourcePath1 = Resource.fullPath(resource1);
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -902,7 +902,7 @@ describe('synchronizer', function() {
|
|||
await fetcher.waitForAllFinished();
|
||||
|
||||
resource1 = await Resource.load(resource1.id);
|
||||
let ls = await Resource.localState(resource1);
|
||||
const ls = await Resource.localState(resource1);
|
||||
expect(ls.fetch_status).toBe(Resource.FETCH_STATUS_ERROR);
|
||||
expect(ls.fetch_error).toBe('did not work');
|
||||
}));
|
||||
|
@ -910,8 +910,8 @@ describe('synchronizer', function() {
|
|||
it('should set the resource file size if it is missing', asyncTest(async () => {
|
||||
while (insideBeforeEach) await time.msleep(500);
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
||||
await synchronizer().start();
|
||||
|
||||
|
@ -933,11 +933,11 @@ describe('synchronizer', function() {
|
|||
it('should delete resources', asyncTest(async () => {
|
||||
while (insideBeforeEach) await time.msleep(500);
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
||||
let resource1 = (await Resource.all())[0];
|
||||
let resourcePath1 = Resource.fullPath(resource1);
|
||||
const resource1 = (await Resource.all())[0];
|
||||
const resourcePath1 = Resource.fullPath(resource1);
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -945,7 +945,7 @@ describe('synchronizer', function() {
|
|||
await synchronizer().start();
|
||||
let allResources = await Resource.all();
|
||||
expect(allResources.length).toBe(1);
|
||||
let all = await fileApi().list();
|
||||
const all = await fileApi().list();
|
||||
expect((await remoteNotesFoldersResources()).length).toBe(3);
|
||||
await Resource.delete(resource1.id);
|
||||
await synchronizer().start();
|
||||
|
@ -967,11 +967,11 @@ describe('synchronizer', function() {
|
|||
Setting.setValue('encryption.enabled', true);
|
||||
const masterKey = await loadEncryptionMasterKey();
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
||||
let resource1 = (await Resource.all())[0];
|
||||
let resourcePath1 = Resource.fullPath(resource1);
|
||||
const resource1 = (await Resource.all())[0];
|
||||
const resourcePath1 = Resource.fullPath(resource1);
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -986,7 +986,7 @@ describe('synchronizer', function() {
|
|||
|
||||
let resource1_2 = (await Resource.all())[0];
|
||||
resource1_2 = await Resource.decrypt(resource1_2);
|
||||
let resourcePath1_2 = Resource.fullPath(resource1_2);
|
||||
const resourcePath1_2 = Resource.fullPath(resource1_2);
|
||||
|
||||
expect(fileContentEqual(resourcePath1, resourcePath1_2)).toBe(true);
|
||||
}));
|
||||
|
@ -995,7 +995,7 @@ describe('synchronizer', function() {
|
|||
Setting.setValue('encryption.enabled', true);
|
||||
const masterKey = await loadEncryptionMasterKey();
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await synchronizer().start();
|
||||
|
||||
let allEncrypted = await allSyncTargetItemsEncrypted();
|
||||
|
@ -1016,7 +1016,7 @@ describe('synchronizer', function() {
|
|||
Setting.setValue('encryption.enabled', true);
|
||||
const masterKey = await loadEncryptionMasterKey();
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -1049,12 +1049,12 @@ describe('synchronizer', function() {
|
|||
Setting.setValue('encryption.enabled', true);
|
||||
const masterKey = await loadEncryptionMasterKey();
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
||||
let resource1 = (await Resource.all())[0];
|
||||
const resource1 = (await Resource.all())[0];
|
||||
await Resource.setFileSizeOnly(resource1.id, -1);
|
||||
let resourcePath1 = Resource.fullPath(resource1);
|
||||
const resourcePath1 = Resource.fullPath(resource1);
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -1075,8 +1075,8 @@ describe('synchronizer', function() {
|
|||
it('should encrypt remote resources after encryption has been enabled', asyncTest(async () => {
|
||||
while (insideBeforeEach) await time.msleep(100);
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
||||
await synchronizer().start();
|
||||
|
||||
|
@ -1094,22 +1094,22 @@ describe('synchronizer', function() {
|
|||
it('should upload encrypted resource, but it should not mark the blob as encrypted locally', asyncTest(async () => {
|
||||
while (insideBeforeEach) await time.msleep(100);
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
|
||||
await shim.attachFileToNote(note1, `${__dirname}/../tests/support/photo.jpg`);
|
||||
const masterKey = await loadEncryptionMasterKey();
|
||||
await encryptionService().enableEncryption(masterKey, '123456');
|
||||
await encryptionService().loadMasterKeysFromSettings();
|
||||
await synchronizer().start();
|
||||
|
||||
let resource1 = (await Resource.all())[0];
|
||||
const resource1 = (await Resource.all())[0];
|
||||
expect(resource1.encryption_blob_encrypted).toBe(0);
|
||||
}));
|
||||
|
||||
it('should create remote items with UTF-8 content', asyncTest(async () => {
|
||||
let folder = await Folder.save({ title: 'Fahrräder' });
|
||||
const folder = await Folder.save({ title: 'Fahrräder' });
|
||||
await Note.save({ title: 'Fahrräder', body: 'Fahrräder', parent_id: folder.id });
|
||||
let all = await allNotesFolders();
|
||||
const all = await allNotesFolders();
|
||||
|
||||
await synchronizer().start();
|
||||
|
||||
|
@ -1117,8 +1117,8 @@ describe('synchronizer', function() {
|
|||
}));
|
||||
|
||||
it('should update remote items but not pull remote changes', asyncTest(async () => {
|
||||
let folder = await Folder.save({ title: 'folder1' });
|
||||
let note = await Note.save({ title: 'un', parent_id: folder.id });
|
||||
const folder = await Folder.save({ title: 'folder1' });
|
||||
const note = await Note.save({ title: 'un', parent_id: folder.id });
|
||||
await synchronizer().start();
|
||||
|
||||
await switchClient(2);
|
||||
|
@ -1131,13 +1131,13 @@ describe('synchronizer', function() {
|
|||
|
||||
await Note.save({ title: 'un UPDATE', id: note.id });
|
||||
await synchronizer().start({ syncSteps: ['update_remote'] });
|
||||
let all = await allNotesFolders();
|
||||
const all = await allNotesFolders();
|
||||
expect(all.length).toBe(2);
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizer().start();
|
||||
let note2 = await Note.load(note.id);
|
||||
const note2 = await Note.load(note.id);
|
||||
expect(note2.title).toBe('un UPDATE');
|
||||
}));
|
||||
|
||||
|
@ -1544,8 +1544,8 @@ describe('synchronizer', function() {
|
|||
Setting.setValue('encryption.enabled', true);
|
||||
await loadEncryptionMasterKey();
|
||||
|
||||
let folder1 = await Folder.save({ title: 'folder1' });
|
||||
let note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
const folder1 = await Folder.save({ title: 'folder1' });
|
||||
const note1 = await Note.save({ title: 'un', parent_id: folder1.id });
|
||||
let note2 = await Note.save({ title: 'deux', parent_id: folder1.id });
|
||||
await synchronizer().start();
|
||||
|
||||
|
@ -1570,12 +1570,12 @@ describe('synchronizer', function() {
|
|||
await synchronizer().start();
|
||||
|
||||
// The shared note should be decrypted
|
||||
let note2_2 = await Note.load(note2.id);
|
||||
const note2_2 = await Note.load(note2.id);
|
||||
expect(note2_2.title).toBe('deux');
|
||||
expect(note2_2.is_shared).toBe(1);
|
||||
|
||||
// The non-shared note should be encrypted
|
||||
let note1_2 = await Note.load(note1.id);
|
||||
const note1_2 = await Note.load(note1.id);
|
||||
expect(note1_2.title).toBe('');
|
||||
}));

@ -3,7 +3,7 @@
const fs = require('fs-extra');
const { JoplinDatabase } = require('lib/joplin-database.js');
const { DatabaseDriverNode } = require('lib/database-driver-node.js');
const { BaseApplication }= require('lib/BaseApplication.js');
const { BaseApplication } = require('lib/BaseApplication.js');
const BaseModel = require('lib/BaseModel.js');
const Folder = require('lib/models/Folder.js');
const Note = require('lib/models/Note.js');
@ -41,13 +41,13 @@ const KvStore = require('lib/services/KvStore.js');
const WebDavApi = require('lib/WebDavApi');
const DropboxApi = require('lib/DropboxApi');

let databases_ = [];
let synchronizers_ = [];
let encryptionServices_ = [];
let revisionServices_ = [];
let decryptionWorkers_ = [];
let resourceServices_ = [];
let kvStores_ = [];
const databases_ = [];
const synchronizers_ = [];
const encryptionServices_ = [];
const revisionServices_ = [];
const decryptionWorkers_ = [];
const resourceServices_ = [];
const kvStores_ = [];
let fileApi_ = null;
let currentClient_ = 1;

@ -341,7 +341,7 @@ function fileApi() {

function objectsEqual(o1, o2) {
if (Object.getOwnPropertyNames(o1).length !== Object.getOwnPropertyNames(o2).length) return false;
for (let n in o1) {
for (const n in o1) {
if (!o1.hasOwnProperty(n)) continue;
if (o1[n] !== o2[n]) return false;
}
@ -427,7 +427,7 @@ function sortedIds(a) {
}

function at(a, indexes) {
let out = [];
const out = [];
for (let i = 0; i < indexes.length; i++) {
out.push(a[indexes[i]]);
}
@ -435,19 +435,19 @@ function at(a, indexes) {
}

async function createNTestFolders(n) {
let folders = [];
const folders = [];
for (let i = 0; i < n; i++) {
let folder = await Folder.save({ title: 'folder' });
const folder = await Folder.save({ title: 'folder' });
folders.push(folder);
}
return folders;
}

async function createNTestNotes(n, folder, tagIds = null, title = 'note') {
let notes = [];
const notes = [];
for (let i = 0; i < n; i++) {
let title_ = n > 1 ? `${title}${i}` : title;
let note = await Note.save({ title: title_, parent_id: folder.id, is_conflict: 0 });
const title_ = n > 1 ? `${title}${i}` : title;
const note = await Note.save({ title: title_, parent_id: folder.id, is_conflict: 0 });
notes.push(note);
}
if (tagIds) {
@ -459,9 +459,9 @@ async function createNTestNotes(n, folder, tagIds = null, title = 'note') {
}

async function createNTestTags(n) {
let tags = [];
const tags = [];
for (let i = 0; i < n; i++) {
let tag = await Tag.save({ title: 'tag' });
const tag = await Tag.save({ title: 'tag' });
tags.push(tag);
}
return tags;

@ -290,8 +290,8 @@
// option to clip pages as HTML.
function getStyleSheets(doc) {
const output = [];
for (var i=0; i<doc.styleSheets.length; i++) {
var sheet = doc.styleSheets[i];
for (let i = 0; i < doc.styleSheets.length; i++) {
const sheet = doc.styleSheets[i];
try {
for (const cssRule of sheet.cssRules) {
output.push({ type: 'text', value: cssRule.cssText });
@ -530,7 +530,7 @@

} else if (command.name === 'pageUrl') {

let url = pageLocationOrigin() + location.pathname + location.search;
const url = pageLocationOrigin() + location.pathname + location.search;
return clippedContentResponse(pageTitle(), url, getImageSizes(document), getAnchorNames(document));

} else {

@ -1,7 +1,7 @@
{
"manifest_version": 2,
"name": "Joplin Web Clipper [DEV]",
"version": "1.0.23",
"version": "1.0.25",
"description": "Capture and save web pages and screenshots from your browser to Joplin.",
"homepage_url": "https://joplinapp.org",
"content_security_policy": "script-src 'self'; object-src 'self'",

@ -1,45 +1,5 @@
{
"name": "joplin-webclipper",
"version": "1.0.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"fs-extra": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-6.0.1.tgz",
"integrity": "sha512-GnyIkKhhzXZUWFCaJzvyDLEEgDkPfb4/TPvJCJVuS8MWZgoSsErf++QpiAlDnKFcqhRlm+tIOcencCjyJE6ZCA==",
"dev": true,
"requires": {
"graceful-fs": "^4.1.2",
"jsonfile": "^4.0.0",
"universalify": "^0.1.0"
}
},
"graceful-fs": {
"version": "4.1.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz",
"integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=",
"dev": true
},
"jsonfile": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
"integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
"dev": true,
"requires": {
"graceful-fs": "^4.1.6"
}
},
"readability-node": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/readability-node/-/readability-node-0.1.0.tgz",
"integrity": "sha1-DUBacMLCFZRKf0qbX3UGzQWpsao="
},
"universalify": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.1.tgz",
"integrity": "sha1-+nG63UQ3r0wUiEHjs7Fl+enlkLc=",
"dev": true
}
}
"lockfileVersion": 1
}

@ -0,0 +1,93 @@
'use strict';

const fs = require('fs');
const path = require('path');
const paths = require('./paths');

// Make sure that including paths.js after env.js will read .env variables.
delete require.cache[require.resolve('./paths')];

const NODE_ENV = process.env.NODE_ENV;
if (!NODE_ENV) {
throw new Error(
'The NODE_ENV environment variable is required but was not specified.'
);
}

// https://github.com/bkeepers/dotenv#what-other-env-files-can-i-use
const dotenvFiles = [
`${paths.dotenv}.${NODE_ENV}.local`,
`${paths.dotenv}.${NODE_ENV}`,
// Don't include `.env.local` for `test` environment
// since normally you expect tests to produce the same
// results for everyone
NODE_ENV !== 'test' && `${paths.dotenv}.local`,
paths.dotenv,
].filter(Boolean);

// Load environment variables from .env* files. Suppress warnings using silent
// if this file is missing. dotenv will never modify any environment variables
// that have already been set. Variable expansion is supported in .env files.
// https://github.com/motdotla/dotenv
// https://github.com/motdotla/dotenv-expand
dotenvFiles.forEach(dotenvFile => {
if (fs.existsSync(dotenvFile)) {
require('dotenv-expand')(
require('dotenv').config({
path: dotenvFile,
})
);
}
});

// We support resolving modules according to `NODE_PATH`.
// This lets you use absolute paths in imports inside large monorepos:
// https://github.com/facebook/create-react-app/issues/253.
// It works similar to `NODE_PATH` in Node itself:
// https://nodejs.org/api/modules.html#modules_loading_from_the_global_folders
// Note that unlike in Node, only *relative* paths from `NODE_PATH` are honored.
// Otherwise, we risk importing Node.js core modules into an app instead of Webpack shims.
// https://github.com/facebook/create-react-app/issues/1023#issuecomment-265344421
// We also resolve them to make sure all tools using them work consistently.
const appDirectory = fs.realpathSync(process.cwd());
process.env.NODE_PATH = (process.env.NODE_PATH || '')
.split(path.delimiter)
.filter(folder => folder && !path.isAbsolute(folder))
.map(folder => path.resolve(appDirectory, folder))
.join(path.delimiter);

// Grab NODE_ENV and REACT_APP_* environment variables and prepare them to be
// injected into the application via DefinePlugin in Webpack configuration.
const REACT_APP = /^REACT_APP_/i;

function getClientEnvironment(publicUrl) {
const raw = Object.keys(process.env)
.filter(key => REACT_APP.test(key))
.reduce(
(env, key) => {
env[key] = process.env[key];
return env;
},
{
// Useful for determining whether we’re running in production mode.
// Most importantly, it switches React into the correct mode.
NODE_ENV: process.env.NODE_ENV || 'development',
// Useful for resolving the correct path to static assets in `public`.
// For example, <img src={process.env.PUBLIC_URL + '/img/logo.png'} />.
// This should only be used as an escape hatch. Normally you would put
// images into the `src` and `import` them in code to get their paths.
PUBLIC_URL: publicUrl,
}
);
// Stringify all values so we can feed into Webpack DefinePlugin
const stringified = {
'process.env': Object.keys(raw).reduce((env, key) => {
env[key] = JSON.stringify(raw[key]);
return env;
}, {}),
};

return { raw, stringified };
}

module.exports = getClientEnvironment;

@ -0,0 +1,14 @@
'use strict';

// This is a custom Jest transformer turning style imports into empty objects.
// http://facebook.github.io/jest/docs/en/webpack.html

module.exports = {
process() {
return 'module.exports = {};';
},
getCacheKey() {
// The output is always the same.
return 'cssTransform';
},
};

@ -0,0 +1,40 @@
'use strict';

const path = require('path');
const camelcase = require('camelcase');

// This is a custom Jest transformer turning file imports into filenames.
// http://facebook.github.io/jest/docs/en/webpack.html

module.exports = {
process(src, filename) {
const assetFilename = JSON.stringify(path.basename(filename));

if (filename.match(/\.svg$/)) {
// Based on how SVGR generates a component name:
// https://github.com/smooth-code/svgr/blob/01b194cf967347d43d4cbe6b434404731b87cf27/packages/core/src/state.js#L6
const pascalCaseFilename = camelcase(path.parse(filename).name, {
pascalCase: true,
});
const componentName = `Svg${pascalCaseFilename}`;
return `const React = require('react');
module.exports = {
__esModule: true,
default: ${assetFilename},
ReactComponent: React.forwardRef(function ${componentName}(props, ref) {
return {
$$typeof: Symbol.for('react.element'),
type: 'svg',
ref: ref,
key: null,
props: Object.assign({}, props, {
children: ${assetFilename}
})
};
}),
};`;
}

return `module.exports = ${assetFilename};`;
},
};

@ -0,0 +1,141 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const paths = require('./paths');
|
||||
const chalk = require('react-dev-utils/chalk');
|
||||
const resolve = require('resolve');
|
||||
|
||||
/**
|
||||
* Get additional module paths based on the baseUrl of a compilerOptions object.
|
||||
*
|
||||
* @param {Object} options
|
||||
*/
|
||||
function getAdditionalModulePaths(options = {}) {
|
||||
const baseUrl = options.baseUrl;
|
||||
|
||||
// We need to explicitly check for null and undefined (and not a falsy value) because
|
||||
// TypeScript treats an empty string as `.`.
|
||||
if (baseUrl == null) {
|
||||
// If there's no baseUrl set we respect NODE_PATH
|
||||
// Note that NODE_PATH is deprecated and will be removed
|
||||
// in the next major release of create-react-app.
|
||||
|
||||
const nodePath = process.env.NODE_PATH || '';
|
||||
return nodePath.split(path.delimiter).filter(Boolean);
|
||||
}
|
||||
|
||||
const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
|
||||
|
||||
// We don't need to do anything if `baseUrl` is set to `node_modules`. This is
|
||||
// the default behavior.
|
||||
if (path.relative(paths.appNodeModules, baseUrlResolved) === '') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Allow the user set the `baseUrl` to `appSrc`.
|
||||
if (path.relative(paths.appSrc, baseUrlResolved) === '') {
|
||||
return [paths.appSrc];
|
||||
}
|
||||
|
||||
// If the path is equal to the root directory we ignore it here.
|
||||
// We don't want to allow importing from the root directly as source files are
|
||||
// not transpiled outside of `src`. We do allow importing them with the
|
||||
// absolute path (e.g. `src/Components/Button.js`) but we set that up with
|
||||
// an alias.
|
||||
if (path.relative(paths.appPath, baseUrlResolved) === '') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Otherwise, throw an error.
|
||||
throw new Error(
|
||||
chalk.red.bold(
|
||||
'Your project\'s `baseUrl` can only be set to `src` or `node_modules`.' +
|
||||
' Create React App does not support other values at this time.'
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get webpack aliases based on the baseUrl of a compilerOptions object.
|
||||
*
|
||||
* @param {*} options
|
||||
*/
|
||||
function getWebpackAliases(options = {}) {
|
||||
const baseUrl = options.baseUrl;
|
||||
|
||||
if (!baseUrl) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
|
||||
|
||||
if (path.relative(paths.appPath, baseUrlResolved) === '') {
|
||||
return {
|
||||
src: paths.appSrc,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get jest aliases based on the baseUrl of a compilerOptions object.
|
||||
*
|
||||
* @param {*} options
|
||||
*/
|
||||
function getJestAliases(options = {}) {
|
||||
const baseUrl = options.baseUrl;
|
||||
|
||||
if (!baseUrl) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
|
||||
|
||||
if (path.relative(paths.appPath, baseUrlResolved) === '') {
|
||||
return {
|
||||
'^src/(.*)$': '<rootDir>/src/$1',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function getModules() {
|
||||
// Check if TypeScript is setup
|
||||
const hasTsConfig = fs.existsSync(paths.appTsConfig);
|
||||
const hasJsConfig = fs.existsSync(paths.appJsConfig);
|
||||
|
||||
if (hasTsConfig && hasJsConfig) {
|
||||
throw new Error(
|
||||
'You have both a tsconfig.json and a jsconfig.json. If you are using TypeScript please remove your jsconfig.json file.'
|
||||
);
|
||||
}
|
||||
|
||||
let config;
|
||||
|
||||
// If there's a tsconfig.json we assume it's a
|
||||
// TypeScript project and set up the config
|
||||
// based on tsconfig.json
|
||||
if (hasTsConfig) {
|
||||
const ts = require(resolve.sync('typescript', {
|
||||
basedir: paths.appNodeModules,
|
||||
}));
|
||||
config = ts.readConfigFile(paths.appTsConfig, ts.sys.readFile).config;
|
||||
// Otherwise we'll check if there is jsconfig.json
|
||||
// for non TS projects.
|
||||
} else if (hasJsConfig) {
|
||||
config = require(paths.appJsConfig);
|
||||
}
|
||||
|
||||
config = config || {};
|
||||
const options = config.compilerOptions || {};
|
||||
|
||||
const additionalModulePaths = getAdditionalModulePaths(options);
|
||||
|
||||
return {
|
||||
additionalModulePaths: additionalModulePaths,
|
||||
webpackAliases: getWebpackAliases(options),
|
||||
jestAliases: getJestAliases(options),
|
||||
hasTsConfig,
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = getModules();
|
|
@ -0,0 +1,90 @@
|
|||
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const url = require('url');
|
||||
|
||||
// Make sure any symlinks in the project folder are resolved:
|
||||
// https://github.com/facebook/create-react-app/issues/637
|
||||
const appDirectory = fs.realpathSync(process.cwd());
|
||||
const resolveApp = relativePath => path.resolve(appDirectory, relativePath);
|
||||
|
||||
const envPublicUrl = process.env.PUBLIC_URL;
|
||||
|
||||
function ensureSlash(inputPath, needsSlash) {
|
||||
const hasSlash = inputPath.endsWith('/');
|
||||
if (hasSlash && !needsSlash) {
|
||||
return inputPath.substr(0, inputPath.length - 1);
|
||||
} else if (!hasSlash && needsSlash) {
|
||||
return `${inputPath}/`;
|
||||
} else {
|
||||
return inputPath;
|
||||
}
|
||||
}
|
||||
|
||||
const getPublicUrl = appPackageJson =>
|
||||
envPublicUrl || require(appPackageJson).homepage;
|
||||
|
||||
// We use `PUBLIC_URL` environment variable or "homepage" field to infer
|
||||
// "public path" at which the app is served.
|
||||
// Webpack needs to know it to put the right <script> hrefs into HTML even in
|
||||
// single-page apps that may serve index.html for nested URLs like /todos/42.
|
||||
// We can't use a relative path in HTML because we don't want to load something
|
||||
// like /todos/42/static/js/bundle.7289d.js. We have to know the root.
|
||||
function getServedPath(appPackageJson) {
|
||||
const publicUrl = getPublicUrl(appPackageJson);
|
||||
const servedUrl =
|
||||
envPublicUrl || (publicUrl ? url.parse(publicUrl).pathname : '/');
|
||||
return ensureSlash(servedUrl, true);
|
||||
}
|
||||
|
||||
const moduleFileExtensions = [
|
||||
'web.mjs',
|
||||
'mjs',
|
||||
'web.js',
|
||||
'js',
|
||||
'web.ts',
|
||||
'ts',
|
||||
'web.tsx',
|
||||
'tsx',
|
||||
'json',
|
||||
'web.jsx',
|
||||
'jsx',
|
||||
];
|
||||
|
||||
// Resolve file paths in the same order as webpack
|
||||
const resolveModule = (resolveFn, filePath) => {
|
||||
const extension = moduleFileExtensions.find(extension =>
|
||||
fs.existsSync(resolveFn(`${filePath}.${extension}`))
|
||||
);
|
||||
|
||||
if (extension) {
|
||||
return resolveFn(`${filePath}.${extension}`);
|
||||
}
|
||||
|
||||
return resolveFn(`${filePath}.js`);
|
||||
};
|
||||
|
||||
// config after eject: we're in ./config/
|
||||
module.exports = {
|
||||
dotenv: resolveApp('.env'),
|
||||
appPath: resolveApp('.'),
|
||||
appBuild: resolveApp('build'),
|
||||
appPublic: resolveApp('public'),
|
||||
appHtml: resolveApp('public/index.html'),
|
||||
appIndexJs: resolveModule(resolveApp, 'src/index'),
|
||||
appPackageJson: resolveApp('package.json'),
|
||||
appSrc: resolveApp('src'),
|
||||
appTsConfig: resolveApp('tsconfig.json'),
|
||||
appJsConfig: resolveApp('jsconfig.json'),
|
||||
yarnLockFile: resolveApp('yarn.lock'),
|
||||
testsSetup: resolveModule(resolveApp, 'src/setupTests'),
|
||||
proxySetup: resolveApp('src/setupProxy.js'),
|
||||
appNodeModules: resolveApp('node_modules'),
|
||||
publicUrl: getPublicUrl(resolveApp('package.json')),
|
||||
servedPath: getServedPath(resolveApp('package.json')),
|
||||
};
|
||||
|
||||
|
||||
|
||||
module.exports.moduleFileExtensions = moduleFileExtensions;
|
|
@ -0,0 +1,35 @@
'use strict';

const { resolveModuleName } = require('ts-pnp');

exports.resolveModuleName = (
typescript,
moduleName,
containingFile,
compilerOptions,
resolutionHost
) => {
return resolveModuleName(
moduleName,
containingFile,
compilerOptions,
resolutionHost,
typescript.resolveModuleName
);
};

exports.resolveTypeReferenceDirective = (
typescript,
moduleName,
containingFile,
compilerOptions,
resolutionHost
) => {
return resolveModuleName(
moduleName,
containingFile,
compilerOptions,
resolutionHost,
typescript.resolveTypeReferenceDirective
);
};

@ -0,0 +1,676 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const webpack = require('webpack');
|
||||
const resolve = require('resolve');
|
||||
const PnpWebpackPlugin = require('pnp-webpack-plugin');
|
||||
const HtmlWebpackPlugin = require('html-webpack-plugin');
|
||||
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
|
||||
const InlineChunkHtmlPlugin = require('react-dev-utils/InlineChunkHtmlPlugin');
|
||||
const TerserPlugin = require('terser-webpack-plugin');
|
||||
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
|
||||
const safePostCssParser = require('postcss-safe-parser');
|
||||
const ManifestPlugin = require('webpack-manifest-plugin');
|
||||
const InterpolateHtmlPlugin = require('react-dev-utils/InterpolateHtmlPlugin');
|
||||
const WorkboxWebpackPlugin = require('workbox-webpack-plugin');
|
||||
const WatchMissingNodeModulesPlugin = require('react-dev-utils/WatchMissingNodeModulesPlugin');
|
||||
const ModuleScopePlugin = require('react-dev-utils/ModuleScopePlugin');
|
||||
const getCSSModuleLocalIdent = require('react-dev-utils/getCSSModuleLocalIdent');
|
||||
const paths = require('./paths');
|
||||
const modules = require('./modules');
|
||||
const getClientEnvironment = require('./env');
|
||||
const ModuleNotFoundPlugin = require('react-dev-utils/ModuleNotFoundPlugin');
|
||||
const ForkTsCheckerWebpackPlugin = require('react-dev-utils/ForkTsCheckerWebpackPlugin');
|
||||
const typescriptFormatter = require('react-dev-utils/typescriptFormatter');
|
||||
|
||||
const postcssNormalize = require('postcss-normalize');
|
||||
|
||||
const appPackageJson = require(paths.appPackageJson);
|
||||
|
||||
// Source maps are resource heavy and can cause out of memory issue for large source files.
|
||||
const shouldUseSourceMap = process.env.GENERATE_SOURCEMAP !== 'false';
|
||||
// Some apps do not need the benefits of saving a web request, so not inlining the chunk
|
||||
// makes for a smoother build process.
|
||||
const shouldInlineRuntimeChunk = process.env.INLINE_RUNTIME_CHUNK !== 'false';
|
||||
|
||||
const imageInlineSizeLimit = parseInt(
|
||||
process.env.IMAGE_INLINE_SIZE_LIMIT || '10000'
|
||||
);
|
||||
|
||||
// Check if TypeScript is setup
|
||||
const useTypeScript = fs.existsSync(paths.appTsConfig);
|
||||
|
||||
// style files regexes
|
||||
const cssRegex = /\.css$/;
|
||||
const cssModuleRegex = /\.module\.css$/;
|
||||
const sassRegex = /\.(scss|sass)$/;
|
||||
const sassModuleRegex = /\.module\.(scss|sass)$/;
|
||||
|
||||
// This is the production and development configuration.
|
||||
// It is focused on developer experience, fast rebuilds, and a minimal bundle.
|
||||
module.exports = function(webpackEnv) {
|
||||
const isEnvDevelopment = webpackEnv === 'development';
|
||||
const isEnvProduction = webpackEnv === 'production';
|
||||
|
||||
// Variable used for enabling profiling in Production
|
||||
// passed into alias object. Uses a flag if passed into the build command
|
||||
const isEnvProductionProfile =
|
||||
isEnvProduction && process.argv.includes('--profile');
|
||||
|
||||
// Webpack uses `publicPath` to determine where the app is being served from.
|
||||
// It requires a trailing slash, or the file assets will get an incorrect path.
|
||||
// In development, we always serve from the root. This makes config easier.
|
||||
const publicPath = isEnvProduction
|
||||
? paths.servedPath
|
||||
: isEnvDevelopment && '/';
|
||||
// Some apps do not use client-side routing with pushState.
|
||||
// For these, "homepage" can be set to "." to enable relative asset paths.
|
||||
const shouldUseRelativeAssetPaths = publicPath === './';
|
||||
|
||||
// `publicUrl` is just like `publicPath`, but we will provide it to our app
|
||||
// as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
|
||||
// Omit trailing slash as %PUBLIC_URL%/xyz looks better than %PUBLIC_URL%xyz.
|
||||
const publicUrl = isEnvProduction
|
||||
? publicPath.slice(0, -1)
|
||||
: isEnvDevelopment && '';
|
||||
// Get environment variables to inject into our app.
|
||||
const env = getClientEnvironment(publicUrl);
|
||||
|
||||
// common function to get style loaders
|
||||
const getStyleLoaders = (cssOptions, preProcessor) => {
|
||||
const loaders = [
|
||||
isEnvDevelopment && require.resolve('style-loader'),
|
||||
isEnvProduction && {
|
||||
loader: MiniCssExtractPlugin.loader,
|
||||
options: shouldUseRelativeAssetPaths ? { publicPath: '../../' } : {},
|
||||
},
|
||||
{
|
||||
loader: require.resolve('css-loader'),
|
||||
options: cssOptions,
|
||||
},
|
||||
{
|
||||
// Options for PostCSS as we reference these options twice
|
||||
// Adds vendor prefixing based on your specified browser support in
|
||||
// package.json
|
||||
loader: require.resolve('postcss-loader'),
|
||||
options: {
|
||||
// Necessary for external CSS imports to work
|
||||
// https://github.com/facebook/create-react-app/issues/2677
|
||||
ident: 'postcss',
|
||||
plugins: () => [
|
||||
require('postcss-flexbugs-fixes'),
|
||||
require('postcss-preset-env')({
|
||||
autoprefixer: {
|
||||
flexbox: 'no-2009',
|
||||
},
|
||||
stage: 3,
|
||||
}),
|
||||
// Adds PostCSS Normalize as the reset css with default options,
|
||||
// so that it honors browserslist config in package.json
|
||||
// which in turn let's users customize the target behavior as per their needs.
|
||||
postcssNormalize(),
|
||||
],
|
||||
sourceMap: isEnvProduction && shouldUseSourceMap,
|
||||
},
|
||||
},
|
||||
].filter(Boolean);
|
||||
if (preProcessor) {
|
||||
loaders.push(
|
||||
{
|
||||
loader: require.resolve('resolve-url-loader'),
|
||||
options: {
|
||||
sourceMap: isEnvProduction && shouldUseSourceMap,
|
||||
},
|
||||
},
|
||||
{
|
||||
loader: require.resolve(preProcessor),
|
||||
options: {
|
||||
sourceMap: true,
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
return loaders;
|
||||
};
|
||||
|
||||
return {
|
||||
mode: isEnvProduction ? 'production' : isEnvDevelopment && 'development',
|
||||
// Stop compilation early in production
|
||||
bail: isEnvProduction,
|
||||
devtool: isEnvProduction
|
||||
? shouldUseSourceMap
|
||||
? 'source-map'
|
||||
: false
|
||||
: isEnvDevelopment && 'cheap-module-source-map',
|
||||
// These are the "entry points" to our application.
|
||||
// This means they will be the "root" imports that are included in JS bundle.
|
||||
entry: [
|
||||
// Include an alternative client for WebpackDevServer. A client's job is to
|
||||
// connect to WebpackDevServer by a socket and get notified about changes.
|
||||
// When you save a file, the client will either apply hot updates (in case
|
||||
// of CSS changes), or refresh the page (in case of JS changes). When you
|
||||
// make a syntax error, this client will display a syntax error overlay.
|
||||
// Note: instead of the default WebpackDevServer client, we use a custom one
|
||||
// to bring better experience for Create React App users. You can replace
|
||||
// the line below with these two lines if you prefer the stock client:
|
||||
// require.resolve('webpack-dev-server/client') + '?/',
|
||||
// require.resolve('webpack/hot/dev-server'),
|
||||
isEnvDevelopment &&
|
||||
require.resolve('react-dev-utils/webpackHotDevClient'),
|
||||
// Finally, this is your app's code:
|
||||
paths.appIndexJs,
|
||||
// We include the app code last so that if there is a runtime error during
|
||||
// initialization, it doesn't blow up the WebpackDevServer client, and
|
||||
// changing JS code would still trigger a refresh.
|
||||
].filter(Boolean),
|
||||
output: {
|
||||
// The build folder.
|
||||
path: isEnvProduction ? paths.appBuild : undefined,
|
||||
// Add /* filename */ comments to generated require()s in the output.
|
||||
pathinfo: isEnvDevelopment,
|
||||
// There will be one main bundle, and one file per asynchronous chunk.
|
||||
// In development, it does not produce real files.
|
||||
filename: isEnvProduction
|
||||
? 'static/js/[name].js'
|
||||
: isEnvDevelopment && 'static/js/bundle.js',
|
||||
// TODO: remove this when upgrading to webpack 5
|
||||
futureEmitAssets: true,
|
||||
// There are also additional JS chunk files if you use code splitting.
|
||||
chunkFilename: isEnvProduction
|
||||
? 'static/js/[name].chunk.js'
|
||||
: isEnvDevelopment && 'static/js/[name].chunk.js',
|
||||
// We inferred the "public path" (such as / or /my-project) from homepage.
|
||||
// We use "/" in development.
|
||||
publicPath: publicPath,
|
||||
// Point sourcemap entries to original disk location (format as URL on Windows)
|
||||
devtoolModuleFilenameTemplate: isEnvProduction
|
||||
? info =>
|
||||
path
|
||||
.relative(paths.appSrc, info.absoluteResourcePath)
|
||||
.replace(/\\/g, '/')
|
||||
: isEnvDevelopment &&
|
||||
(info => path.resolve(info.absoluteResourcePath).replace(/\\/g, '/')),
|
||||
// Prevents conflicts when multiple Webpack runtimes (from different apps)
|
||||
// are used on the same page.
|
||||
jsonpFunction: `webpackJsonp${appPackageJson.name}`,
|
||||
// this defaults to 'window', but by setting it to 'this' then
|
||||
// module chunks which are built will work in web workers as well.
|
||||
globalObject: 'this',
|
||||
},
|
||||
optimization: {
|
||||
minimize: isEnvProduction,
|
||||
minimizer: [
|
||||
// This is only used in production mode
|
||||
new TerserPlugin({
|
||||
terserOptions: {
|
||||
parse: {
|
||||
// We want terser to parse ecma 8 code. However, we don't want it
|
||||
// to apply any minification steps that turns valid ecma 5 code
|
||||
// into invalid ecma 5 code. This is why the 'compress' and 'output'
|
||||
// sections only apply transformations that are ecma 5 safe
|
||||
// https://github.com/facebook/create-react-app/pull/4234
|
||||
ecma: 8,
|
||||
},
|
||||
compress: {
|
||||
ecma: 5,
|
||||
warnings: false,
|
||||
// Disabled because of an issue with Uglify breaking seemingly valid code:
|
||||
// https://github.com/facebook/create-react-app/issues/2376
|
||||
// Pending further investigation:
|
||||
// https://github.com/mishoo/UglifyJS2/issues/2011
|
||||
comparisons: false,
|
||||
// Disabled because of an issue with Terser breaking valid code:
|
||||
// https://github.com/facebook/create-react-app/issues/5250
|
||||
// Pending further investigation:
|
||||
// https://github.com/terser-js/terser/issues/120
|
||||
inline: 2,
|
||||
},
|
||||
mangle: {
|
||||
safari10: true,
|
||||
},
|
||||
// Added for profiling in devtools
|
||||
keep_classnames: isEnvProductionProfile,
|
||||
keep_fnames: isEnvProductionProfile,
|
||||
output: {
|
||||
ecma: 5,
|
||||
comments: false,
|
||||
// Turned on because emoji and regex are not minified properly using the default
|
||||
// https://github.com/facebook/create-react-app/issues/2488
|
||||
ascii_only: true,
|
||||
},
|
||||
},
|
||||
sourceMap: shouldUseSourceMap,
|
||||
}),
|
||||
// This is only used in production mode
|
||||
new OptimizeCSSAssetsPlugin({
|
||||
cssProcessorOptions: {
|
||||
parser: safePostCssParser,
|
||||
map: shouldUseSourceMap
|
||||
? {
|
||||
// `inline: false` forces the sourcemap to be output into a
|
||||
// separate file
|
||||
inline: false,
|
||||
// `annotation: true` appends the sourceMappingURL to the end of
|
||||
// the css file, helping the browser find the sourcemap
|
||||
annotation: true,
|
||||
}
|
||||
: false,
|
||||
},
|
||||
cssProcessorPluginOptions: {
|
||||
preset: ['default', { minifyFontValues: { removeQuotes: false } }],
|
||||
},
|
||||
}),
|
||||
],
|
||||
// Automatically split vendor and commons
|
||||
// https://twitter.com/wSokra/status/969633336732905474
|
||||
// https://medium.com/webpack/webpack-4-code-splitting-chunk-graph-and-the-splitchunks-optimization-be739a861366
|
||||
splitChunks: {
|
||||
chunks: 'all',
|
||||
name: false,
|
||||
},
|
||||
// Keep the runtime chunk separated to enable long term caching
|
||||
// https://twitter.com/wSokra/status/969679223278505985
|
||||
// https://github.com/facebook/create-react-app/issues/5358
|
||||
runtimeChunk: {
|
||||
name: entrypoint => `runtime-${entrypoint.name}`,
|
||||
},
|
||||
},
|
||||
resolve: {
|
||||
// This allows you to set a fallback for where Webpack should look for modules.
|
||||
// We placed these paths second because we want `node_modules` to "win"
|
||||
// if there are any conflicts. This matches Node resolution mechanism.
|
||||
// https://github.com/facebook/create-react-app/issues/253
|
||||
modules: ['node_modules', paths.appNodeModules].concat(
|
||||
modules.additionalModulePaths || []
|
||||
),
|
||||
// These are the reasonable defaults supported by the Node ecosystem.
|
||||
// We also include JSX as a common component filename extension to support
|
||||
// some tools, although we do not recommend using it, see:
|
||||
// https://github.com/facebook/create-react-app/issues/290
|
||||
// `web` extension prefixes have been added for better support
|
||||
// for React Native Web.
|
||||
extensions: paths.moduleFileExtensions
|
||||
.map(ext => `.${ext}`)
|
||||
.filter(ext => useTypeScript || !ext.includes('ts')),
|
||||
alias: {
|
||||
// Support React Native Web
|
||||
// https://www.smashingmagazine.com/2016/08/a-glimpse-into-the-future-with-react-native-for-web/
|
||||
'react-native': 'react-native-web',
|
||||
// Allows for better profiling with ReactDevTools
|
||||
...(isEnvProductionProfile && {
|
||||
'react-dom$': 'react-dom/profiling',
|
||||
'scheduler/tracing': 'scheduler/tracing-profiling',
|
||||
}),
|
||||
...(modules.webpackAliases || {}),
|
||||
},
|
||||
plugins: [
|
||||
// Adds support for installing with Plug'n'Play, leading to faster installs and adding
|
||||
// guards against forgotten dependencies and such.
|
||||
PnpWebpackPlugin,
|
||||
// Prevents users from importing files from outside of src/ (or node_modules/).
|
||||
// This often causes confusion because we only process files within src/ with babel.
|
||||
// To fix this, we prevent you from importing files out of src/ -- if you'd like to,
|
||||
// please link the files into your node_modules/ and let module-resolution kick in.
|
||||
// Make sure your source files are compiled, as they will not be processed in any way.
|
||||
new ModuleScopePlugin(paths.appSrc, [paths.appPackageJson]),
|
||||
],
|
||||
},
|
||||
resolveLoader: {
|
||||
plugins: [
|
||||
// Also related to Plug'n'Play, but this time it tells Webpack to load its loaders
|
||||
// from the current package.
|
||||
PnpWebpackPlugin.moduleLoader(module),
|
||||
],
|
||||
},
|
||||
module: {
|
||||
strictExportPresence: true,
|
||||
rules: [
|
||||
// Disable require.ensure as it's not a standard language feature.
|
||||
{ parser: { requireEnsure: false } },
|
||||
|
||||
// First, run the linter.
|
||||
// It's important to do this before Babel processes the JS.
|
||||
// {
|
||||
// test: /\.(js|mjs|jsx|ts|tsx)$/,
|
||||
// enforce: 'pre',
|
||||
// use: [
|
||||
// {
|
||||
// options: {
|
||||
// cache: true,
|
||||
// formatter: require.resolve('react-dev-utils/eslintFormatter'),
|
||||
// eslintPath: require.resolve('eslint'),
|
||||
// resolvePluginsRelativeTo: __dirname,
|
||||
|
||||
// },
|
||||
// loader: require.resolve('eslint-loader'),
|
||||
// },
|
||||
// ],
|
||||
// include: paths.appSrc,
|
||||
// },
|
||||
{
|
||||
// "oneOf" will traverse all following loaders until one will
|
||||
// match the requirements. When no loader matches it will fall
|
||||
// back to the "file" loader at the end of the loader list.
|
||||
oneOf: [
|
||||
// "url" loader works like "file" loader except that it embeds assets
|
||||
// smaller than specified limit in bytes as data URLs to avoid requests.
|
||||
// A missing `test` is equivalent to a match.
|
||||
{
|
||||
test: [/\.bmp$/, /\.gif$/, /\.jpe?g$/, /\.png$/],
|
||||
loader: require.resolve('url-loader'),
|
||||
options: {
|
||||
limit: imageInlineSizeLimit,
|
||||
name: 'static/media/[name].[hash:8].[ext]',
|
||||
},
|
||||
},
|
||||
// Process application JS with Babel.
|
||||
// The preset includes JSX, Flow, TypeScript, and some ESnext features.
|
||||
{
|
||||
test: /\.(js|mjs|jsx|ts|tsx)$/,
|
||||
include: paths.appSrc,
|
||||
loader: require.resolve('babel-loader'),
|
||||
options: {
|
||||
customize: require.resolve(
|
||||
'babel-preset-react-app/webpack-overrides'
|
||||
),
|
||||
|
||||
plugins: [
|
||||
[
|
||||
require.resolve('babel-plugin-named-asset-import'),
|
||||
{
|
||||
loaderMap: {
|
||||
svg: {
|
||||
ReactComponent:
|
||||
'@svgr/webpack?-svgo,+titleProp,+ref![path]',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
],
|
||||
// This is a feature of `babel-loader` for webpack (not Babel itself).
|
||||
// It enables caching results in ./node_modules/.cache/babel-loader/
|
||||
// directory for faster rebuilds.
|
||||
cacheDirectory: true,
|
||||
// See #6846 for context on why cacheCompression is disabled
|
||||
cacheCompression: false,
|
||||
compact: isEnvProduction,
|
||||
},
|
||||
},
|
||||
// Process any JS outside of the app with Babel.
|
||||
// Unlike the application JS, we only compile the standard ES features.
|
||||
{
|
||||
test: /\.(js|mjs)$/,
|
||||
exclude: /@babel(?:\/|\\{1,2})runtime/,
|
||||
loader: require.resolve('babel-loader'),
|
||||
options: {
|
||||
babelrc: false,
|
||||
configFile: false,
|
||||
compact: false,
|
||||
presets: [
|
||||
[
|
||||
require.resolve('babel-preset-react-app/dependencies'),
|
||||
{ helpers: true },
|
||||
],
|
||||
],
|
||||
cacheDirectory: true,
|
||||
// See #6846 for context on why cacheCompression is disabled
|
||||
cacheCompression: false,
|
||||
|
||||
// Babel sourcemaps are needed for debugging into node_modules
|
||||
// code. Without the options below, debuggers like VSCode
|
||||
// show incorrect code and set breakpoints on the wrong lines.
|
||||
sourceMaps: shouldUseSourceMap,
|
||||
inputSourceMap: shouldUseSourceMap,
|
||||
},
|
||||
},
|
||||
// "postcss" loader applies autoprefixer to our CSS.
|
||||
// "css" loader resolves paths in CSS and adds assets as dependencies.
|
||||
// "style" loader turns CSS into JS modules that inject <style> tags.
|
||||
// In production, we use MiniCSSExtractPlugin to extract that CSS
|
||||
// to a file, but in development "style" loader enables hot editing
|
||||
// of CSS.
|
||||
// By default we support CSS Modules with the extension .module.css
|
||||
{
|
||||
test: cssRegex,
|
||||
exclude: cssModuleRegex,
|
||||
use: getStyleLoaders({
|
||||
importLoaders: 1,
|
||||
sourceMap: isEnvProduction && shouldUseSourceMap,
|
||||
}),
|
||||
// Don't consider CSS imports dead code even if the
|
||||
// containing package claims to have no side effects.
|
||||
// Remove this when webpack adds a warning or an error for this.
|
||||
// See https://github.com/webpack/webpack/issues/6571
|
||||
sideEffects: true,
|
||||
},
|
||||
// Adds support for CSS Modules (https://github.com/css-modules/css-modules)
|
||||
// using the extension .module.css
|
||||
{
|
||||
test: cssModuleRegex,
|
||||
use: getStyleLoaders({
|
||||
importLoaders: 1,
|
||||
sourceMap: isEnvProduction && shouldUseSourceMap,
|
||||
modules: {
|
||||
getLocalIdent: getCSSModuleLocalIdent,
|
||||
},
|
||||
}),
|
||||
},
|
||||
// Opt-in support for SASS (using .scss or .sass extensions).
|
||||
// By default we support SASS Modules with the
|
||||
// extensions .module.scss or .module.sass
|
||||
{
|
||||
test: sassRegex,
|
||||
exclude: sassModuleRegex,
|
||||
use: getStyleLoaders(
|
||||
{
|
||||
importLoaders: 3,
|
||||
sourceMap: isEnvProduction && shouldUseSourceMap,
|
||||
},
|
||||
'sass-loader'
|
||||
),
|
||||
// Don't consider CSS imports dead code even if the
|
||||
// containing package claims to have no side effects.
|
||||
// Remove this when webpack adds a warning or an error for this.
|
||||
// See https://github.com/webpack/webpack/issues/6571
|
||||
sideEffects: true,
|
||||
},
|
||||
// Adds support for CSS Modules, but using SASS
|
||||
// using the extension .module.scss or .module.sass
|
||||
{
|
||||
test: sassModuleRegex,
|
||||
use: getStyleLoaders(
|
||||
{
|
||||
importLoaders: 3,
|
||||
sourceMap: isEnvProduction && shouldUseSourceMap,
|
||||
modules: {
|
||||
getLocalIdent: getCSSModuleLocalIdent,
|
||||
},
|
||||
},
|
||||
'sass-loader'
|
||||
),
|
||||
},
|
||||
// "file" loader makes sure those assets get served by WebpackDevServer.
|
||||
// When you `import` an asset, you get its (virtual) filename.
|
||||
// In production, they would get copied to the `build` folder.
|
||||
// This loader doesn't use a "test" so it will catch all modules
|
||||
// that fall through the other loaders.
|
||||
{
|
||||
loader: require.resolve('file-loader'),
|
||||
// Exclude `js` files to keep "css" loader working as it injects
|
||||
// its runtime that would otherwise be processed through "file" loader.
|
||||
// Also exclude `html` and `json` extensions so they get processed
|
||||
// by webpack's internal loaders.
|
||||
exclude: [/\.(js|mjs|jsx|ts|tsx)$/, /\.html$/, /\.json$/],
|
||||
options: {
|
||||
name: 'static/media/[name].[hash:8].[ext]',
|
||||
},
|
||||
},
|
||||
// ** STOP ** Are you adding a new loader?
|
||||
// Make sure to add the new loader(s) before the "file" loader.
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
plugins: [
|
||||
// Generates an `index.html` file with the <script> injected.
|
||||
new HtmlWebpackPlugin(
|
||||
Object.assign(
|
||||
{},
|
||||
{
|
||||
inject: true,
|
||||
template: paths.appHtml,
|
||||
},
|
||||
isEnvProduction
|
||||
? {
|
||||
minify: {
|
||||
removeComments: true,
|
||||
collapseWhitespace: true,
|
||||
removeRedundantAttributes: true,
|
||||
useShortDoctype: true,
|
||||
removeEmptyAttributes: true,
|
||||
removeStyleLinkTypeAttributes: true,
|
||||
keepClosingSlash: true,
|
||||
minifyJS: true,
|
||||
minifyCSS: true,
|
||||
minifyURLs: true,
|
||||
},
|
||||
}
|
||||
: undefined
|
||||
)
|
||||
),
|
||||
// Inlines the webpack runtime script. This script is too small to warrant
|
||||
// a network request.
|
||||
// https://github.com/facebook/create-react-app/issues/5358
|
||||
isEnvProduction &&
|
||||
shouldInlineRuntimeChunk &&
|
||||
new InlineChunkHtmlPlugin(HtmlWebpackPlugin, [/runtime-.+[.]js/]),
|
||||
// Makes some environment variables available in index.html.
|
||||
// The public URL is available as %PUBLIC_URL% in index.html, e.g.:
|
||||
// <link rel="icon" href="%PUBLIC_URL%/favicon.ico">
|
||||
// In production, it will be an empty string unless you specify "homepage"
|
||||
// in `package.json`, in which case it will be the pathname of that URL.
|
||||
// In development, this will be an empty string.
|
||||
new InterpolateHtmlPlugin(HtmlWebpackPlugin, env.raw),
|
||||
// This gives some necessary context to module not found errors, such as
|
||||
// the requesting resource.
|
||||
new ModuleNotFoundPlugin(paths.appPath),
|
||||
// Makes some environment variables available to the JS code, for example:
|
||||
// if (process.env.NODE_ENV === 'production') { ... }. See `./env.js`.
|
||||
// It is absolutely essential that NODE_ENV is set to production
|
||||
// during a production build.
|
||||
// Otherwise React will be compiled in the very slow development mode.
|
||||
new webpack.DefinePlugin(env.stringified),
|
||||
// This is necessary to emit hot updates (currently CSS only):
|
||||
isEnvDevelopment && new webpack.HotModuleReplacementPlugin(),
|
||||
// Watcher doesn't work well if you mistype casing in a path so we use
|
||||
// a plugin that prints an error when you attempt to do this.
|
||||
// See https://github.com/facebook/create-react-app/issues/240
|
||||
isEnvDevelopment && new CaseSensitivePathsPlugin(),
|
||||
// If you require a missing module and then `npm install` it, you still have
|
||||
// to restart the development server for Webpack to discover it. This plugin
|
||||
// makes the discovery automatic so you don't have to restart.
|
||||
// See https://github.com/facebook/create-react-app/issues/186
|
||||
isEnvDevelopment &&
|
||||
new WatchMissingNodeModulesPlugin(paths.appNodeModules),
|
||||
isEnvProduction &&
|
||||
new MiniCssExtractPlugin({
|
||||
// Options similar to the same options in webpackOptions.output
|
||||
// both options are optional
|
||||
filename: 'static/css/[name].css',
|
||||
chunkFilename: 'static/css/[name].chunk.css',
|
||||
}),
|
||||
// Generate an asset manifest file with the following content:
|
||||
// - "files" key: Mapping of all asset filenames to their corresponding
|
||||
// output file so that tools can pick it up without having to parse
|
||||
// `index.html`
|
||||
// - "entrypoints" key: Array of files which are included in `index.html`,
|
||||
// can be used to reconstruct the HTML if necessary
|
||||
new ManifestPlugin({
|
||||
fileName: 'asset-manifest.json',
|
||||
publicPath: publicPath,
|
||||
generate: (seed, files, entrypoints) => {
|
||||
const manifestFiles = files.reduce((manifest, file) => {
|
||||
manifest[file.name] = file.path;
|
||||
return manifest;
|
||||
}, seed);
|
||||
const entrypointFiles = entrypoints.main.filter(
|
||||
fileName => !fileName.endsWith('.map')
|
||||
);
|
||||
|
||||
return {
|
||||
files: manifestFiles,
|
||||
entrypoints: entrypointFiles,
|
||||
};
|
||||
},
|
||||
}),
|
||||
// Moment.js is an extremely popular library that bundles large locale files
|
||||
// by default due to how Webpack interprets its code. This is a practical
|
||||
// solution that requires the user to opt into importing specific locales.
|
||||
// https://github.com/jmblog/how-to-optimize-momentjs-with-webpack
|
||||
// You can remove this if you don't use Moment.js:
|
||||
new webpack.IgnorePlugin(/^\.\/locale$/, /moment$/),
|
||||
// Generate a service worker script that will precache, and keep up to date,
|
||||
// the HTML & assets that are part of the Webpack build.
|
||||
// isEnvProduction &&
|
||||
// new WorkboxWebpackPlugin.GenerateSW({
|
||||
// clientsClaim: true,
|
||||
// exclude: [/\.map$/, /asset-manifest\.json$/],
|
||||
// importWorkboxFrom: 'cdn',
|
||||
// navigateFallback: `${publicUrl}/index.html`,
|
||||
// navigateFallbackBlacklist: [
|
||||
// // Exclude URLs starting with /_, as they're likely an API call
|
||||
// new RegExp('^/_'),
|
||||
// // Exclude any URLs whose last part seems to be a file extension
|
||||
// // as they're likely a resource and not a SPA route.
|
||||
// // URLs containing a "?" character won't be blacklisted as they're likely
|
||||
// // a route with query params (e.g. auth callbacks).
|
||||
// new RegExp('/[^/?]+\\.[^/]+$'),
|
||||
// ],
|
||||
// }),
|
||||
// TypeScript type checking
|
||||
useTypeScript &&
|
||||
new ForkTsCheckerWebpackPlugin({
|
||||
typescript: resolve.sync('typescript', {
|
||||
basedir: paths.appNodeModules,
|
||||
}),
|
||||
async: isEnvDevelopment,
|
||||
useTypescriptIncrementalApi: true,
|
||||
checkSyntacticErrors: true,
|
||||
resolveModuleNameModule: process.versions.pnp
|
||||
? `${__dirname}/pnpTs.js`
|
||||
: undefined,
|
||||
resolveTypeReferenceDirectiveModule: process.versions.pnp
|
||||
? `${__dirname}/pnpTs.js`
|
||||
: undefined,
|
||||
tsconfig: paths.appTsConfig,
|
||||
reportFiles: [
|
||||
'**',
|
||||
'!**/__tests__/**',
|
||||
'!**/?(*.)(spec|test).*',
|
||||
'!**/src/setupProxy.*',
|
||||
'!**/src/setupTests.*',
|
||||
],
|
||||
silent: true,
|
||||
// The formatter is invoked directly in WebpackDevServerUtils during development
|
||||
formatter: isEnvProduction ? typescriptFormatter : undefined,
|
||||
}),
|
||||
].filter(Boolean),
|
||||
// Some libraries import Node modules but don't use them in the browser.
|
||||
// Tell Webpack to provide empty mocks for them so importing them works.
|
||||
node: {
|
||||
module: 'empty',
|
||||
dgram: 'empty',
|
||||
dns: 'mock',
|
||||
fs: 'empty',
|
||||
http2: 'empty',
|
||||
net: 'empty',
|
||||
tls: 'empty',
|
||||
child_process: 'empty',
|
||||
},
|
||||
// Turn off performance processing because we utilize
|
||||
// our own hints via the FileSizeReporter
|
||||
performance: false,
|
||||
};
|
||||
};
|
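The IgnorePlugin entry above strips Moment.js locale bundles from the build, so any locale other than English has to be imported explicitly by the app. A minimal sketch of that opt-in, assuming nothing about which locales (if any) the popup actually needs; French is only an example:

const moment = require('moment');
// The IgnorePlugin drops ./locale/* from moment's bundle, so pull in
// the locales you actually use by hand.
require('moment/locale/fr');

moment.locale('fr');
console.log(moment().format('LLLL')); // formatted with French month/day names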
|
@ -0,0 +1,110 @@
|
|||
'use strict';
|
||||
|
||||
const errorOverlayMiddleware = require('react-dev-utils/errorOverlayMiddleware');
|
||||
const evalSourceMapMiddleware = require('react-dev-utils/evalSourceMapMiddleware');
|
||||
const noopServiceWorkerMiddleware = require('react-dev-utils/noopServiceWorkerMiddleware');
|
||||
const ignoredFiles = require('react-dev-utils/ignoredFiles');
|
||||
const paths = require('./paths');
|
||||
const fs = require('fs');
|
||||
|
||||
const protocol = process.env.HTTPS === 'true' ? 'https' : 'http';
|
||||
const host = process.env.HOST || '0.0.0.0';
|
||||
|
||||
module.exports = function(proxy, allowedHost) {
|
||||
return {
|
||||
// WebpackDevServer 2.4.3 introduced a security fix that prevents remote
|
||||
// websites from potentially accessing local content through DNS rebinding:
|
||||
// https://github.com/webpack/webpack-dev-server/issues/887
|
||||
// https://medium.com/webpack/webpack-dev-server-middleware-security-issues-1489d950874a
|
||||
// However, it made several existing use cases such as development in cloud
|
||||
// environment or subdomains in development significantly more complicated:
|
||||
// https://github.com/facebook/create-react-app/issues/2271
|
||||
// https://github.com/facebook/create-react-app/issues/2233
|
||||
// While we're investigating better solutions, for now we will take a
|
||||
// compromise. Since our WDS configuration only serves files in the `public`
|
||||
// folder we won't consider accessing them a vulnerability. However, if you
|
||||
// use the `proxy` feature, it gets more dangerous because it can expose
|
||||
// remote code execution vulnerabilities in backends like Django and Rails.
|
||||
// So we will disable the host check normally, but enable it if you have
|
||||
// specified the `proxy` setting. Finally, we let you override it if you
|
||||
// really know what you're doing with a special environment variable.
|
||||
disableHostCheck:
|
||||
!proxy || process.env.DANGEROUSLY_DISABLE_HOST_CHECK === 'true',
|
||||
// Enable gzip compression of generated files.
|
||||
compress: true,
|
||||
// Silence WebpackDevServer's own logs since they're generally not useful.
|
||||
// It will still show compile warnings and errors with this setting.
|
||||
clientLogLevel: 'none',
|
||||
// By default WebpackDevServer serves physical files from current directory
|
||||
// in addition to all the virtual build products that it serves from memory.
|
||||
// This is confusing because those files won’t automatically be available in
|
||||
// production build folder unless we copy them. However, copying the whole
|
||||
// project directory is dangerous because we may expose sensitive files.
|
||||
// Instead, we establish a convention that only files in `public` directory
|
||||
// get served. Our build script will copy `public` into the `build` folder.
|
||||
// In `index.html`, you can get URL of `public` folder with %PUBLIC_URL%:
|
||||
// <link rel="icon" href="%PUBLIC_URL%/favicon.ico">
|
||||
// In JavaScript code, you can access it with `process.env.PUBLIC_URL`.
|
||||
// Note that we only recommend using the `public` folder as an escape hatch
|
||||
// for files like `favicon.ico`, `manifest.json`, and libraries that are
|
||||
// for some reason broken when imported through Webpack. If you just want to
|
||||
// use an image, put it in `src` and `import` it from JavaScript instead.
|
||||
contentBase: paths.appPublic,
|
||||
// By default files from `contentBase` will not trigger a page reload.
|
||||
watchContentBase: true,
|
||||
// Enable hot reloading server. It will provide /sockjs-node/ endpoint
|
||||
// for the WebpackDevServer client so it can learn when the files were
|
||||
// updated. The WebpackDevServer client is included as an entry point
|
||||
// in the Webpack development configuration. Note that only changes
|
||||
// to CSS are currently hot reloaded. JS changes will refresh the browser.
|
||||
hot: true,
|
||||
// Use 'ws' instead of 'sockjs-node' on server since we're using native
|
||||
// websockets in `webpackHotDevClient`.
|
||||
transportMode: 'ws',
|
||||
// Prevent a WS client from getting injected as we're already including
|
||||
// `webpackHotDevClient`.
|
||||
injectClient: false,
|
||||
// It is important to tell WebpackDevServer to use the same "root" path
|
||||
// as we specified in the config. In development, we always serve from /.
|
||||
publicPath: '/',
|
||||
// WebpackDevServer is noisy by default so we emit a custom message instead
|
||||
// by listening to the compiler events with `compiler.hooks[...].tap` calls above.
|
||||
quiet: true,
|
||||
// Reportedly, this avoids CPU overload on some systems.
|
||||
// https://github.com/facebook/create-react-app/issues/293
|
||||
// src/node_modules is not ignored to support absolute imports
|
||||
// https://github.com/facebook/create-react-app/issues/1065
|
||||
watchOptions: {
|
||||
ignored: ignoredFiles(paths.appSrc),
|
||||
},
|
||||
// Enable HTTPS if the HTTPS environment variable is set to 'true'
|
||||
https: protocol === 'https',
|
||||
host,
|
||||
overlay: false,
|
||||
historyApiFallback: {
|
||||
// Paths with dots should still use the history fallback.
|
||||
// See https://github.com/facebook/create-react-app/issues/387.
|
||||
disableDotRule: true,
|
||||
},
|
||||
public: allowedHost,
|
||||
proxy,
|
||||
before(app, server) {
|
||||
if (fs.existsSync(paths.proxySetup)) {
|
||||
// This registers user-provided middleware for proxy reasons
|
||||
require(paths.proxySetup)(app);
|
||||
}
|
||||
|
||||
// This lets us fetch source contents from webpack for the error overlay
|
||||
app.use(evalSourceMapMiddleware(server));
|
||||
// This lets us open files from the runtime error overlay.
|
||||
app.use(errorOverlayMiddleware());
|
||||
|
||||
// This service worker file is effectively a 'no-op' that will reset any
|
||||
// previous service worker registered for the same host:port combination.
|
||||
// We do this in development to avoid hitting the production cache if
|
||||
// it used the same host and port.
|
||||
// https://github.com/facebook/create-react-app/issues/2272#issuecomment-302832432
|
||||
app.use(noopServiceWorkerMiddleware());
|
||||
},
|
||||
};
|
||||
};
|
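The `before(app, server)` hook above loads `paths.proxySetup` when that file exists, which in stock Create React App points at `src/setupProxy.js`: a module exporting a function that receives the dev server's Express app. A minimal sketch, assuming nothing about what middleware the popup actually needs; the `/ping` route is purely illustrative:

// src/setupProxy.js — picked up by the `before` hook above if the file exists.
module.exports = function(app) {
	// `app` is the Express instance used by WebpackDevServer,
	// so any Express middleware can be registered here.
	app.use('/ping', function(req, res) {
		res.send('pong');
	});
};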
File diff suppressed because it is too large
|
@ -4,19 +4,61 @@
|
|||
"private": true,
|
||||
"homepage": ".",
|
||||
"dependencies": {
|
||||
"@babel/core": "7.8.4",
|
||||
"@svgr/webpack": "4.3.3",
|
||||
"@typescript-eslint/eslint-plugin": "^2.10.0",
|
||||
"@typescript-eslint/parser": "^2.10.0",
|
||||
"babel-eslint": "10.0.3",
|
||||
"babel-jest": "^24.9.0",
|
||||
"babel-loader": "8.0.6",
|
||||
"babel-plugin-named-asset-import": "^0.3.6",
|
||||
"babel-preset-react-app": "^9.1.1",
|
||||
"camelcase": "^5.3.1",
|
||||
"case-sensitive-paths-webpack-plugin": "2.3.0",
|
||||
"css-loader": "3.4.2",
|
||||
"dotenv": "8.2.0",
|
||||
"dotenv-expand": "5.1.0",
|
||||
"file-loader": "4.3.0",
|
||||
"fs-extra": "^8.1.0",
|
||||
"html-webpack-plugin": "4.0.0-beta.11",
|
||||
"identity-obj-proxy": "3.0.0",
|
||||
"jest": "24.9.0",
|
||||
"jest-environment-jsdom-fourteen": "1.0.1",
|
||||
"jest-resolve": "24.9.0",
|
||||
"jest-watch-typeahead": "0.4.2",
|
||||
"mini-css-extract-plugin": "0.9.0",
|
||||
"optimize-css-assets-webpack-plugin": "5.0.3",
|
||||
"pnp-webpack-plugin": "1.6.0",
|
||||
"postcss-flexbugs-fixes": "4.1.0",
|
||||
"postcss-loader": "3.0.0",
|
||||
"postcss-normalize": "8.0.1",
|
||||
"postcss-preset-env": "6.7.0",
|
||||
"postcss-safe-parser": "4.0.1",
|
||||
"react": "^16.12.0",
|
||||
"react-app-polyfill": "^1.0.6",
|
||||
"react-dev-utils": "^10.1.0",
|
||||
"react-dom": "^16.12.0",
|
||||
"react-redux": "^5.0.7",
|
||||
"react-scripts": "3.3.1",
|
||||
"redux": "^4.0.0"
|
||||
"redux": "^4.0.0",
|
||||
"resolve": "1.15.0",
|
||||
"resolve-url-loader": "3.1.1",
|
||||
"sass-loader": "8.0.2",
|
||||
"semver": "6.3.0",
|
||||
"style-loader": "1.1.3",
|
||||
"terser-webpack-plugin": "2.3.4",
|
||||
"ts-pnp": "1.1.5",
|
||||
"url-loader": "2.3.0",
|
||||
"webpack": "4.41.5",
|
||||
"webpack-dev-server": "3.10.1",
|
||||
"webpack-manifest-plugin": "2.2.0",
|
||||
"workbox-webpack-plugin": "4.3.1"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "react-scripts start",
|
||||
"build": "SKIP_PREFLIGHT_CHECK=true react-scripts build",
|
||||
"test": "react-scripts test --env=jsdom",
|
||||
"eject": "react-scripts eject",
|
||||
"start": "node scripts/start.js",
|
||||
"build": "SKIP_PREFLIGHT_CHECK=true node scripts/build.js",
|
||||
"test": "node scripts/test.js --env=jsdom",
|
||||
"watch": "cra-build-watch",
|
||||
"postinstall": "node scripts/postinstall.js && npm run build"
|
||||
"postinstall": "node postinstall.js && npm run build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"cra-build-watch": "^3.2.0",
|
||||
|
@ -27,5 +69,58 @@
|
|||
"not dead",
|
||||
"not ie <= 11",
|
||||
"not op_mini all"
|
||||
]
|
||||
],
|
||||
"jest": {
|
||||
"roots": [
|
||||
"<rootDir>/src"
|
||||
],
|
||||
"collectCoverageFrom": [
|
||||
"src/**/*.{js,jsx,ts,tsx}",
|
||||
"!src/**/*.d.ts"
|
||||
],
|
||||
"setupFiles": [
|
||||
"react-app-polyfill/jsdom"
|
||||
],
|
||||
"setupFilesAfterEnv": [],
|
||||
"testMatch": [
|
||||
"<rootDir>/src/**/__tests__/**/*.{js,jsx,ts,tsx}",
|
||||
"<rootDir>/src/**/*.{spec,test}.{js,jsx,ts,tsx}"
|
||||
],
|
||||
"testEnvironment": "jest-environment-jsdom-fourteen",
|
||||
"transform": {
|
||||
"^.+\\.(js|jsx|ts|tsx)$": "<rootDir>/node_modules/babel-jest",
|
||||
"^.+\\.css$": "<rootDir>/config/jest/cssTransform.js",
|
||||
"^(?!.*\\.(js|jsx|ts|tsx|css|json)$)": "<rootDir>/config/jest/fileTransform.js"
|
||||
},
|
||||
"transformIgnorePatterns": [
|
||||
"[/\\\\]node_modules[/\\\\].+\\.(js|jsx|ts|tsx)$",
|
||||
"^.+\\.module\\.(css|sass|scss)$"
|
||||
],
|
||||
"modulePaths": [],
|
||||
"moduleNameMapper": {
|
||||
"^react-native$": "react-native-web",
|
||||
"^.+\\.module\\.(css|sass|scss)$": "identity-obj-proxy"
|
||||
},
|
||||
"moduleFileExtensions": [
|
||||
"web.js",
|
||||
"js",
|
||||
"web.ts",
|
||||
"ts",
|
||||
"web.tsx",
|
||||
"tsx",
|
||||
"json",
|
||||
"web.jsx",
|
||||
"jsx",
|
||||
"node"
|
||||
],
|
||||
"watchPlugins": [
|
||||
"jest-watch-typeahead/filename",
|
||||
"jest-watch-typeahead/testname"
|
||||
]
|
||||
},
|
||||
"babel": {
|
||||
"presets": [
|
||||
"react-app"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,211 @@
|
|||
'use strict';
|
||||
|
||||
// Do this as the first thing so that any code reading it knows the right env.
|
||||
process.env.BABEL_ENV = 'production';
|
||||
process.env.NODE_ENV = 'production';
|
||||
|
||||
// Makes the script crash on unhandled rejections instead of silently
|
||||
// ignoring them. In the future, promise rejections that are not handled will
|
||||
// terminate the Node.js process with a non-zero exit code.
|
||||
process.on('unhandledRejection', err => {
|
||||
throw err;
|
||||
});
|
||||
|
||||
// Ensure environment variables are read.
|
||||
require('../config/env');
|
||||
|
||||
|
||||
const path = require('path');
|
||||
const chalk = require('react-dev-utils/chalk');
|
||||
const fs = require('fs-extra');
|
||||
const webpack = require('webpack');
|
||||
const configFactory = require('../config/webpack.config');
|
||||
const paths = require('../config/paths');
|
||||
const checkRequiredFiles = require('react-dev-utils/checkRequiredFiles');
|
||||
const formatWebpackMessages = require('react-dev-utils/formatWebpackMessages');
|
||||
const printHostingInstructions = require('react-dev-utils/printHostingInstructions');
|
||||
const FileSizeReporter = require('react-dev-utils/FileSizeReporter');
|
||||
const printBuildError = require('react-dev-utils/printBuildError');
|
||||
|
||||
const measureFileSizesBeforeBuild =
|
||||
FileSizeReporter.measureFileSizesBeforeBuild;
|
||||
const printFileSizesAfterBuild = FileSizeReporter.printFileSizesAfterBuild;
|
||||
const useYarn = fs.existsSync(paths.yarnLockFile);
|
||||
|
||||
// These sizes are pretty large. We'll warn for bundles exceeding them.
|
||||
const WARN_AFTER_BUNDLE_GZIP_SIZE = 512 * 1024;
|
||||
const WARN_AFTER_CHUNK_GZIP_SIZE = 1024 * 1024;
|
||||
|
||||
const isInteractive = process.stdout.isTTY;
|
||||
|
||||
// Warn and crash if required files are missing
|
||||
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Generate configuration
|
||||
const config = configFactory('production');
|
||||
|
||||
// We require that you explicitly set browsers and do not fall back to
|
||||
// browserslist defaults.
|
||||
const { checkBrowsers } = require('react-dev-utils/browsersHelper');
|
||||
checkBrowsers(paths.appPath, isInteractive)
|
||||
.then(() => {
|
||||
// First, read the current file sizes in build directory.
|
||||
// This lets us display how much they changed later.
|
||||
return measureFileSizesBeforeBuild(paths.appBuild);
|
||||
})
|
||||
.then(previousFileSizes => {
|
||||
// Remove all content but keep the directory so that
|
||||
// if you're in it, you don't end up in Trash
|
||||
fs.emptyDirSync(paths.appBuild);
|
||||
// Merge with the public folder
|
||||
copyPublicFolder();
|
||||
// Start the webpack build
|
||||
return build(previousFileSizes);
|
||||
})
|
||||
.then(
|
||||
({ stats, previousFileSizes, warnings }) => {
|
||||
if (warnings.length) {
|
||||
console.log(chalk.yellow('Compiled with warnings.\n'));
|
||||
console.log(warnings.join('\n\n'));
|
||||
console.log(
|
||||
`\nSearch for the ${
|
||||
chalk.underline(chalk.yellow('keywords'))
|
||||
} to learn more about each warning.`
|
||||
);
|
||||
console.log(
|
||||
`To ignore, add ${
|
||||
chalk.cyan('// eslint-disable-next-line')
|
||||
} to the line before.\n`
|
||||
);
|
||||
} else {
|
||||
console.log(chalk.green('Compiled successfully.\n'));
|
||||
}
|
||||
|
||||
console.log('File sizes after gzip:\n');
|
||||
printFileSizesAfterBuild(
|
||||
stats,
|
||||
previousFileSizes,
|
||||
paths.appBuild,
|
||||
WARN_AFTER_BUNDLE_GZIP_SIZE,
|
||||
WARN_AFTER_CHUNK_GZIP_SIZE
|
||||
);
|
||||
console.log();
|
||||
|
||||
const appPackage = require(paths.appPackageJson);
|
||||
const publicUrl = paths.publicUrl;
|
||||
const publicPath = config.output.publicPath;
|
||||
const buildFolder = path.relative(process.cwd(), paths.appBuild);
|
||||
printHostingInstructions(
|
||||
appPackage,
|
||||
publicUrl,
|
||||
publicPath,
|
||||
buildFolder,
|
||||
useYarn
|
||||
);
|
||||
},
|
||||
err => {
|
||||
const tscCompileOnError = process.env.TSC_COMPILE_ON_ERROR === 'true';
|
||||
if (tscCompileOnError) {
|
||||
console.log(
|
||||
chalk.yellow(
|
||||
'Compiled with the following type errors (you may want to check these before deploying your app):\n'
|
||||
)
|
||||
);
|
||||
printBuildError(err);
|
||||
} else {
|
||||
console.log(chalk.red('Failed to compile.\n'));
|
||||
printBuildError(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
)
|
||||
.catch(err => {
|
||||
if (err && err.message) {
|
||||
console.log(err.message);
|
||||
}
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Create the production build and print the deployment instructions.
|
||||
function build(previousFileSizes) {
|
||||
// We used to support resolving modules according to `NODE_PATH`.
|
||||
// This now has been deprecated in favor of jsconfig/tsconfig.json
|
||||
// This lets you use absolute paths in imports inside large monorepos:
|
||||
if (process.env.NODE_PATH) {
|
||||
console.log(
|
||||
chalk.yellow(
|
||||
'Setting NODE_PATH to resolve modules absolutely has been deprecated in favor of setting baseUrl in jsconfig.json (or tsconfig.json if you are using TypeScript) and will be removed in a future major release of create-react-app.'
|
||||
)
|
||||
);
|
||||
console.log();
|
||||
}
|
||||
|
||||
console.log('Creating an optimized production build...');
|
||||
|
||||
const compiler = webpack(config);
|
||||
return new Promise((resolve, reject) => {
|
||||
compiler.run((err, stats) => {
|
||||
let messages;
|
||||
if (err) {
|
||||
if (!err.message) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
let errMessage = err.message;
|
||||
|
||||
// Add additional information for postcss errors
|
||||
if (Object.prototype.hasOwnProperty.call(err, 'postcssNode')) {
|
||||
errMessage +=
|
||||
`\nCompileError: Begins at CSS selector ${
|
||||
err['postcssNode'].selector}`;
|
||||
}
|
||||
|
||||
messages = formatWebpackMessages({
|
||||
errors: [errMessage],
|
||||
warnings: [],
|
||||
});
|
||||
} else {
|
||||
messages = formatWebpackMessages(
|
||||
stats.toJson({ all: false, warnings: true, errors: true })
|
||||
);
|
||||
}
|
||||
if (messages.errors.length) {
|
||||
// Only keep the first error. Others are often indicative
|
||||
// of the same problem, but confuse the reader with noise.
|
||||
if (messages.errors.length > 1) {
|
||||
messages.errors.length = 1;
|
||||
}
|
||||
return reject(new Error(messages.errors.join('\n\n')));
|
||||
}
|
||||
if (
|
||||
process.env.CI &&
|
||||
(typeof process.env.CI !== 'string' ||
|
||||
process.env.CI.toLowerCase() !== 'false') &&
|
||||
messages.warnings.length
|
||||
) {
|
||||
console.log(
|
||||
chalk.yellow(
|
||||
'\nTreating warnings as errors because process.env.CI = true.\n' +
|
||||
'Most CI servers set it automatically.\n'
|
||||
)
|
||||
);
|
||||
return reject(new Error(messages.warnings.join('\n\n')));
|
||||
}
|
||||
|
||||
return resolve({
|
||||
stats,
|
||||
previousFileSizes,
|
||||
warnings: messages.warnings,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function copyPublicFolder() {
|
||||
fs.copySync(paths.appPublic, paths.appBuild, {
|
||||
dereference: true,
|
||||
filter: file => file !== paths.appHtml,
|
||||
});
|
||||
}
|
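The CI branch above treats warnings as errors whenever `process.env.CI` is set to anything other than the string 'false'. The same check pulled out as a standalone predicate; the function and parameter names are illustrative, not part of the build script:

function treatWarningsAsErrors(ciValue, warningCount) {
	// Any non-empty CI value other than the literal string 'false' counts as "on".
	const ciEnabled = !!ciValue && (typeof ciValue !== 'string' || ciValue.toLowerCase() !== 'false');
	return ciEnabled && warningCount > 0;
}

// treatWarningsAsErrors('true', 2)    -> true  (typical CI server)
// treatWarningsAsErrors('false', 2)   -> false (explicit opt-out)
// treatWarningsAsErrors(undefined, 2) -> false (local builds keep warnings as warnings)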
|
@ -0,0 +1,147 @@
|
|||
'use strict';
|
||||
|
||||
// Do this as the first thing so that any code reading it knows the right env.
|
||||
process.env.BABEL_ENV = 'development';
|
||||
process.env.NODE_ENV = 'development';
|
||||
|
||||
// Makes the script crash on unhandled rejections instead of silently
|
||||
// ignoring them. In the future, promise rejections that are not handled will
|
||||
// terminate the Node.js process with a non-zero exit code.
|
||||
process.on('unhandledRejection', err => {
|
||||
throw err;
|
||||
});
|
||||
|
||||
// Ensure environment variables are read.
|
||||
require('../config/env');
|
||||
|
||||
|
||||
const fs = require('fs');
|
||||
const chalk = require('react-dev-utils/chalk');
|
||||
const webpack = require('webpack');
|
||||
const WebpackDevServer = require('webpack-dev-server');
|
||||
const clearConsole = require('react-dev-utils/clearConsole');
|
||||
const checkRequiredFiles = require('react-dev-utils/checkRequiredFiles');
|
||||
const {
|
||||
choosePort,
|
||||
createCompiler,
|
||||
prepareProxy,
|
||||
prepareUrls,
|
||||
} = require('react-dev-utils/WebpackDevServerUtils');
|
||||
const openBrowser = require('react-dev-utils/openBrowser');
|
||||
const paths = require('../config/paths');
|
||||
const configFactory = require('../config/webpack.config');
|
||||
const createDevServerConfig = require('../config/webpackDevServer.config');
|
||||
|
||||
const useYarn = fs.existsSync(paths.yarnLockFile);
|
||||
const isInteractive = process.stdout.isTTY;
|
||||
|
||||
// Warn and crash if required files are missing
|
||||
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Tools like Cloud9 rely on this.
|
||||
const DEFAULT_PORT = parseInt(process.env.PORT, 10) || 3000;
|
||||
const HOST = process.env.HOST || '0.0.0.0';
|
||||
|
||||
if (process.env.HOST) {
|
||||
console.log(
|
||||
chalk.cyan(
|
||||
`Attempting to bind to HOST environment variable: ${chalk.yellow(
|
||||
chalk.bold(process.env.HOST)
|
||||
)}`
|
||||
)
|
||||
);
|
||||
console.log(
|
||||
'If this was unintentional, check that you haven\'t mistakenly set it in your shell.'
|
||||
);
|
||||
console.log(
|
||||
`Learn more here: ${chalk.yellow('https://bit.ly/CRA-advanced-config')}`
|
||||
);
|
||||
console.log();
|
||||
}
|
||||
|
||||
// We require that you explicitly set browsers and do not fall back to
|
||||
// browserslist defaults.
|
||||
const { checkBrowsers } = require('react-dev-utils/browsersHelper');
|
||||
checkBrowsers(paths.appPath, isInteractive)
|
||||
.then(() => {
|
||||
// We attempt to use the default port but if it is busy, we offer the user to
|
||||
// run on a different port. `choosePort()` Promise resolves to the next free port.
|
||||
return choosePort(HOST, DEFAULT_PORT);
|
||||
})
|
||||
.then(port => {
|
||||
if (port == null) {
|
||||
// We have not found a port.
|
||||
return;
|
||||
}
|
||||
const config = configFactory('development');
|
||||
const protocol = process.env.HTTPS === 'true' ? 'https' : 'http';
|
||||
const appName = require(paths.appPackageJson).name;
|
||||
const useTypeScript = fs.existsSync(paths.appTsConfig);
|
||||
const tscCompileOnError = process.env.TSC_COMPILE_ON_ERROR === 'true';
|
||||
const urls = prepareUrls(protocol, HOST, port);
|
||||
const devSocket = {
|
||||
warnings: warnings =>
|
||||
devServer.sockWrite(devServer.sockets, 'warnings', warnings),
|
||||
errors: errors =>
|
||||
devServer.sockWrite(devServer.sockets, 'errors', errors),
|
||||
};
|
||||
// Create a webpack compiler that is configured with custom messages.
|
||||
const compiler = createCompiler({
|
||||
appName,
|
||||
config,
|
||||
devSocket,
|
||||
urls,
|
||||
useYarn,
|
||||
useTypeScript,
|
||||
tscCompileOnError,
|
||||
webpack,
|
||||
});
|
||||
// Load proxy config
|
||||
const proxySetting = require(paths.appPackageJson).proxy;
|
||||
const proxyConfig = prepareProxy(proxySetting, paths.appPublic);
|
||||
// Serve webpack assets generated by the compiler over a web server.
|
||||
const serverConfig = createDevServerConfig(
|
||||
proxyConfig,
|
||||
urls.lanUrlForConfig
|
||||
);
|
||||
const devServer = new WebpackDevServer(compiler, serverConfig);
|
||||
// Launch WebpackDevServer.
|
||||
devServer.listen(port, HOST, err => {
|
||||
if (err) {
|
||||
return console.log(err);
|
||||
}
|
||||
if (isInteractive) {
|
||||
clearConsole();
|
||||
}
|
||||
|
||||
// We used to support resolving modules according to `NODE_PATH`.
|
||||
// This now has been deprecated in favor of jsconfig/tsconfig.json
|
||||
// This lets you use absolute paths in imports inside large monorepos:
|
||||
if (process.env.NODE_PATH) {
|
||||
console.log(
|
||||
chalk.yellow(
|
||||
'Setting NODE_PATH to resolve modules absolutely has been deprecated in favor of setting baseUrl in jsconfig.json (or tsconfig.json if you are using TypeScript) and will be removed in a future major release of create-react-app.'
|
||||
)
|
||||
);
|
||||
console.log();
|
||||
}
|
||||
|
||||
console.log(chalk.cyan('Starting the development server...\n'));
|
||||
openBrowser(urls.localUrlForBrowser);
|
||||
});
|
||||
|
||||
['SIGINT', 'SIGTERM'].forEach(function(sig) {
|
||||
process.on(sig, function() {
|
||||
devServer.close();
|
||||
process.exit();
|
||||
});
|
||||
});
|
||||
})
|
||||
.catch(err => {
|
||||
if (err && err.message) {
|
||||
console.log(err.message);
|
||||
}
|
||||
process.exit(1);
|
||||
});
|
|
@ -0,0 +1,53 @@
|
|||
'use strict';
|
||||
|
||||
// Do this as the first thing so that any code reading it knows the right env.
|
||||
process.env.BABEL_ENV = 'test';
|
||||
process.env.NODE_ENV = 'test';
|
||||
process.env.PUBLIC_URL = '';
|
||||
|
||||
// Makes the script crash on unhandled rejections instead of silently
|
||||
// ignoring them. In the future, promise rejections that are not handled will
|
||||
// terminate the Node.js process with a non-zero exit code.
|
||||
process.on('unhandledRejection', err => {
|
||||
throw err;
|
||||
});
|
||||
|
||||
// Ensure environment variables are read.
|
||||
require('../config/env');
|
||||
|
||||
|
||||
const jest = require('jest');
|
||||
const execSync = require('child_process').execSync;
|
||||
const argv = process.argv.slice(2);
|
||||
|
||||
function isInGitRepository() {
|
||||
try {
|
||||
execSync('git rev-parse --is-inside-work-tree', { stdio: 'ignore' });
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function isInMercurialRepository() {
|
||||
try {
|
||||
execSync('hg --cwd . root', { stdio: 'ignore' });
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Watch unless on CI or explicitly running all tests
|
||||
if (
|
||||
!process.env.CI &&
|
||||
argv.indexOf('--watchAll') === -1 &&
|
||||
argv.indexOf('--watchAll=false') === -1
|
||||
) {
|
||||
// https://github.com/facebook/create-react-app/issues/5210
|
||||
const hasSourceControl = isInGitRepository() || isInMercurialRepository();
|
||||
argv.push(hasSourceControl ? '--watch' : '--watchAll');
|
||||
}
|
||||
|
||||
|
||||
jest.run(argv);
|
|
@ -45,8 +45,8 @@ class ElectronAppWrapper {
|
|||
|
||||
|
||||
const stateOptions = {
|
||||
defaultWidth: Math.round(0.8*screen.getPrimaryDisplay().workArea.width),
|
||||
defaultHeight: Math.round(0.8*screen.getPrimaryDisplay().workArea.height),
|
||||
defaultWidth: Math.round(0.8 * screen.getPrimaryDisplay().workArea.width),
|
||||
defaultHeight: Math.round(0.8 * screen.getPrimaryDisplay().workArea.height),
|
||||
file: `window-state-${this.env_}.json`,
|
||||
};
|
||||
|
||||
|
@ -89,7 +89,7 @@ class ElectronAppWrapper {
|
|||
if (!screen.getDisplayMatching(this.win_.getBounds())) {
|
||||
const { width: windowWidth, height: windowHeight } = this.win_.getBounds();
|
||||
const { width: primaryDisplayWidth, height: primaryDisplayHeight } = screen.getPrimaryDisplay().workArea;
|
||||
this.win_.setPosition(primaryDisplayWidth/2 - windowWidth, primaryDisplayHeight/2 - windowHeight);
|
||||
this.win_.setPosition(primaryDisplayWidth / 2 - windowWidth, primaryDisplayHeight / 2 - windowHeight);
|
||||
}
|
||||
|
||||
this.win_.loadURL(url.format({
|
||||
|
|
|
@ -23,7 +23,7 @@ const ResourceService = require('lib/services/ResourceService');
|
|||
const ClipperServer = require('lib/ClipperServer');
|
||||
const ExternalEditWatcher = require('lib/services/ExternalEditWatcher');
|
||||
const { bridge } = require('electron').remote.require('./bridge');
|
||||
const { shell, webFrame } = require('electron');
|
||||
const { shell, webFrame, clipboard } = require('electron');
|
||||
const Menu = bridge().Menu;
|
||||
const PluginManager = require('lib/services/PluginManager');
|
||||
const RevisionService = require('lib/services/RevisionService');
|
||||
|
@ -85,7 +85,7 @@ class Application extends BaseApplication {
|
|||
const currentRoute = state.route;
|
||||
|
||||
newState = Object.assign({}, state);
|
||||
let newNavHistory = state.navHistory.slice();
|
||||
const newNavHistory = state.navHistory.slice();
|
||||
|
||||
if (goingBack) {
|
||||
let newAction = null;
|
||||
|
@ -115,7 +115,7 @@ class Application extends BaseApplication {
|
|||
|
||||
{
|
||||
newState = Object.assign({}, state);
|
||||
let command = Object.assign({}, action);
|
||||
const command = Object.assign({}, action);
|
||||
delete command.type;
|
||||
newState.windowCommand = command.name ? command : null;
|
||||
}
|
||||
|
@ -143,13 +143,13 @@ class Application extends BaseApplication {
|
|||
const currentLayoutIndex = paneOptions.indexOf(currentLayout);
|
||||
const nextLayoutIndex = currentLayoutIndex === paneOptions.length - 1 ? 0 : currentLayoutIndex + 1;
|
||||
|
||||
let nextLayout = paneOptions[nextLayoutIndex];
|
||||
const nextLayout = paneOptions[nextLayoutIndex];
|
||||
return nextLayout === 'both' ? ['editor', 'viewer'] : [nextLayout];
|
||||
};
|
||||
|
||||
newState = Object.assign({}, state);
|
||||
|
||||
let panes = state.noteVisiblePanes.slice();
|
||||
const panes = state.noteVisiblePanes.slice();
|
||||
newState.noteVisiblePanes = getNextLayout(panes);
|
||||
}
|
||||
break;
|
||||
|
@ -328,7 +328,7 @@ class Application extends BaseApplication {
|
|||
const sortNoteFolderItems = (type) => {
|
||||
const sortItems = [];
|
||||
const sortOptions = Setting.enumOptions(`${type}.sortOrder.field`);
|
||||
for (let field in sortOptions) {
|
||||
for (const field in sortOptions) {
|
||||
if (!sortOptions.hasOwnProperty(field)) continue;
|
||||
sortItems.push({
|
||||
label: sortOptions[field],
|
||||
|
@ -650,7 +650,7 @@ class Application extends BaseApplication {
|
|||
gitInfo = _('Revision: %s (%s)', p.git.hash, p.git.branch);
|
||||
}
|
||||
const copyrightText = 'Copyright © 2016-YYYY Laurent Cozic';
|
||||
let message = [
|
||||
const message = [
|
||||
p.description,
|
||||
'',
|
||||
copyrightText.replace('YYYY', new Date().getFullYear()),
|
||||
|
@ -664,9 +664,17 @@ class Application extends BaseApplication {
|
|||
message.push(`\n${gitInfo}`);
|
||||
console.info(gitInfo);
|
||||
}
|
||||
bridge().showInfoMessageBox(message.join('\n'), {
|
||||
const text = message.join('\n');
|
||||
|
||||
const copyToClipboard = bridge().showMessageBox(text, {
|
||||
icon: `${bridge().electronApp().buildDir()}/icons/128x128.png`,
|
||||
buttons: [_('Copy'), _('OK')],
|
||||
cancelId: 1,
|
||||
defaultId: 1,
|
||||
});
|
||||
if (copyToClipboard === 0) {
|
||||
clipboard.writeText(message.splice(3).join('\n'));
|
||||
}
|
||||
}
|
||||
|
||||
const rootMenuFile = {
|
||||
|
@ -965,7 +973,6 @@ class Application extends BaseApplication {
|
|||
});
|
||||
},
|
||||
}, {
|
||||
id: 'view:toggleLayout',
|
||||
label: _('Toggle editor layout'),
|
||||
screens: ['Main'],
|
||||
accelerator: 'CommandOrControl+L',
|
||||
|
@ -1112,7 +1119,7 @@ class Application extends BaseApplication {
|
|||
|
||||
const pluginMenuItems = PluginManager.instance().menuItems();
|
||||
for (const item of pluginMenuItems) {
|
||||
let itemParent = rootMenus[item.parent] ? rootMenus[item.parent] : 'tools';
|
||||
const itemParent = rootMenus[item.parent] ? rootMenus[item.parent] : 'tools';
|
||||
itemParent.submenu.push(item);
|
||||
}
|
||||
|
||||
|
@ -1148,7 +1155,7 @@ class Application extends BaseApplication {
|
|||
}
|
||||
|
||||
// Remove empty separator for now empty sections
|
||||
let temp = [];
|
||||
const temp = [];
|
||||
let previous = null;
|
||||
for (let i = 0; i < output.length; i++) {
|
||||
const t = Object.assign({}, output[i]);
|
||||
|
@ -1164,7 +1171,7 @@ class Application extends BaseApplication {
|
|||
return output;
|
||||
}
|
||||
|
||||
let screenTemplate = removeUnwantedItems(template, screen);
|
||||
const screenTemplate = removeUnwantedItems(template, screen);
|
||||
|
||||
const menu = Menu.buildFromTemplate(screenTemplate);
|
||||
Menu.setApplicationMenu(menu);
|
||||
|
@ -1188,8 +1195,6 @@ class Application extends BaseApplication {
|
|||
menuItem.enabled = !isHtmlNote && layout !== 'viewer' && !!note;
|
||||
}
|
||||
|
||||
const toggleLayout = Menu.getApplicationMenu().getMenuItemById('view:toggleLayout');
|
||||
toggleLayout.enabled = !!note;
|
||||
const menuItem = Menu.getApplicationMenu().getMenuItemById('help:toggleDevTools');
|
||||
menuItem.checked = state.devToolsVisible;
|
||||
}
|
||||
|
|
|
@ -106,6 +106,19 @@ class Bridge {
|
|||
return result === 0;
|
||||
}
|
||||
|
||||
// Returns the index of the clicked button
|
||||
showMessageBox(message, options = null) {
|
||||
if (options === null) options = {};
|
||||
|
||||
const result = this.showMessageBox_(this.window(), Object.assign({}, {
|
||||
type: 'question',
|
||||
message: message,
|
||||
buttons: [_('OK'), _('Cancel')],
|
||||
}, options));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
showInfoMessageBox(message, options = {}) {
|
||||
const result = this.showMessageBox_(this.window(), Object.assign({}, {
|
||||
type: 'info',
|
||||
|
|
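The new `showMessageBox()` returns the index of the clicked button, which is how the About dialog above decides whether to copy the version text to the clipboard. A minimal sketch of a caller in the same style; the message, button labels and copied text are made up for illustration:

const { clipboard } = require('electron');
const { bridge } = require('electron').remote.require('./bridge');
const { _ } = require('lib/locale.js');

const textToCopy = 'Example text';
const buttonIndex = bridge().showMessageBox(_('Copy this text to the clipboard?'), {
	buttons: [_('Copy'), _('Cancel')],
	defaultId: 1,
	cancelId: 1,
});

// Index 0 is the first entry in `buttons`, i.e. the "Copy" action.
if (buttonIndex === 0) clipboard.writeText(textToCopy);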
|
@ -132,11 +132,13 @@ function checkForUpdates(inBackground, window, logFilePath, options) {
|
|||
autoUpdateLogger_.info('Is Pre-release:', release.prerelease);
|
||||
|
||||
if (compareVersions(release.version, packageInfo.version) <= 0) {
|
||||
if (!checkInBackground_) await dialog.showMessageBox({
|
||||
type: 'info',
|
||||
message: _('Current version is up-to-date.'),
|
||||
buttons: [_('OK')],
|
||||
});
|
||||
if (!checkInBackground_) {
|
||||
await dialog.showMessageBox({
|
||||
type: 'info',
|
||||
message: _('Current version is up-to-date.'),
|
||||
buttons: [_('OK')],
|
||||
});
|
||||
}
|
||||
} else {
|
||||
const fullReleaseNotes = release.notes.trim() ? `\n\n${release.notes.trim()}` : '';
|
||||
const MAX_RELEASE_NOTES_LENGTH = 1000;
|
||||
|
|
|
@ -57,7 +57,7 @@ class ClipperConfigScreenComponent extends React.Component {
|
|||
backgroundColor: theme.backgroundColor,
|
||||
};
|
||||
|
||||
let webClipperStatusComps = [];
|
||||
const webClipperStatusComps = [];
|
||||
|
||||
if (this.props.clipperServerAutoStart) {
|
||||
webClipperStatusComps.push(
|
||||
|
|
|
@ -92,8 +92,8 @@ class ConfigScreenComponent extends React.Component {
|
|||
}
|
||||
|
||||
keyValueToArray(kv) {
|
||||
let output = [];
|
||||
for (let k in kv) {
|
||||
const output = [];
|
||||
for (const k in kv) {
|
||||
if (!kv.hasOwnProperty(k)) continue;
|
||||
output.push({
|
||||
key: k,
|
||||
|
@ -205,7 +205,7 @@ class ConfigScreenComponent extends React.Component {
|
|||
}
|
||||
|
||||
let advancedSettingsButton = null;
|
||||
let advancedSettingsSectionStyle = { display: 'none' };
|
||||
const advancedSettingsSectionStyle = { display: 'none' };
|
||||
|
||||
if (advancedSettingComps.length) {
|
||||
const iconName = this.state.showAdvancedSettings ? 'fa fa-toggle-up' : 'fa fa-toggle-down';
|
||||
|
@ -227,7 +227,7 @@ class ConfigScreenComponent extends React.Component {
|
|||
settingToComponent(key, value) {
|
||||
const theme = themeStyle(this.props.theme);
|
||||
|
||||
let output = null;
|
||||
const output = null;
|
||||
|
||||
const rowStyle = this.rowStyle_;
|
||||
|
||||
|
@ -283,9 +283,9 @@ class ConfigScreenComponent extends React.Component {
|
|||
const descriptionComp = descriptionText ? <div style={descriptionStyle}>{descriptionText}</div> : null;
|
||||
|
||||
if (md.isEnum) {
|
||||
let items = [];
|
||||
const items = [];
|
||||
const settingOptions = md.options();
|
||||
let array = this.keyValueToArray(settingOptions);
|
||||
const array = this.keyValueToArray(settingOptions);
|
||||
for (let i = 0; i < array.length; i++) {
|
||||
const e = array[i];
|
||||
items.push(
|
||||
|
@ -547,7 +547,7 @@ class ConfigScreenComponent extends React.Component {
|
|||
}
|
||||
);
|
||||
|
||||
let settings = this.state.settings;
|
||||
const settings = this.state.settings;
|
||||
|
||||
const containerStyle = Object.assign({}, theme.containerStyle, { padding: 10, paddingTop: 0, display: 'flex', flex: 1 });
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@ const EncryptionService = require('lib/services/EncryptionService');
|
|||
const { themeStyle } = require('../theme.js');
|
||||
const { _ } = require('lib/locale.js');
|
||||
const { time } = require('lib/time-utils.js');
|
||||
const { shim } = require('lib/shim');
|
||||
const dialogs = require('./dialogs');
|
||||
const shared = require('lib/components/shared/encryption-config-shared.js');
|
||||
const { bridge } = require('electron').remote.require('./bridge');
|
||||
|
@ -15,28 +16,18 @@ class EncryptionConfigScreenComponent extends React.Component {
|
|||
shared.constructor(this);
|
||||
}
|
||||
|
||||
componentDidMount() {
|
||||
this.isMounted_ = true;
|
||||
}
|
||||
|
||||
componentWillUnmount() {
|
||||
this.isMounted_ = false;
|
||||
shared.componentWillUnmount();
|
||||
}
|
||||
|
||||
initState(props) {
|
||||
return shared.initState(this, props);
|
||||
componentDidMount() {
|
||||
this.isMounted_ = true;
|
||||
shared.componentDidMount(this);
|
||||
}
|
||||
|
||||
async refreshStats() {
|
||||
return shared.refreshStats(this);
|
||||
}
|
||||
|
||||
UNSAFE_componentWillMount() {
|
||||
this.initState(this.props);
|
||||
}
|
||||
|
||||
UNSAFE_componentWillReceiveProps(nextProps) {
|
||||
this.initState(nextProps);
|
||||
componentDidUpdate(prevProps) {
|
||||
shared.componentDidUpdate(this, prevProps);
|
||||
}
|
||||
|
||||
async checkPasswords() {
|
||||
|
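The hunk above drops the deprecated `UNSAFE_componentWillMount`/`UNSAFE_componentWillReceiveProps` hooks in favour of `componentDidMount`/`componentDidUpdate` calls that delegate to the shared module. The shared module's internals are not shown in this diff; the sketch below is only the general React pattern being applied, with illustrative names:

const React = require('react');

class Example extends React.Component {
	componentDidMount() {
		// Derive initial state once the component is mounted.
		this.syncStateWithProps(this.props);
	}

	componentDidUpdate(prevProps) {
		// Re-derive only when the relevant inputs actually changed.
		if (prevProps.masterKeys !== this.props.masterKeys) {
			this.syncStateWithProps(this.props);
		}
	}

	syncStateWithProps(props) {
		this.setState({ keyCount: props.masterKeys.length });
	}

	render() {
		return null;
	}
}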
@ -61,7 +52,7 @@ class EncryptionConfigScreenComponent extends React.Component {
|
|||
return shared.onPasswordChange(this, mk, event.target.value);
|
||||
};
|
||||
|
||||
const password = this.state.passwords[mk.id] ? this.state.passwords[mk.id] : '';
|
||||
const password = this.props.passwords[mk.id] ? this.props.passwords[mk.id] : '';
|
||||
const active = this.props.activeMasterKeyId === mk.id ? '✔' : '';
|
||||
const passwordOk = this.state.passwordChecks[mk.id] === true ? '✔' : '❌';
|
||||
|
||||
|
@ -83,9 +74,73 @@ class EncryptionConfigScreenComponent extends React.Component {
|
|||
);
|
||||
}
|
||||
|
||||
renderNeedUpgradeSection() {
|
||||
if (!shim.isElectron()) return null;
|
||||
|
||||
const needUpgradeMasterKeys = EncryptionService.instance().masterKeysThatNeedUpgrading(this.props.masterKeys);
|
||||
if (!needUpgradeMasterKeys.length) return null;
|
||||
|
||||
const theme = themeStyle(this.props.theme);
|
||||
|
||||
const rows = [];
|
||||
const comp = this;
|
||||
|
||||
for (const mk of needUpgradeMasterKeys) {
|
||||
rows.push(
|
||||
<tr key={mk.id}>
|
||||
<td style={theme.textStyle}>{mk.id}</td>
|
||||
<td><button onClick={() => shared.upgradeMasterKey(comp, mk)} style={theme.buttonStyle}>Upgrade</button></td>
|
||||
</tr>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h1 style={theme.h1Style}>{_('Master keys that need upgrading')}</h1>
|
||||
<p style={theme.textStyle}>{_('The following master keys use an out-dated encryption algorithm and it is recommended to upgrade them. The upgraded master key will still be able to decrypt and encrypt your data as usual.')}</p>
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<th style={theme.textStyle}>{_('ID')}</th>
|
||||
<th style={theme.textStyle}>{_('Upgrade')}</th>
|
||||
</tr>
|
||||
{rows}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
renderReencryptData() {
|
||||
if (!shim.isElectron()) return null;
|
||||
|
||||
const theme = themeStyle(this.props.theme);
|
||||
const buttonLabel = _('Re-encrypt data');
|
||||
|
||||
const intro = this.props.shouldReencrypt ? _('The default encryption method has been changed to a more secure one and it is recommended that you apply it to your data.') : _('You may use the tool below to re-encrypt your data, for example if you know that some of your notes are encrypted with an obsolete encryption method.');
|
||||
|
||||
let t = `${intro}\n\n${_('In order to do so, your entire data set will have to encrypted and synchronised, so it is best to run it overnight.\n\nTo start, please follow these instructions:\n\n1. Synchronise all your devices.\n2. Click "%s".\n3. Let it run to completion. While it runs, avoid changing any note on your other devices, to avoid conflicts.\n4. Once sync is done on this device, sync all your other devices and let it run to completion.\n\nImportant: you only need to run this ONCE on one device.', buttonLabel)}`;
|
||||
|
||||
t = t.replace(/\n\n/g, '</p><p>');
|
||||
t = t.replace(/\n/g, '<br>');
|
||||
t = `<p>${t}</p>`;
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h1 style={theme.h1Style}>{_('Re-encryption')}</h1>
|
||||
<p style={theme.textStyle} dangerouslySetInnerHTML={{ __html: t }}></p>
|
||||
<span style={{ marginRight: 10 }}>
|
||||
<button onClick={() => shared.reencryptData()} style={theme.buttonStyle}>{buttonLabel}</button>
|
||||
</span>
|
||||
|
||||
{ !this.props.shouldReencrypt ? null : <button onClick={() => shared.dontReencryptData()} style={theme.buttonStyle}>{_('Ignore')}</button> }
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
render() {
|
||||
const theme = themeStyle(this.props.theme);
|
||||
const masterKeys = this.state.masterKeys;
|
||||
const masterKeys = this.props.masterKeys;
|
||||
const containerPadding = 10;
|
||||
|
||||
const containerStyle = Object.assign({}, theme.containerStyle, {
|
||||
|
@ -94,7 +149,7 @@ class EncryptionConfigScreenComponent extends React.Component {
|
|||
});
|
||||
|
||||
const mkComps = [];
|
||||
let nonExistingMasterKeyIds = this.props.notLoadedMasterKeys.slice();
|
||||
const nonExistingMasterKeyIds = this.props.notLoadedMasterKeys.slice();
|
||||
|
||||
for (let i = 0; i < masterKeys.length; i++) {
|
||||
const mk = masterKeys[i];
|
||||
|
@ -139,6 +194,9 @@ class EncryptionConfigScreenComponent extends React.Component {
|
|||
</button>
|
||||
);
|
||||
|
||||
const needUpgradeSection = this.renderNeedUpgradeSection();
|
||||
const reencryptDataSection = this.renderReencryptData();
|
||||
|
||||
let masterKeySection = null;
|
||||
|
||||
if (mkComps.length) {
|
||||
|
@ -218,8 +276,11 @@ class EncryptionConfigScreenComponent extends React.Component {
|
|||
</p>
|
||||
{decryptedItemsInfo}
|
||||
{toggleButton}
|
||||
{needUpgradeSection}
|
||||
{this.props.shouldReencrypt ? reencryptDataSection : null}
|
||||
{masterKeySection}
|
||||
{nonExistingMasterKeySection}
|
||||
{!this.props.shouldReencrypt ? reencryptDataSection : null}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
@ -233,6 +294,7 @@ const mapStateToProps = state => {
|
|||
passwords: state.settings['encryption.passwordCache'],
|
||||
encryptionEnabled: state.settings['encryption.enabled'],
|
||||
activeMasterKeyId: state.settings['encryption.activeMasterKeyId'],
|
||||
shouldReencrypt: state.settings['encryption.shouldReencrypt'] >= Setting.SHOULD_REENCRYPT_YES,
|
||||
notLoadedMasterKeys: state.notLoadedMasterKeys,
|
||||
};
|
||||
};
|
||||
|
|
|
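A minimal sketch of the pattern applied in EncryptionConfigScreen above: master keys and cached passwords are no longer copied into component state but are read straight from the Redux store through mapStateToProps. The store keys match the diff; the component body and the connect() wiring are simplified assumptions, not the commit's actual code.

const React = require('react');
const { connect } = require('react-redux');

class EncryptionConfigExample extends React.Component {
	render() {
		// Previously this.state.masterKeys / this.state.passwords; now taken from props.
		const rows = this.props.masterKeys.map(mk => {
			const password = this.props.passwords[mk.id] ? this.props.passwords[mk.id] : '';
			return <div key={mk.id}>{mk.id}: {password ? 'password cached' : 'no password'}</div>;
		});
		return <div>{rows}</div>;
	}
}

const mapStateToProps = state => {
	return {
		masterKeys: state.masterKeys,
		passwords: state.settings['encryption.passwordCache'],
		notLoadedMasterKeys: state.notLoadedMasterKeys,
	};
};

module.exports = connect(mapStateToProps)(EncryptionConfigExample);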
@ -149,7 +149,7 @@ class HeaderComponent extends React.Component {
|
|||
}
|
||||
|
||||
const isEnabled = !('enabled' in options) || options.enabled;
|
||||
let classes = ['button'];
|
||||
const classes = ['button'];
|
||||
if (!isEnabled) classes.push('disabled');
|
||||
|
||||
const finalStyle = Object.assign({}, style, {
|
||||
|
|
|
@ -15,7 +15,7 @@ class HelpButtonComponent extends React.Component {
|
|||
|
||||
render() {
|
||||
const theme = themeStyle(this.props.theme);
|
||||
let style = Object.assign({}, this.props.style, { color: theme.color, textDecoration: 'none' });
|
||||
const style = Object.assign({}, this.props.style, { color: theme.color, textDecoration: 'none' });
|
||||
const helpIconStyle = { flex: 0, width: 16, height: 16, marginLeft: 10 };
|
||||
const extraProps = {};
|
||||
if (this.props.tip) extraProps['data-tip'] = this.props.tip;
|
||||
|
|
|
@ -46,9 +46,9 @@ class ImportScreenComponent extends React.Component {
|
|||
}
|
||||
|
||||
uniqueMessages() {
|
||||
let output = [];
|
||||
const output = [];
|
||||
const messages = this.state.messages.slice();
|
||||
let foundKeys = [];
|
||||
const foundKeys = [];
|
||||
for (let i = messages.length - 1; i >= 0; i--) {
|
||||
const msg = messages[i];
|
||||
if (foundKeys.indexOf(msg.key) >= 0) continue;
|
||||
|
@ -68,7 +68,7 @@ class ImportScreenComponent extends React.Component {
|
|||
|
||||
const options = {
|
||||
onProgress: progressState => {
|
||||
let line = [];
|
||||
const line = [];
|
||||
line.push(_('Found: %d.', progressState.loaded));
|
||||
line.push(_('Created: %d.', progressState.created));
|
||||
if (progressState.updated) line.push(_('Updated: %d.', progressState.updated));
|
||||
|
|
|
@ -83,7 +83,7 @@ class ItemList extends React.Component {
|
|||
return <div key={key} style={{ height: height }}></div>;
|
||||
};
|
||||
|
||||
let itemComps = [blankItem('top', this.state.topItemIndex * this.props.itemHeight)];
|
||||
const itemComps = [blankItem('top', this.state.topItemIndex * this.props.itemHeight)];
|
||||
|
||||
for (let i = this.state.topItemIndex; i <= this.state.bottomItemIndex; i++) {
|
||||
const itemComp = this.props.itemRenderer(items[i]);
|
||||
|
@ -92,7 +92,7 @@ class ItemList extends React.Component {
|
|||
|
||||
itemComps.push(blankItem('bottom', (items.length - this.state.bottomItemIndex - 1) * this.props.itemHeight));
|
||||
|
||||
let classes = ['item-list'];
|
||||
const classes = ['item-list'];
|
||||
if (this.props.className) classes.push(this.props.className);
|
||||
|
||||
return (
|
||||
|
|
|
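The let-to-const conversions that run through these components follow one rule: a binding that is never reassigned becomes const, even if the value it holds is mutated. A small illustrative sketch (the variable names are assumptions drawn from the surrounding hunks):

const isSelected = true;
const movingDown = true;

// Never reassigned, so const, even though the array's contents change:
const classes = ['item-list'];
if (isSelected) classes.push('selected');

// Reassigned below, so let is still the right choice:
let noteIndex = 0;
if (movingDown) noteIndex += 1;

console.log(classes.join(' '), noteIndex);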
@ -22,6 +22,7 @@ const eventManager = require('../eventManager');
|
|||
const VerticalResizer = require('./VerticalResizer.min');
|
||||
const PluginManager = require('lib/services/PluginManager');
|
||||
const TemplateUtils = require('lib/TemplateUtils');
|
||||
const EncryptionService = require('lib/services/EncryptionService');
|
||||
|
||||
class MainScreenComponent extends React.Component {
|
||||
constructor() {
|
||||
|
@ -323,7 +324,7 @@ class MainScreenComponent extends React.Component {
|
|||
} else if (command.name === 'editAlarm') {
|
||||
const note = await Note.load(command.noteId);
|
||||
|
||||
let defaultDate = new Date(Date.now() + 2 * 3600 * 1000);
|
||||
const defaultDate = new Date(Date.now() + 2 * 3600 * 1000);
|
||||
defaultDate.setMinutes(0);
|
||||
defaultDate.setSeconds(0);
|
||||
|
||||
|
@ -477,6 +478,85 @@ class MainScreenComponent extends React.Component {
|
|||
return this.styles_;
|
||||
}
|
||||
|
||||
renderNotification(theme, styles) {
|
||||
if (!this.messageBoxVisible()) return null;
|
||||
|
||||
const onViewStatusScreen = () => {
|
||||
this.props.dispatch({
|
||||
type: 'NAV_GO',
|
||||
routeName: 'Status',
|
||||
});
|
||||
};
|
||||
|
||||
const onViewEncryptionConfigScreen = () => {
|
||||
this.props.dispatch({
|
||||
type: 'NAV_GO',
|
||||
routeName: 'Config',
|
||||
props: {
|
||||
defaultSection: 'encryption',
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
let msg = null;
|
||||
if (this.props.hasDisabledSyncItems) {
|
||||
msg = (
|
||||
<span>
|
||||
{_('Some items cannot be synchronised.')}{' '}
|
||||
<a href="#" onClick={() => onViewStatusScreen()}>
|
||||
{_('View them now')}
|
||||
</a>
|
||||
</span>
|
||||
);
|
||||
} else if (this.props.hasDisabledEncryptionItems) {
|
||||
msg = (
|
||||
<span>
|
||||
{_('Some items cannot be decrypted.')}{' '}
|
||||
<a href="#" onClick={() => onViewStatusScreen()}>
|
||||
{_('View them now')}
|
||||
</a>
|
||||
</span>
|
||||
);
|
||||
} else if (this.props.showMissingMasterKeyMessage) {
|
||||
msg = (
|
||||
<span>
|
||||
{_('One or more master keys need a password.')}{' '}
|
||||
<a href="#" onClick={() => onViewEncryptionConfigScreen()}>
|
||||
{_('Set the password')}
|
||||
</a>
|
||||
</span>
|
||||
);
|
||||
} else if (this.props.showNeedUpgradingMasterKeyMessage) {
|
||||
msg = (
|
||||
<span>
|
||||
{_('One of your master keys use an obsolete encryption method.')}{' '}
|
||||
<a href="#" onClick={() => onViewEncryptionConfigScreen()}>
|
||||
{_('View them now')}
|
||||
</a>
|
||||
</span>
|
||||
);
|
||||
} else if (this.props.showShouldReencryptMessage) {
|
||||
msg = (
|
||||
<span>
|
||||
{_('The default encryption method has been changed, you should re-encrypt your data.')}{' '}
|
||||
<a href="#" onClick={() => onViewEncryptionConfigScreen()}>
|
||||
{_('More info')}
|
||||
</a>
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={styles.messageBox}>
|
||||
<span style={theme.textStyle}>{msg}</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
messageBoxVisible() {
|
||||
return this.props.hasDisabledSyncItems || this.props.showMissingMasterKeyMessage || this.props.showNeedUpgradingMasterKeyMessage || this.props.showShouldReencryptMessage || this.props.hasDisabledEncryptionItems;
|
||||
}
|
||||
|
||||
render() {
|
||||
const theme = themeStyle(this.props.theme);
|
||||
const style = Object.assign(
|
||||
|
@ -489,10 +569,9 @@ class MainScreenComponent extends React.Component {
|
|||
const promptOptions = this.state.promptOptions;
|
||||
const folders = this.props.folders;
|
||||
const notes = this.props.notes;
|
||||
const messageBoxVisible = this.props.hasDisabledSyncItems || this.props.showMissingMasterKeyMessage;
|
||||
const sidebarVisibility = this.props.sidebarVisibility;
|
||||
const noteListVisibility = this.props.noteListVisibility;
|
||||
const styles = this.styles(this.props.theme, style.width, style.height, messageBoxVisible, sidebarVisibility, noteListVisibility, this.props.sidebarWidth, this.props.noteListWidth);
|
||||
const styles = this.styles(this.props.theme, style.width, style.height, this.messageBoxVisible(), sidebarVisibility, noteListVisibility, this.props.sidebarWidth, this.props.noteListWidth);
|
||||
const onConflictFolder = this.props.selectedFolderId === Folder.conflictFolderId();
|
||||
|
||||
const headerItems = [];
|
||||
|
@ -565,63 +644,7 @@ class MainScreenComponent extends React.Component {
|
|||
};
|
||||
}
|
||||
|
||||
const onViewDisabledItemsClick = () => {
|
||||
this.props.dispatch({
|
||||
type: 'NAV_GO',
|
||||
routeName: 'Status',
|
||||
});
|
||||
};
|
||||
|
||||
const onViewMasterKeysClick = () => {
|
||||
this.props.dispatch({
|
||||
type: 'NAV_GO',
|
||||
routeName: 'Config',
|
||||
props: {
|
||||
defaultSection: 'encryption',
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
let messageComp = null;
|
||||
|
||||
if (messageBoxVisible) {
|
||||
let msg = null;
|
||||
if (this.props.hasDisabledSyncItems) {
|
||||
msg = (
|
||||
<span>
|
||||
{_('Some items cannot be synchronised.')}{' '}
|
||||
<a
|
||||
href="#"
|
||||
onClick={() => {
|
||||
onViewDisabledItemsClick();
|
||||
}}
|
||||
>
|
||||
{_('View them now')}
|
||||
</a>
|
||||
</span>
|
||||
);
|
||||
} else if (this.props.showMissingMasterKeyMessage) {
|
||||
msg = (
|
||||
<span>
|
||||
{_('One or more master keys need a password.')}{' '}
|
||||
<a
|
||||
href="#"
|
||||
onClick={() => {
|
||||
onViewMasterKeysClick();
|
||||
}}
|
||||
>
|
||||
{_('Set the password')}
|
||||
</a>
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
messageComp = (
|
||||
<div style={styles.messageBox}>
|
||||
<span style={theme.textStyle}>{msg}</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
const messageComp = this.renderNotification(theme, styles);
|
||||
|
||||
const dialogInfo = PluginManager.instance().pluginDialogToShow(this.props.plugins);
|
||||
const pluginDialog = !dialogInfo ? null : <dialogInfo.Dialog {...dialogInfo.props} />;
|
||||
|
@ -672,7 +695,10 @@ const mapStateToProps = state => {
|
|||
folders: state.folders,
|
||||
notes: state.notes,
|
||||
hasDisabledSyncItems: state.hasDisabledSyncItems,
|
||||
hasDisabledEncryptionItems: state.hasDisabledEncryptionItems,
|
||||
showMissingMasterKeyMessage: state.notLoadedMasterKeys.length && state.masterKeys.length,
|
||||
showNeedUpgradingMasterKeyMessage: !!EncryptionService.instance().masterKeysThatNeedUpgrading(state.masterKeys).length,
|
||||
showShouldReencryptMessage: state.settings['encryption.shouldReencrypt'] >= Setting.SHOULD_REENCRYPT_YES,
|
||||
selectedFolderId: state.selectedFolderId,
|
||||
sidebarWidth: state.settings['style.sidebar.width'],
|
||||
noteListWidth: state.settings['style.noteList.width'],
|
||||
|
|
|
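A condensed sketch of how the MainScreen notification refactor above fits together: mapStateToProps derives boolean flags from the store, messageBoxVisible() ORs them together, and renderNotification() picks the matching banner. The flag names come from the diff; the component skeleton and message strings are simplified assumptions.

const React = require('react');

class NotificationExample extends React.Component {
	messageBoxVisible() {
		// One banner slot, shown whenever any warning condition holds.
		return this.props.hasDisabledSyncItems || this.props.showMissingMasterKeyMessage || this.props.showShouldReencryptMessage;
	}

	renderNotification() {
		if (!this.messageBoxVisible()) return null;
		let msg = null;
		if (this.props.hasDisabledSyncItems) msg = 'Some items cannot be synchronised.';
		else if (this.props.showMissingMasterKeyMessage) msg = 'One or more master keys need a password.';
		else if (this.props.showShouldReencryptMessage) msg = 'The default encryption method has been changed, you should re-encrypt your data.';
		return <div className="message-box">{msg}</div>;
	}

	render() {
		return <div>{this.renderNotification()}</div>;
	}
}

module.exports = NotificationExample;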
@ -7,7 +7,7 @@ class NavigatorComponent extends Component {
|
|||
UNSAFE_componentWillReceiveProps(newProps) {
|
||||
if (newProps.route) {
|
||||
const screenInfo = this.props.screens[newProps.route.routeName];
|
||||
let windowTitle = ['Joplin'];
|
||||
const windowTitle = ['Joplin'];
|
||||
if (screenInfo.title) {
|
||||
windowTitle.push(screenInfo.title());
|
||||
}
|
||||
|
|
|
@ -64,7 +64,7 @@ export default function NoteContentPropertiesDialog(props:NoteContentPropertiesD
|
|||
};
|
||||
|
||||
if (textProperties) {
|
||||
for (let key in textProperties) {
|
||||
for (const key in textProperties) {
|
||||
if (!textProperties.hasOwnProperty(key)) continue;
|
||||
const comp = createItemField(key, textProperties[key]);
|
||||
textComps.push(comp);
|
||||
|
|
|
@ -34,7 +34,7 @@ class NoteListComponent extends React.Component {
|
|||
// Pull request: https://github.com/laurent22/joplin/pull/2062
|
||||
const itemWidth = '100%';
|
||||
|
||||
let style = {
|
||||
const style = {
|
||||
root: {
|
||||
backgroundColor: theme.backgroundColor,
|
||||
},
|
||||
|
@ -184,7 +184,7 @@ class NoteListComponent extends React.Component {
|
|||
listItemTitleStyle.paddingLeft = !checkbox ? hPadding : 4;
|
||||
if (item.is_todo && !!item.todo_completed) listItemTitleStyle = Object.assign(listItemTitleStyle, this.style().listItemTitleCompleted);
|
||||
|
||||
let displayTitle = Note.displayTitle(item);
|
||||
const displayTitle = Note.displayTitle(item);
|
||||
let titleComp = null;
|
||||
|
||||
if (highlightedWords.length) {
|
||||
|
@ -284,9 +284,15 @@ class NoteListComponent extends React.Component {
|
|||
|
||||
if (prevProps.selectedNoteIds !== this.props.selectedNoteIds && this.props.selectedNoteIds.length === 1) {
|
||||
const id = this.props.selectedNoteIds[0];
|
||||
const doRefocus = this.props.notes.length < prevProps.notes.length;
|
||||
|
||||
for (let i = 0; i < this.props.notes.length; i++) {
|
||||
if (this.props.notes[i].id === id) {
|
||||
this.itemListRef.current.makeItemIndexVisible(i);
|
||||
if (doRefocus) {
|
||||
const ref = this.itemAnchorRef(id);
|
||||
if (ref) ref.focus();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -319,10 +325,8 @@ class NoteListComponent extends React.Component {
|
|||
// Down
|
||||
noteIndex += 1;
|
||||
}
|
||||
|
||||
if (noteIndex < 0) noteIndex = 0;
|
||||
if (noteIndex > this.props.notes.length - 1) noteIndex = this.props.notes.length - 1;
|
||||
|
||||
return noteIndex;
|
||||
}
|
||||
|
||||
|
@ -330,7 +334,7 @@ class NoteListComponent extends React.Component {
|
|||
const keyCode = event.keyCode;
|
||||
const noteIds = this.props.selectedNoteIds;
|
||||
|
||||
if (noteIds.length === 1 && (keyCode === 40 || keyCode === 38 || keyCode === 33 || keyCode === 34 || keyCode === 35 || keyCode == 36)) {
|
||||
if (noteIds.length > 0 && (keyCode === 40 || keyCode === 38 || keyCode === 33 || keyCode === 34 || keyCode === 35 || keyCode == 36)) {
|
||||
// DOWN / UP / PAGEDOWN / PAGEUP / END / HOME
|
||||
const noteId = noteIds[0];
|
||||
let noteIndex = BaseModel.modelIndexById(this.props.notes, noteId);
|
||||
|
@ -431,7 +435,7 @@ class NoteListComponent extends React.Component {
|
|||
render() {
|
||||
const theme = themeStyle(this.props.theme);
|
||||
const style = this.props.style;
|
||||
let notes = this.props.notes.slice();
|
||||
const notes = this.props.notes.slice();
|
||||
|
||||
if (!notes.length) {
|
||||
const padding = 10;
|
||||
|
|
|
@ -361,7 +361,7 @@ class NotePropertiesDialog extends React.Component {
|
|||
const noteComps = [];
|
||||
|
||||
if (formNote) {
|
||||
for (let key in formNote) {
|
||||
for (const key in formNote) {
|
||||
if (!formNote.hasOwnProperty(key)) continue;
|
||||
const comp = this.createNoteField(key, formNote[key]);
|
||||
noteComps.push(comp);
|
||||
|
|
|
@ -40,7 +40,7 @@ class NoteRevisionViewerComponent extends React.PureComponent {
|
|||
style() {
|
||||
const theme = themeStyle(this.props.theme);
|
||||
|
||||
let style = {
|
||||
const style = {
|
||||
root: {
|
||||
backgroundColor: theme.backgroundColor,
|
||||
display: 'flex',
|
||||
|
|
|
@ -19,7 +19,7 @@ class NoteSearchBarComponent extends React.Component {
|
|||
style() {
|
||||
const theme = themeStyle(this.props.theme);
|
||||
|
||||
let style = {
|
||||
const style = {
|
||||
root: Object.assign({}, theme.textStyle, {
|
||||
backgroundColor: theme.backgroundColor,
|
||||
color: theme.colorFaded,
|
||||
|
@ -130,7 +130,7 @@ class NoteSearchBarComponent extends React.Component {
|
|||
if (this.backgroundColor === undefined) {
|
||||
this.backgroundColor = theme.backgroundColor;
|
||||
}
|
||||
let buttonEnabled = (this.backgroundColor === theme.backgroundColor);
|
||||
const buttonEnabled = (this.backgroundColor === theme.backgroundColor);
|
||||
|
||||
const closeButton = this.buttonIconComponent('fa-times', this.closeButton_click, true);
|
||||
const previousButton = this.buttonIconComponent('fa-chevron-up', this.previousButton_click, buttonEnabled);
|
||||
|
|
|
@ -7,7 +7,7 @@ class NoteStatusBarComponent extends React.Component {
|
|||
style() {
|
||||
const theme = themeStyle(this.props.theme);
|
||||
|
||||
let style = {
|
||||
const style = {
|
||||
root: Object.assign({}, theme.textStyle, {
|
||||
backgroundColor: theme.backgroundColor,
|
||||
color: theme.colorFaded,
|
||||
|
|
|
@ -543,22 +543,22 @@ class NoteTextComponent extends React.Component {
|
|||
this.setState({ loading: true });
|
||||
|
||||
const stateNoteId = this.state.note ? this.state.note.id : null;
|
||||
let noteId = props.noteId;
|
||||
const noteId = props.noteId;
|
||||
let parentFolder = null;
|
||||
const isProvisionalNote = this.props.provisionalNoteIds.includes(noteId);
|
||||
|
||||
let scrollPercent = this.props.lastEditorScrollPercents[noteId];
|
||||
if (!scrollPercent) scrollPercent = 0;
|
||||
|
||||
let loadingNewNote = stateNoteId !== noteId;
|
||||
const loadingNewNote = stateNoteId !== noteId;
|
||||
this.lastLoadedNoteId_ = noteId;
|
||||
let note = noteId ? await Note.load(noteId) : null;
|
||||
const note = noteId ? await Note.load(noteId) : null;
|
||||
if (noteId !== this.lastLoadedNoteId_) return defer(); // Race condition - current note was changed while this one was loading
|
||||
if (options.noReloadIfLocalChanges && this.isModified()) return defer();
|
||||
|
||||
// If the note hasn't been changed, exit now
|
||||
if (this.state.note && note) {
|
||||
let diff = Note.diffObjects(this.state.note, note);
|
||||
const diff = Note.diffObjects(this.state.note, note);
|
||||
delete diff.type_;
|
||||
if (!Object.getOwnPropertyNames(diff).length) return defer();
|
||||
}
|
||||
|
@ -622,7 +622,7 @@ class NoteTextComponent extends React.Component {
|
|||
parentFolder = Folder.byId(props.folders, note.parent_id);
|
||||
}
|
||||
|
||||
let newState = {
|
||||
const newState = {
|
||||
note: note,
|
||||
lastSavedNote: Object.assign({}, note),
|
||||
webviewReady: webviewReady,
|
||||
|
@ -686,9 +686,9 @@ class NoteTextComponent extends React.Component {
|
|||
|
||||
for (let i = 0; i < newTags.length; ++i) {
|
||||
let found = false;
|
||||
let currNewTag = newTags[i];
|
||||
const currNewTag = newTags[i];
|
||||
for (let j = 0; j < oldTags.length; ++j) {
|
||||
let currOldTag = oldTags[j];
|
||||
const currOldTag = oldTags[j];
|
||||
if (currOldTag.id === currNewTag.id) {
|
||||
found = true;
|
||||
if (currOldTag.updated_time !== currNewTag.updated_time) {
|
||||
|
@ -956,6 +956,7 @@ class NoteTextComponent extends React.Component {
|
|||
document.querySelector('#note-editor').removeEventListener('paste', this.onEditorPaste_, true);
|
||||
document.querySelector('#note-editor').removeEventListener('keydown', this.onEditorKeyDown_);
|
||||
document.querySelector('#note-editor').removeEventListener('contextmenu', this.onEditorContextMenu_);
|
||||
this.editor_.editor.indent = this.indentOrig;
|
||||
}
|
||||
|
||||
this.editor_ = element;
|
||||
|
@ -1017,6 +1018,32 @@ class NoteTextComponent extends React.Component {
|
|||
|
||||
return this.$getIndent(line);
|
||||
};
|
||||
|
||||
// Markdown list indentation. (https://github.com/laurent22/joplin/pull/2713)
|
||||
// If the current line starts with `markup.list` token,
|
||||
// hitting `Tab` key indents the line instead of inserting tab at cursor.
|
||||
this.indentOrig = this.editor_.editor.indent;
|
||||
const indentOrig = this.indentOrig;
|
||||
this.editor_.editor.indent = function() {
|
||||
const range = this.getSelectionRange();
|
||||
if (range.isEmpty()) {
|
||||
const row = range.start.row;
|
||||
const tokens = this.session.getTokens(row);
|
||||
|
||||
if (tokens.length > 0 && tokens[0].type == 'markup.list') {
|
||||
if (tokens[0].value.search(/\d+\./) != -1) {
|
||||
// Resets numbered list to 1.
|
||||
this.session.replace({ start: { row, column: 0 }, end: { row, column: tokens[0].value.length } },
|
||||
tokens[0].value.replace(/\d+\./, '1.'));
|
||||
}
|
||||
|
||||
this.session.indentRows(row, row, '\t');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
indentOrig.call(this);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
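The indent override above rewrites an ordered-list marker to "1." before indenting the row. A standalone sketch of just that string transformation (the sample line is an assumption):

// Pressing Tab on a numbered list line resets its marker to 1. and indents it.
const line = '3. third item';
const renumbered = line.replace(/\d+\./, '1.');
const indented = `\t${renumbered}`;
console.log(JSON.stringify(indented)); // "\t1. third item"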
@ -1272,7 +1299,7 @@ class NoteTextComponent extends React.Component {
|
|||
try {
|
||||
if (!this.state.note && !args.noteIds) throw new Error('No notes selected for pdf export');
|
||||
|
||||
let noteIds = args.noteIds ? args.noteIds : [this.state.note.id];
|
||||
const noteIds = args.noteIds ? args.noteIds : [this.state.note.id];
|
||||
|
||||
let path = null;
|
||||
if (noteIds.length === 1) {
|
||||
|
@ -1425,14 +1452,14 @@ class NoteTextComponent extends React.Component {
|
|||
|
||||
if (selection && selection.start !== selection.end) {
|
||||
const selectedLines = replacementText !== null ? replacementText : this.state.note.body.substr(selection.start, selection.end - selection.start);
|
||||
let selectedStrings = byLine ? selectedLines.split(/\r?\n/) : [selectedLines];
|
||||
const selectedStrings = byLine ? selectedLines.split(/\r?\n/) : [selectedLines];
|
||||
|
||||
newBody = this.state.note.body.substr(0, selection.start);
|
||||
|
||||
for (let i = 0; i < selectedStrings.length; i++) {
|
||||
if (byLine == false) {
|
||||
let start = selectedStrings[i].search(/[^\s]/);
|
||||
let end = selectedStrings[i].search(/[^\s](?=[\s]*$)/);
|
||||
const start = selectedStrings[i].search(/[^\s]/);
|
||||
const end = selectedStrings[i].search(/[^\s](?=[\s]*$)/);
|
||||
newBody += selectedStrings[i].substr(0, start) + string1 + selectedStrings[i].substr(start, end - start + 1) + string2 + selectedStrings[i].substr(end + 1);
|
||||
if (this.state.note.body.substr(selection.end) === '') newBody = newBody.trim();
|
||||
} else { newBody += string1 + selectedStrings[i] + string2; }
|
||||
|
@ -1471,7 +1498,7 @@ class NoteTextComponent extends React.Component {
|
|||
editor.focus();
|
||||
});
|
||||
} else {
|
||||
let middleText = replacementText !== null ? replacementText : defaultText;
|
||||
const middleText = replacementText !== null ? replacementText : defaultText;
|
||||
const textOffset = this.currentTextOffset();
|
||||
const s1 = this.state.note.body.substr(0, textOffset);
|
||||
const s2 = this.state.note.body.substr(textOffset);
|
||||
|
@ -1514,9 +1541,9 @@ class NoteTextComponent extends React.Component {
|
|||
|
||||
toggleWrapSelection(strings1, strings2, defaultText) {
|
||||
const selection = this.textOffsetSelection();
|
||||
let string = this.state.note.body.substr(selection.start, selection.end - selection.start);
|
||||
const string = this.state.note.body.substr(selection.start, selection.end - selection.start);
|
||||
let replaced = false;
|
||||
for (var i = 0; i < strings1.length; i++) {
|
||||
for (let i = 0; i < strings1.length; i++) {
|
||||
if (string.startsWith(strings1[i]) && string.endsWith(strings1[i])) {
|
||||
this.wrapSelectionWithStrings('', '', '', string.substr(strings1[i].length, selection.end - selection.start - (2 * strings1[i].length)));
|
||||
replaced = true;
|
||||
|
@ -1543,10 +1570,10 @@ class NoteTextComponent extends React.Component {
|
|||
|
||||
commandTextCode() {
|
||||
const selection = this.textOffsetSelection();
|
||||
let string = this.state.note.body.substr(selection.start, selection.end - selection.start);
|
||||
const string = this.state.note.body.substr(selection.start, selection.end - selection.start);
|
||||
|
||||
// Look for newlines
|
||||
let match = string.match(/\r?\n/);
|
||||
const match = string.match(/\r?\n/);
|
||||
|
||||
if (match && match.length > 0) {
|
||||
// Follow the same newline style
|
||||
|
@ -1564,7 +1591,7 @@ class NoteTextComponent extends React.Component {
|
|||
this.wrapSelectionWithStrings(TemplateUtils.render(value));
|
||||
}
|
||||
|
||||
addListItem(string1, string2 = '', defaultText = '', byLine=false) {
|
||||
addListItem(string1, string2 = '', defaultText = '', byLine = false) {
|
||||
let newLine = '\n';
|
||||
const range = this.selectionRange_;
|
||||
if (!range || (range.start.row === range.end.row && !this.selectionRangeCurrentLine())) {
|
||||
|
@ -1653,6 +1680,7 @@ class NoteTextComponent extends React.Component {
|
|||
folderId: this.state.folder.id,
|
||||
noteId: note.id,
|
||||
});
|
||||
Folder.expandTree(this.props.folders, this.state.folder.parent_id);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
@ -1921,7 +1949,7 @@ class NoteTextComponent extends React.Component {
|
|||
const theme = themeStyle(this.props.theme);
|
||||
const visiblePanes = this.props.visiblePanes || ['editor', 'viewer'];
|
||||
const isTodo = note && !!note.is_todo;
|
||||
var keyboardMode = this.props.keyboardMode;
|
||||
let keyboardMode = this.props.keyboardMode;
|
||||
if (keyboardMode === 'default' || !keyboardMode) {
|
||||
keyboardMode = null;
|
||||
}
|
||||
|
@ -1978,7 +2006,7 @@ class NoteTextComponent extends React.Component {
|
|||
paddingRight: 8,
|
||||
marginRight: rootStyle.paddingLeft,
|
||||
color: theme.textStyle.color,
|
||||
fontSize: theme.textStyle.fontSize * 1.25 *1.5,
|
||||
fontSize: theme.textStyle.fontSize * 1.25 * 1.5,
|
||||
backgroundColor: theme.backgroundColor,
|
||||
border: '1px solid',
|
||||
borderColor: theme.dividerColor,
|
||||
|
@ -2055,11 +2083,11 @@ class NoteTextComponent extends React.Component {
|
|||
}
|
||||
|
||||
if (this.state.webviewReady && this.webviewRef_.current) {
|
||||
let html = this.state.bodyHtml;
|
||||
const html = this.state.bodyHtml;
|
||||
|
||||
const htmlHasChanged = this.lastSetHtml_ !== html;
|
||||
if (htmlHasChanged) {
|
||||
let options = {
|
||||
const options = {
|
||||
pluginAssets: this.state.lastRenderPluginAssets,
|
||||
downloadResources: Setting.value('sync.resourceDownloadMode'),
|
||||
};
|
||||
|
|
|
@ -109,7 +109,7 @@ function styles_(props:NoteTextProps) {
|
|||
paddingRight: 8,
|
||||
marginRight: theme.paddingLeft,
|
||||
color: theme.textStyle.color,
|
||||
fontSize: theme.textStyle.fontSize * 1.25 *1.5,
|
||||
fontSize: theme.textStyle.fontSize * 1.25 * 1.5,
|
||||
backgroundColor: theme.backgroundColor,
|
||||
border: '1px solid',
|
||||
borderColor: theme.dividerColor,
|
||||
|
|
|
@ -36,10 +36,12 @@ class NoteTextViewerComponent extends React.Component {
|
|||
const callName = event.data.name;
|
||||
const args = event.data.args;
|
||||
|
||||
if (this.props.onIpcMessage) this.props.onIpcMessage({
|
||||
channel: callName,
|
||||
args: args,
|
||||
});
|
||||
if (this.props.onIpcMessage) {
|
||||
this.props.onIpcMessage({
|
||||
channel: callName,
|
||||
args: args,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
domReady() {
|
||||
|
@ -57,7 +59,7 @@ class NoteTextViewerComponent extends React.Component {
|
|||
};
|
||||
}
|
||||
|
||||
for (let n in this.webviewListeners_) {
|
||||
for (const n in this.webviewListeners_) {
|
||||
if (!this.webviewListeners_.hasOwnProperty(n)) continue;
|
||||
const fn = this.webviewListeners_[n];
|
||||
wv.addEventListener(n, fn);
|
||||
|
@ -70,7 +72,7 @@ class NoteTextViewerComponent extends React.Component {
|
|||
const wv = this.webviewRef_.current;
|
||||
if (!wv || !this.initialized_) return;
|
||||
|
||||
for (let n in this.webviewListeners_) {
|
||||
for (const n in this.webviewListeners_) {
|
||||
if (!this.webviewListeners_.hasOwnProperty(n)) continue;
|
||||
const fn = this.webviewListeners_[n];
|
||||
wv.removeEventListener(n, fn);
|
||||
|
|
|
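The for...in loops above register a map of named listeners on the webview and later remove those same functions. A self-contained sketch of that bookkeeping (the listener map and the plain EventTarget stand in for the real webview and are assumptions):

const target = new EventTarget();

const webviewListeners = {
	'dom-ready': () => console.log('dom-ready'),
	'ipc-message': event => console.log('ipc-message', event.type),
};

function addListeners() {
	for (const n in webviewListeners) {
		if (!webviewListeners.hasOwnProperty(n)) continue;
		target.addEventListener(n, webviewListeners[n]);
	}
}

function removeListeners() {
	for (const n in webviewListeners) {
		if (!webviewListeners.hasOwnProperty(n)) continue;
		target.removeEventListener(n, webviewListeners[n]);
	}
}

addListeners();
target.dispatchEvent(new Event('dom-ready'));
removeListeners();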
@ -230,24 +230,27 @@ class PromptDialog extends React.Component {
|
|||
}
|
||||
|
||||
const buttonComps = [];
|
||||
if (buttonTypes.indexOf('ok') >= 0)
|
||||
if (buttonTypes.indexOf('ok') >= 0) {
|
||||
buttonComps.push(
|
||||
<button key="ok" style={styles.button} onClick={() => onClose(true, 'ok')}>
|
||||
{_('OK')}
|
||||
</button>
|
||||
);
|
||||
if (buttonTypes.indexOf('cancel') >= 0)
|
||||
}
|
||||
if (buttonTypes.indexOf('cancel') >= 0) {
|
||||
buttonComps.push(
|
||||
<button key="cancel" style={styles.button} onClick={() => onClose(false, 'cancel')}>
|
||||
{_('Cancel')}
|
||||
</button>
|
||||
);
|
||||
if (buttonTypes.indexOf('clear') >= 0)
|
||||
}
|
||||
if (buttonTypes.indexOf('clear') >= 0) {
|
||||
buttonComps.push(
|
||||
<button key="clear" style={styles.button} onClick={() => onClose(false, 'clear')}>
|
||||
{_('Clear')}
|
||||
</button>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={styles.modalLayer}>
|
||||
|
|
|
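A sketch of the brace style applied to PromptDialog above: an if whose consequent spans several lines now takes braces, while single-line consequents elsewhere in the diff stay brace-free. The button labels mirror the hunk; the rest is illustrative.

const buttonTypes = ['ok', 'cancel'];
const buttonComps = [];

// Single-line consequent, no braces needed:
if (buttonTypes.indexOf('clear') >= 0) buttonComps.push('Clear');

// Multi-line consequents now get braces:
if (buttonTypes.indexOf('ok') >= 0) {
	buttonComps.push(
		'OK'
	);
}
if (buttonTypes.indexOf('cancel') >= 0) {
	buttonComps.push(
		'Cancel'
	);
}

console.log(buttonComps); // [ 'OK', 'Cancel' ]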
@ -162,7 +162,7 @@ class ResourceScreenComponent extends React.Component<Props, State> {
|
|||
<Header style={headerStyle} />
|
||||
<div style={{ ...style, margin: '20px', overflow: 'scroll' }}>
|
||||
{this.state.isLoading && <div>{_('Please wait...')}</div>}
|
||||
{!this.state.isLoading &&<div>
|
||||
{!this.state.isLoading && <div>
|
||||
{!this.state.resources && <div>
|
||||
{_('No resources!')}
|
||||
</div>
|
||||
|
|
|
@ -73,7 +73,7 @@ export default function ShareNoteDialog(props:ShareNoteDialogProps) {
|
|||
useEffect(() => {
|
||||
async function fetchNotes() {
|
||||
const result = [];
|
||||
for (let noteId of props.noteIds) {
|
||||
for (const noteId of props.noteIds) {
|
||||
result.push(await Note.load(noteId));
|
||||
}
|
||||
setNotes(result);
|
||||
|
@ -96,12 +96,6 @@ export default function ShareNoteDialog(props:ShareNoteDialogProps) {
|
|||
clipboard.writeText(links.join('\n'));
|
||||
};
|
||||
|
||||
const synchronize = async () => {
|
||||
const synchronizer = await reg.syncTarget().synchronizer();
|
||||
await synchronizer.waitForSyncToFinish();
|
||||
await reg.scheduleSync(0);
|
||||
};
|
||||
|
||||
const shareLinkButton_click = async () => {
|
||||
let hasSynced = false;
|
||||
let tryToSync = false;
|
||||
|
@ -109,7 +103,7 @@ export default function ShareNoteDialog(props:ShareNoteDialogProps) {
|
|||
try {
|
||||
if (tryToSync) {
|
||||
setSharesState('synchronizing');
|
||||
await synchronize();
|
||||
await reg.waitForSyncFinishedThenSync();
|
||||
tryToSync = false;
|
||||
hasSynced = true;
|
||||
}
|
||||
|
@ -136,7 +130,7 @@ export default function ShareNoteDialog(props:ShareNoteDialogProps) {
|
|||
|
||||
if (sharedStatusChanged) {
|
||||
setSharesState('synchronizing');
|
||||
await synchronize();
|
||||
await reg.waitForSyncFinishedThenSync();
|
||||
setSharesState('creating');
|
||||
}
|
||||
|
||||
|
@ -186,7 +180,7 @@ export default function ShareNoteDialog(props:ShareNoteDialogProps) {
|
|||
|
||||
const renderNoteList = (notes:any) => {
|
||||
const noteComps = [];
|
||||
for (let noteId of Object.keys(notes)) {
|
||||
for (const noteId of Object.keys(notes)) {
|
||||
noteComps.push(renderNote(notes[noteId]));
|
||||
}
|
||||
return <div style={styles.noteList}>{noteComps}</div>;
|
||||
|
|
|
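The ShareNoteDialog change above removes a local synchronize() helper and calls a single registry method in both places that needed it. A hedged before/after sketch built only from the calls visible in this diff; the import path and the wrapper function are assumptions.

const { reg } = require('lib/registry.js'); // assumed import path

// Before: the dialog rolled its own wait-then-sync helper.
const synchronizeOld = async () => {
	const synchronizer = await reg.syncTarget().synchronizer();
	await synchronizer.waitForSyncToFinish();
	await reg.scheduleSync(0);
};

// After: the same behaviour comes from one registry call.
async function synchronizeNew() {
	await reg.waitForSyncFinishedThenSync();
}

module.exports = { synchronizeOld, synchronizeNew };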
@ -107,7 +107,7 @@ class SideBarComponent extends React.Component {
|
|||
|
||||
const itemHeight = 25;
|
||||
|
||||
let style = {
|
||||
const style = {
|
||||
root: {
|
||||
backgroundColor: theme.backgroundColor2,
|
||||
},
|
||||
|
@ -459,8 +459,8 @@ class SideBarComponent extends React.Component {
|
|||
let containerStyle = Object.assign({}, this.style(depth).listItemContainer);
|
||||
if (selected) containerStyle = Object.assign(containerStyle, this.style().listItemSelected);
|
||||
|
||||
let expandLinkStyle = Object.assign({}, this.style().listItemExpandIcon);
|
||||
let expandIconStyle = {
|
||||
const expandLinkStyle = Object.assign({}, this.style().listItemExpandIcon);
|
||||
const expandIconStyle = {
|
||||
visibility: hasChildren ? 'visible' : 'hidden',
|
||||
paddingLeft: 8 + depth * 10,
|
||||
};
|
||||
|
@ -562,18 +562,18 @@ class SideBarComponent extends React.Component {
|
|||
style.cursor = 'pointer';
|
||||
}
|
||||
|
||||
let headerClick = extraProps.onClick || null;
|
||||
const headerClick = extraProps.onClick || null;
|
||||
delete extraProps.onClick;
|
||||
|
||||
// check if toggling option is set.
|
||||
let toggleIcon = null;
|
||||
const toggleKey = `${key}IsExpanded`;
|
||||
if (extraProps.toggleblock) {
|
||||
let isExpanded = this.state[toggleKey];
|
||||
const isExpanded = this.state[toggleKey];
|
||||
toggleIcon = <i className={`fa ${isExpanded ? 'fa-chevron-down' : 'fa-chevron-left'}`} style={{ fontSize: style.fontSize * 0.75, marginRight: 12, marginLeft: 5, marginTop: style.fontSize * 0.125 }}></i>;
|
||||
}
|
||||
if (extraProps.selected) {
|
||||
style.backgroundColor =this.style().listItemSelected.backgroundColor;
|
||||
style.backgroundColor = this.style().listItemSelected.backgroundColor;
|
||||
}
|
||||
|
||||
const ref = this.anchorItemRef('headers', key);
|
||||
|
@ -645,7 +645,7 @@ class SideBarComponent extends React.Component {
|
|||
|
||||
const focusItem = focusItems[newIndex];
|
||||
|
||||
let actionName = `${focusItem.type.toUpperCase()}_SELECT`;
|
||||
const actionName = `${focusItem.type.toUpperCase()}_SELECT`;
|
||||
|
||||
this.props.dispatch({
|
||||
type: actionName,
|
||||
|
@ -712,7 +712,7 @@ class SideBarComponent extends React.Component {
|
|||
const style = Object.assign({}, this.style().button, { marginBottom: 5 });
|
||||
const iconName = 'fa-refresh';
|
||||
const label = type === 'sync' ? _('Synchronise') : _('Cancel');
|
||||
let iconStyle = { fontSize: style.fontSize, marginRight: 5 };
|
||||
const iconStyle = { fontSize: style.fontSize, marginRight: 5 };
|
||||
|
||||
if (type !== 'sync') {
|
||||
iconStyle.animation = 'icon-infinite-rotation 1s linear infinite';
|
||||
|
@ -743,7 +743,7 @@ class SideBarComponent extends React.Component {
|
|||
flexDirection: 'column',
|
||||
});
|
||||
|
||||
let items = [];
|
||||
const items = [];
|
||||
items.push(
|
||||
this.makeHeader('allNotesHeader', _('All notes'), 'fa-clone', {
|
||||
onClick: this.onAllNotesClick_,
|
||||
|
@ -798,7 +798,7 @@ class SideBarComponent extends React.Component {
|
|||
resourceFetcherText = _('Fetching resources: %d/%d', this.props.resourceFetcher.fetchingCount, this.props.resourceFetcher.toFetchCount);
|
||||
}
|
||||
|
||||
let lines = Synchronizer.reportToLines(this.props.syncReport);
|
||||
const lines = Synchronizer.reportToLines(this.props.syncReport);
|
||||
if (resourceFetcherText) lines.push(resourceFetcherText);
|
||||
if (decryptionReportText) lines.push(decryptionReportText);
|
||||
const syncReportText = [];
|
||||
|
|
|
@ -72,7 +72,7 @@ class StatusScreenComponent extends React.Component {
|
|||
}
|
||||
|
||||
const renderSectionHtml = (key, section) => {
|
||||
let itemsHtml = [];
|
||||
const itemsHtml = [];
|
||||
|
||||
itemsHtml.push(renderSectionTitleHtml(section.title, section.title));
|
||||
|
||||
|
@ -80,9 +80,9 @@ class StatusScreenComponent extends React.Component {
|
|||
itemsHtml.push(renderSectionRetryAllHtml(section.title, section.retryAllHandler));
|
||||
}
|
||||
|
||||
for (let n in section.body) {
|
||||
for (const n in section.body) {
|
||||
if (!section.body.hasOwnProperty(n)) continue;
|
||||
let item = section.body[n];
|
||||
const item = section.body[n];
|
||||
let text = '';
|
||||
|
||||
let retryLink = null;
|
||||
|
@ -118,10 +118,10 @@ class StatusScreenComponent extends React.Component {
|
|||
};
|
||||
|
||||
function renderBodyHtml(report) {
|
||||
let sectionsHtml = [];
|
||||
const sectionsHtml = [];
|
||||
|
||||
for (let i = 0; i < report.length; i++) {
|
||||
let section = report[i];
|
||||
const section = report[i];
|
||||
if (!section.body.length) continue;
|
||||
sectionsHtml.push(renderSectionHtml(i, section));
|
||||
}
|
||||
|
@ -129,7 +129,7 @@ class StatusScreenComponent extends React.Component {
|
|||
return <div>{sectionsHtml}</div>;
|
||||
}
|
||||
|
||||
let body = renderBodyHtml(this.state.report);
|
||||
const body = renderBodyHtml(this.state.report);
|
||||
|
||||
return (
|
||||
<div style={style}>
|
||||
|
|
|
@ -21,7 +21,7 @@ class ToolbarButton extends React.Component {
|
|||
}
|
||||
|
||||
const isEnabled = !('enabled' in this.props) || this.props.enabled === true;
|
||||
let classes = ['button'];
|
||||
const classes = ['button'];
|
||||
if (!isEnabled) classes.push('disabled');
|
||||
|
||||
const finalStyle = Object.assign({}, style, {
|
||||
|
|
|
@ -100,7 +100,7 @@ app().start(bridge().processArgv()).then(() => {
|
|||
} else {
|
||||
// If something goes wrong at this stage we don't have a console or a log file
|
||||
// so display the error in a message box.
|
||||
let msg = ['Fatal error:', error.message];
|
||||
const msg = ['Fatal error:', error.message];
|
||||
if (error.fileName) msg.push(error.fileName);
|
||||
if (error.lineNumber) msg.push(error.lineNumber);
|
||||
if (error.stack) msg.push(error.stack);
|
||||
|
|
Some files were not shown because too many files have changed in this diff.