mirror of https://github.com/laurent22/joplin.git
Desktop: Add support for OCR (#8975)
parent
0e847685ff
commit
bce94f1775
|
@ -753,6 +753,8 @@ packages/lib/services/database/addMigrationFile.js
|
|||
packages/lib/services/database/migrations/42.js
|
||||
packages/lib/services/database/migrations/43.js
|
||||
packages/lib/services/database/migrations/44.js
|
||||
packages/lib/services/database/migrations/45.js
|
||||
packages/lib/services/database/sqlStringToLines.js
|
||||
packages/lib/services/database/types.js
|
||||
packages/lib/services/debug/populateDatabase.js
|
||||
packages/lib/services/e2ee/EncryptionService.test.js
|
||||
|
@ -799,6 +801,13 @@ packages/lib/services/keychain/KeychainServiceDriverBase.js
|
|||
packages/lib/services/noteList/defaultLeftToRightListRenderer.js
|
||||
packages/lib/services/noteList/defaultListRenderer.js
|
||||
packages/lib/services/noteList/renderers.js
|
||||
packages/lib/services/ocr/OcrDriverBase.js
|
||||
packages/lib/services/ocr/OcrService.test.js
|
||||
packages/lib/services/ocr/OcrService.js
|
||||
packages/lib/services/ocr/drivers/OcrDriverTesseract.js
|
||||
packages/lib/services/ocr/utils/filterOcrText.test.js
|
||||
packages/lib/services/ocr/utils/filterOcrText.js
|
||||
packages/lib/services/ocr/utils/types.js
|
||||
packages/lib/services/plugins/BasePlatformImplementation.js
|
||||
packages/lib/services/plugins/BasePluginRunner.js
|
||||
packages/lib/services/plugins/MenuController.js
|
||||
|
@ -876,6 +885,7 @@ packages/lib/services/rest/utils/paginatedResults.js
|
|||
packages/lib/services/rest/utils/readonlyProperties.js
|
||||
packages/lib/services/rest/utils/requestFields.js
|
||||
packages/lib/services/rest/utils/requestPaginationOptions.js
|
||||
packages/lib/services/searchengine/SearchEngine.resources.test.js
|
||||
packages/lib/services/searchengine/SearchEngine.js
|
||||
packages/lib/services/searchengine/SearchEngineUtils.test.js
|
||||
packages/lib/services/searchengine/SearchEngineUtils.js
|
||||
|
@ -923,6 +933,7 @@ packages/lib/services/synchronizer/utils/handleSyncStartupOperation.js
|
|||
packages/lib/services/synchronizer/utils/resourceRemotePath.js
|
||||
packages/lib/services/synchronizer/utils/syncDeleteStep.js
|
||||
packages/lib/services/synchronizer/utils/types.js
|
||||
packages/lib/shim-init-node.js
|
||||
packages/lib/shim.js
|
||||
packages/lib/testing/syncTargetUtils.js
|
||||
packages/lib/testing/test-utils-synchronizer.js
|
||||
|
|
|
@ -33,6 +33,12 @@ jobs:
|
|||
# https://yarnpkg.com/getting-started/install
|
||||
corepack enable
|
||||
|
||||
- name: Install macOs dependencies
|
||||
if: runner.os == 'macOS'
|
||||
run: |
|
||||
# Required for building the canvas package
|
||||
brew install pango
|
||||
|
||||
# See github-action-main.yml for explanation
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
|
|
|
@ -59,6 +59,12 @@ jobs:
|
|||
# testing.
|
||||
sudo apt-get install -y xvfb
|
||||
|
||||
- name: Install macOs dependencies
|
||||
if: runner.os == 'macOS'
|
||||
run: |
|
||||
# Required for building the canvas package
|
||||
brew install pango
|
||||
|
||||
- name: Install Docker Engine
|
||||
# if: runner.os == 'Linux' && startsWith(github.ref, 'refs/tags/server-v')
|
||||
if: runner.os == 'Linux'
|
||||
|
|
|
@ -733,6 +733,8 @@ packages/lib/services/database/addMigrationFile.js
|
|||
packages/lib/services/database/migrations/42.js
|
||||
packages/lib/services/database/migrations/43.js
|
||||
packages/lib/services/database/migrations/44.js
|
||||
packages/lib/services/database/migrations/45.js
|
||||
packages/lib/services/database/sqlStringToLines.js
|
||||
packages/lib/services/database/types.js
|
||||
packages/lib/services/debug/populateDatabase.js
|
||||
packages/lib/services/e2ee/EncryptionService.test.js
|
||||
|
@ -779,6 +781,13 @@ packages/lib/services/keychain/KeychainServiceDriverBase.js
|
|||
packages/lib/services/noteList/defaultLeftToRightListRenderer.js
|
||||
packages/lib/services/noteList/defaultListRenderer.js
|
||||
packages/lib/services/noteList/renderers.js
|
||||
packages/lib/services/ocr/OcrDriverBase.js
|
||||
packages/lib/services/ocr/OcrService.test.js
|
||||
packages/lib/services/ocr/OcrService.js
|
||||
packages/lib/services/ocr/drivers/OcrDriverTesseract.js
|
||||
packages/lib/services/ocr/utils/filterOcrText.test.js
|
||||
packages/lib/services/ocr/utils/filterOcrText.js
|
||||
packages/lib/services/ocr/utils/types.js
|
||||
packages/lib/services/plugins/BasePlatformImplementation.js
|
||||
packages/lib/services/plugins/BasePluginRunner.js
|
||||
packages/lib/services/plugins/MenuController.js
|
||||
|
@ -856,6 +865,7 @@ packages/lib/services/rest/utils/paginatedResults.js
|
|||
packages/lib/services/rest/utils/readonlyProperties.js
|
||||
packages/lib/services/rest/utils/requestFields.js
|
||||
packages/lib/services/rest/utils/requestPaginationOptions.js
|
||||
packages/lib/services/searchengine/SearchEngine.resources.test.js
|
||||
packages/lib/services/searchengine/SearchEngine.js
|
||||
packages/lib/services/searchengine/SearchEngineUtils.test.js
|
||||
packages/lib/services/searchengine/SearchEngineUtils.js
|
||||
|
@ -903,6 +913,7 @@ packages/lib/services/synchronizer/utils/handleSyncStartupOperation.js
|
|||
packages/lib/services/synchronizer/utils/resourceRemotePath.js
|
||||
packages/lib/services/synchronizer/utils/syncDeleteStep.js
|
||||
packages/lib/services/synchronizer/utils/types.js
|
||||
packages/lib/shim-init-node.js
|
||||
packages/lib/shim.js
|
||||
packages/lib/testing/syncTargetUtils.js
|
||||
packages/lib/testing/test-utils-synchronizer.js
|
||||
|
|
|
@ -0,0 +1,15 @@
|
|||
|
||||
# We remove the `canvas` optional dependency because electron-rebuild fails to build it, and
|
||||
# the `canvas` API is already part of Electron
|
||||
diff --git a/package.json b/package.json
|
||||
index 105811f53d508486e08a60dc1b6e437cd24d7427..dea6a4e6612c4a4006cc482e46ff5270dcfda1e5 100644
|
||||
--- a/package.json
|
||||
+++ b/package.json
|
||||
@@ -13,7 +13,6 @@
|
||||
"bugs": "https://github.com/mozilla/pdf.js/issues",
|
||||
"license": "Apache-2.0",
|
||||
"optionalDependencies": {
|
||||
- "canvas": "^2.11.2",
|
||||
"path2d-polyfill": "^2.0.1"
|
||||
},
|
||||
"browser": {
|
|
@ -31,7 +31,7 @@
|
|||
"crowdinUpload": "crowdin upload",
|
||||
"cspell": "cspell",
|
||||
"dependencyTree": "madge",
|
||||
"generateDatabaseTypes": "node packages/tools/generate-database-types",
|
||||
"generateTypes": "node packages/tools/generate-database-types",
|
||||
"linkChecker": "linkchecker https://joplinapp.org/ && linkchecker --check-extern https://joplinapp.org/api/references/plugin_api/classes/joplin.html",
|
||||
"linter-ci": "eslint --resolve-plugins-relative-to . --quiet --ext .js --ext .jsx --ext .ts --ext .tsx",
|
||||
"linter-interactive": "eslint-interactive --resolve-plugins-relative-to . --fix --quiet --ext .js --ext .jsx --ext .ts --ext .tsx",
|
||||
|
@ -105,6 +105,7 @@
|
|||
"react-native-vosk@0.1.12": "patch:react-native-vosk@npm%3A0.1.12#./.yarn/patches/react-native-vosk-npm-0.1.12-76b1caaae8.patch",
|
||||
"eslint": "patch:eslint@8.52.0#./.yarn/patches/eslint-npm-8.39.0-d92bace04d.patch",
|
||||
"app-builder-lib@24.4.0": "patch:app-builder-lib@npm%3A24.4.0#./.yarn/patches/app-builder-lib-npm-24.4.0-05322ff057.patch",
|
||||
"react-native@0.71.10": "patch:react-native@npm%3A0.71.10#./.yarn/patches/react-native-animation-fix/react-native-npm-0.71.10-f9c32562d8.patch"
|
||||
"react-native@0.71.10": "patch:react-native@npm%3A0.71.10#./.yarn/patches/react-native-animation-fix/react-native-npm-0.71.10-f9c32562d8.patch",
|
||||
"pdfjs-dist": "patch:pdfjs-dist@npm%3A3.11.174#./.yarn/patches/pdfjs-dist-npm-3.11.174-67f2fee6d6.patch"
|
||||
}
|
||||
}
|
||||
|
|
Binary file not shown.
Binary file not shown.
After Width: | Height: | Size: 23 KiB |
Binary file not shown.
After Width: | Height: | Size: 117 KiB |
|
@ -14,6 +14,8 @@ style.min.css
|
|||
build/lib/
|
||||
vendor/*
|
||||
!vendor/loadEmojiLib.js
|
||||
build/pdf.worker.min.js
|
||||
build/tesseract.js*
|
||||
test-results/
|
||||
playwright-report/
|
||||
playwright/.cache/
|
||||
|
|
|
@ -63,11 +63,14 @@ import ShareService from '@joplin/lib/services/share/ShareService';
|
|||
import checkForUpdates from './checkForUpdates';
|
||||
import { AppState } from './app.reducer';
|
||||
import syncDebugLog from '@joplin/lib/services/synchronizer/syncDebugLog';
|
||||
import eventManager from '@joplin/lib/eventManager';
|
||||
import eventManager, { EventName } from '@joplin/lib/eventManager';
|
||||
import path = require('path');
|
||||
import { checkPreInstalledDefaultPlugins, installDefaultPlugins, setSettingsForDefaultPlugins } from '@joplin/lib/services/plugins/defaultPlugins/defaultPluginsUtils';
|
||||
import userFetcher, { initializeUserFetcher } from '@joplin/lib/utils/userFetcher';
|
||||
import { parseNotesParent } from '@joplin/lib/reducer';
|
||||
import OcrService from '@joplin/lib/services/ocr/OcrService';
|
||||
import OcrDriverTesseract from '@joplin/lib/services/ocr/drivers/OcrDriverTesseract';
|
||||
import SearchEngine from '@joplin/lib/services/searchengine/SearchEngine';
|
||||
import { PackageInfo } from '@joplin/lib/versionInfo';
|
||||
|
||||
const pluginClasses = [
|
||||
|
@ -83,6 +86,7 @@ class Application extends BaseApplication {
|
|||
|
||||
private checkAllPluginStartedIID_: any = null;
|
||||
private initPluginServiceDone_ = false;
|
||||
private ocrService_: OcrService;
|
||||
|
||||
public constructor() {
|
||||
super();
|
||||
|
@ -121,6 +125,10 @@ class Application extends BaseApplication {
|
|||
this.updateTray();
|
||||
}
|
||||
|
||||
if (action.type === 'SETTING_UPDATE_ONE' && action.key === 'ocr.enabled' || action.type === 'SETTING_UPDATE_ALL') {
|
||||
this.setupOcrService();
|
||||
}
|
||||
|
||||
if (action.type === 'SETTING_UPDATE_ONE' && action.key === 'style.editor.fontFamily' || action.type === 'SETTING_UPDATE_ALL') {
|
||||
this.updateEditorFont();
|
||||
}
|
||||
|
@ -355,6 +363,34 @@ class Application extends BaseApplication {
|
|||
Setting.setValue('wasClosedSuccessfully', false);
|
||||
}
|
||||
|
||||
private setupOcrService() {
|
||||
if (Setting.value('ocr.enabled')) {
|
||||
if (!this.ocrService_) {
|
||||
const Tesseract = (window as any).Tesseract;
|
||||
|
||||
const driver = new OcrDriverTesseract(
|
||||
{ createWorker: Tesseract.createWorker },
|
||||
`${bridge().buildDir()}/tesseract.js/worker.min.js`,
|
||||
`${bridge().buildDir()}/tesseract.js-core`,
|
||||
);
|
||||
|
||||
this.ocrService_ = new OcrService(driver);
|
||||
}
|
||||
|
||||
void this.ocrService_.runInBackground();
|
||||
} else {
|
||||
if (!this.ocrService_) return;
|
||||
void this.ocrService_.stopRunInBackground();
|
||||
}
|
||||
|
||||
const handleResourceChange = () => {
|
||||
void this.ocrService_.maintenance();
|
||||
};
|
||||
|
||||
eventManager.on(EventName.ResourceCreate, handleResourceChange);
|
||||
eventManager.on(EventName.ResourceChange, handleResourceChange);
|
||||
}
|
||||
|
||||
public async start(argv: string[]): Promise<any> {
|
||||
// If running inside a package, the command line, instead of being "node.exe <path> <flags>" is "joplin.exe <flags>" so
|
||||
// insert an extra argument so that they can be processed in a consistent way everywhere.
|
||||
|
@ -571,7 +607,7 @@ class Application extends BaseApplication {
|
|||
// Forwards the local event to the global event manager, so that it can
|
||||
// be picked up by the plugin manager.
|
||||
ResourceEditWatcher.instance().on('resourceChange', (event: any) => {
|
||||
eventManager.emit('resourceChange', event);
|
||||
eventManager.emit(EventName.ResourceChange, event);
|
||||
});
|
||||
|
||||
RevisionService.instance().runInBackground();
|
||||
|
@ -587,6 +623,8 @@ class Application extends BaseApplication {
|
|||
bridge: bridge(),
|
||||
debug: new DebugService(reg.db()),
|
||||
resourceService: ResourceService.instance(),
|
||||
searchEngine: SearchEngine.instance(),
|
||||
ocrService: () => this.ocrService_,
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -600,6 +638,12 @@ class Application extends BaseApplication {
|
|||
|
||||
this.startRotatingLogMaintenance(Setting.value('profileDir'));
|
||||
|
||||
await this.setupOcrService();
|
||||
|
||||
eventManager.on(EventName.OcrServiceResourcesProcessed, () => {
|
||||
SearchEngine.instance().scheduleSyncTables();
|
||||
});
|
||||
|
||||
// await populateDatabase(reg.db(), {
|
||||
// clearDatabase: true,
|
||||
// folderCount: 1000,
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { CommandRuntime, CommandDeclaration, CommandContext } from '@joplin/lib/services/CommandService';
|
||||
import eventManager from '@joplin/lib/eventManager';
|
||||
import eventManager, { EventName } from '@joplin/lib/eventManager';
|
||||
import { _ } from '@joplin/lib/locale';
|
||||
import { stateUtils } from '@joplin/lib/reducer';
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
|
@ -45,7 +45,7 @@ export const runtime = (comp: any): CommandRuntime => {
|
|||
|
||||
if (newNote) {
|
||||
await Note.save(newNote);
|
||||
eventManager.emit('alarmChange', { noteId: note.id, note: newNote });
|
||||
eventManager.emit(EventName.AlarmChange, { noteId: note.id, note: newNote });
|
||||
}
|
||||
|
||||
comp.setState({ promptOptions: null });
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import { CommandRuntime, CommandDeclaration, CommandContext } from '@joplin/lib/services/CommandService';
|
||||
import { _ } from '@joplin/lib/locale';
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
import eventManager from '@joplin/lib/eventManager';
|
||||
import eventManager, { EventName } from '@joplin/lib/eventManager';
|
||||
|
||||
export const declaration: CommandDeclaration = {
|
||||
name: 'toggleNoteType',
|
||||
|
@ -22,7 +22,7 @@ export const runtime = (): CommandRuntime => {
|
|||
todo_due: newNote.todo_due,
|
||||
todo_completed: newNote.todo_completed,
|
||||
};
|
||||
eventManager.emit('noteTypeToggle', { noteId: note.id, note: eventNote });
|
||||
eventManager.emit(EventName.NoteTypeToggle, { noteId: note.id, note: eventNote });
|
||||
}
|
||||
},
|
||||
enabledCondition: '!noteIsReadOnly',
|
||||
|
|
|
@ -25,6 +25,7 @@ import { ProfileConfig } from '@joplin/lib/services/profileConfig/types';
|
|||
import PluginService, { PluginSettings } from '@joplin/lib/services/plugins/PluginService';
|
||||
import { getListRendererById, getListRendererIds } from '@joplin/lib/services/noteList/renderers';
|
||||
import useAsyncEffect from '@joplin/lib/hooks/useAsyncEffect';
|
||||
import { EventName } from '@joplin/lib/eventManager';
|
||||
const packageInfo: PackageInfo = require('../packageInfo.js');
|
||||
const { clipboard } = require('electron');
|
||||
const Menu = bridge().Menu;
|
||||
|
@ -1011,10 +1012,10 @@ function useMenu(props: Props) {
|
|||
setKeymapLastChangeTime(Date.now());
|
||||
}
|
||||
|
||||
KeymapService.instance().on('keymapChange', onKeymapChange);
|
||||
KeymapService.instance().on(EventName.KeymapChange, onKeymapChange);
|
||||
|
||||
return () => {
|
||||
KeymapService.instance().off('keymapChange', onKeymapChange);
|
||||
KeymapService.instance().off(EventName.KeymapChange, onKeymapChange);
|
||||
};
|
||||
}, []);
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@ import { EditorCommand } from '../../../utils/types';
|
|||
import shim from '@joplin/lib/shim';
|
||||
import { reg } from '@joplin/lib/registry';
|
||||
import setupVim from './setupVim';
|
||||
import { EventName } from '@joplin/lib/eventManager';
|
||||
|
||||
export default function useKeymap(CodeMirror: any) {
|
||||
|
||||
|
@ -174,7 +175,7 @@ export default function useKeymap(CodeMirror: any) {
|
|||
const keymapService = KeymapService.instance();
|
||||
|
||||
registerKeymap();
|
||||
keymapService.on('keymapChange', registerKeymap);
|
||||
keymapService.on(EventName.KeymapChange, registerKeymap);
|
||||
|
||||
setupEmacs();
|
||||
setupVim(CodeMirror);
|
||||
|
|
|
@ -19,10 +19,10 @@ import ResourceEditWatcher from '@joplin/lib/services/ResourceEditWatcher/index'
|
|||
import CommandService from '@joplin/lib/services/CommandService';
|
||||
import ToolbarButton from '../ToolbarButton/ToolbarButton';
|
||||
import Button, { ButtonLevel } from '../Button/Button';
|
||||
import eventManager from '@joplin/lib/eventManager';
|
||||
import eventManager, { EventName } from '@joplin/lib/eventManager';
|
||||
import { AppState } from '../../app.reducer';
|
||||
import ToolbarButtonUtils from '@joplin/lib/services/commands/ToolbarButtonUtils';
|
||||
import { _ } from '@joplin/lib/locale';
|
||||
import { _, _n } from '@joplin/lib/locale';
|
||||
import TagList from '../TagList';
|
||||
import NoteTitleBar from './NoteTitle/NoteTitleBar';
|
||||
import markupLanguageUtils from '../../utils/markupLanguageUtils';
|
||||
|
@ -48,6 +48,7 @@ import ItemChange from '@joplin/lib/models/ItemChange';
|
|||
import PlainEditor from './NoteBody/PlainEditor/PlainEditor';
|
||||
import CodeMirror6 from './NoteBody/CodeMirror/v6/CodeMirror';
|
||||
import CodeMirror5 from './NoteBody/CodeMirror/v5/CodeMirror';
|
||||
import { openItemById } from './utils/contextMenu';
|
||||
import { namespacedKey } from '@joplin/lib/services/plugins/api/JoplinSettings';
|
||||
|
||||
const commands = [
|
||||
|
@ -135,7 +136,7 @@ function NoteEditor(props: NoteEditorProps) {
|
|||
id: formNote.id,
|
||||
});
|
||||
|
||||
eventManager.emit('noteContentChange', { note: savedNote });
|
||||
eventManager.emit(EventName.NoteContentChange, { note: savedNote });
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -367,11 +368,11 @@ function NoteEditor(props: NoteEditorProps) {
|
|||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
eventManager.on('alarmChange', onNotePropertyChange);
|
||||
eventManager.on(EventName.AlarmChange, onNotePropertyChange);
|
||||
ExternalEditWatcher.instance().on('noteChange', externalEditWatcher_noteChange);
|
||||
|
||||
return () => {
|
||||
eventManager.off('alarmChange', onNotePropertyChange);
|
||||
eventManager.off(EventName.AlarmChange, onNotePropertyChange);
|
||||
ExternalEditWatcher.instance().off('noteChange', externalEditWatcher_noteChange);
|
||||
};
|
||||
}, [externalEditWatcher_noteChange, onNotePropertyChange]);
|
||||
|
@ -499,6 +500,12 @@ function NoteEditor(props: NoteEditorProps) {
|
|||
setShowRevisions(false);
|
||||
}, []);
|
||||
|
||||
const onBannerResourceClick = useCallback(async (event: React.MouseEvent<HTMLAnchorElement>) => {
|
||||
event.preventDefault();
|
||||
const resourceId = event.currentTarget.getAttribute('data-resource-id');
|
||||
await openItemById(resourceId, props.dispatch);
|
||||
}, [props.dispatch]);
|
||||
|
||||
if (showRevisions) {
|
||||
const theme = themeStyle(props.themeId);
|
||||
|
||||
|
@ -568,6 +575,24 @@ function NoteEditor(props: NoteEditorProps) {
|
|||
);
|
||||
}
|
||||
|
||||
const renderResourceInSearchResultsNotification = () => {
|
||||
const resourceResults = props.searchResults.filter(r => r.id === props.noteId && r.item_type === ModelType.Resource);
|
||||
if (!resourceResults.length) return null;
|
||||
|
||||
const renderResource = (id: string, title: string) => {
|
||||
return <li key={id}><a data-resource-id={id} onClick={onBannerResourceClick} href="#">{title}</a></li>;
|
||||
};
|
||||
|
||||
return (
|
||||
<div style={styles.resourceWatchBanner}>
|
||||
<p style={styles.resourceWatchBannerLine}>{_n('The following attachment matches your search query:', 'The following attachments match your search query:', resourceResults.length)}</p>
|
||||
<ul>
|
||||
{resourceResults.map(r => renderResource(r.item_id, r.title))}
|
||||
</ul>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
function renderSearchInfo() {
|
||||
const theme = themeStyle(props.themeId);
|
||||
if (formNoteFolder && ['Search', 'Tag', 'SmartFilter'].includes(props.notesParentType)) {
|
||||
|
@ -603,6 +628,7 @@ function NoteEditor(props: NoteEditorProps) {
|
|||
<div style={styles.root} onDrop={onDrop}>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
{renderResourceWatchingNotification()}
|
||||
{renderResourceInSearchResultsNotification()}
|
||||
<NoteTitleBar
|
||||
titleInputRef={titleInputRef}
|
||||
themeId={props.themeId}
|
||||
|
@ -675,6 +701,7 @@ const mapStateToProps = (state: AppState) => {
|
|||
useCustomPdfViewer: false,
|
||||
syncUserId: state.settings['sync.userId'],
|
||||
shareCacheSetting: state.settings['sync.shareCache'],
|
||||
searchResults: state.searchResults,
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@ import { MarkupLanguage } from '@joplin/renderer';
|
|||
import { RenderResult, RenderResultPluginAsset } from '@joplin/renderer/MarkupToHtml';
|
||||
import { MarkupToHtmlOptions } from './useMarkupToHtml';
|
||||
import { Dispatch } from 'redux';
|
||||
import { ProcessResultsRow } from '@joplin/lib/services/searchengine/SearchEngine';
|
||||
|
||||
export interface AllAssetsOptions {
|
||||
contentMaxWidthTarget?: string;
|
||||
|
@ -46,6 +47,7 @@ export interface NoteEditorProps {
|
|||
useCustomPdfViewer: boolean;
|
||||
shareCacheSetting: string;
|
||||
syncUserId: string;
|
||||
searchResults: ProcessResultsRow[];
|
||||
}
|
||||
|
||||
export interface NoteBodyEditorRef {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import * as React from 'react';
|
||||
import { useMemo, useEffect, useState, useRef, useCallback } from 'react';
|
||||
import { AppState } from '../../app.reducer';
|
||||
import eventManager from '@joplin/lib/eventManager';
|
||||
import eventManager, { EventName } from '@joplin/lib/eventManager';
|
||||
import NoteListUtils from '../utils/NoteListUtils';
|
||||
import { _ } from '@joplin/lib/locale';
|
||||
import time from '@joplin/lib/time';
|
||||
|
@ -202,7 +202,7 @@ const NoteListComponent = (props: Props) => {
|
|||
todo_completed: checked ? time.unixMs() : 0,
|
||||
};
|
||||
await Note.save(newNote, { userSideValidation: true });
|
||||
eventManager.emit('todoToggle', { noteId: item.id, note: newNote });
|
||||
eventManager.emit(EventName.TodoToggle, { noteId: item.id, note: newNote });
|
||||
};
|
||||
|
||||
const noteItem_titleClick = async (event: any, item: any) => {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { utils as pluginUtils, PluginStates } from '@joplin/lib/services/plugins/reducer';
|
||||
import CommandService from '@joplin/lib/services/CommandService';
|
||||
import eventManager from '@joplin/lib/eventManager';
|
||||
import eventManager, { EventName } from '@joplin/lib/eventManager';
|
||||
import InteropService from '@joplin/lib/services/interop/InteropService';
|
||||
import MenuUtils from '@joplin/lib/services/commands/MenuUtils';
|
||||
import InteropServiceHelper from '../../InteropServiceHelper';
|
||||
|
@ -74,7 +74,7 @@ export default class NoteListUtils {
|
|||
const newNote = Note.changeNoteType(note, type);
|
||||
if (newNote === note) continue;
|
||||
await Note.save(newNote, { userSideValidation: true });
|
||||
eventManager.emit('noteTypeToggle', { noteId: note.id });
|
||||
eventManager.emit(EventName.NoteTypeToggle, { noteId: note.id });
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
<link rel="stylesheet" href="vendor/lib/smalltalk/css/smalltalk.css">
|
||||
<link rel="stylesheet" href="vendor/lib/roboto-fontface/css/roboto/roboto-fontface.css">
|
||||
<link rel="stylesheet" href="vendor/lib/codemirror/lib/codemirror.css">
|
||||
<script src="./node_modules/tesseract.js/dist/tesseract.min.js"></script>
|
||||
|
||||
<style>
|
||||
.smalltalk {
|
||||
|
|
|
@ -30,6 +30,7 @@ const { FileApiDriverLocal } = require('@joplin/lib/file-api-driver-local');
|
|||
const React = require('react');
|
||||
const nodeSqlite = require('sqlite3');
|
||||
const initLib = require('@joplin/lib/initLib').default;
|
||||
const pdfJs = require('pdfjs-dist');
|
||||
|
||||
const main = async () => {
|
||||
if (bridge().env() === 'dev') {
|
||||
|
@ -98,12 +99,15 @@ const main = async () => {
|
|||
return p.version;
|
||||
}
|
||||
|
||||
pdfJs.GlobalWorkerOptions.workerSrc = `${bridge().electronApp().buildDir()}/pdf.worker.min.js`;
|
||||
|
||||
shimInit({
|
||||
keytar,
|
||||
React,
|
||||
appVersion,
|
||||
electronBridge: bridge(),
|
||||
nodeSqlite,
|
||||
pdfJs,
|
||||
});
|
||||
|
||||
// Disable drag and drop of links inside application (which would
|
||||
|
|
|
@ -36,6 +36,9 @@
|
|||
"build/icons/**",
|
||||
"build/images/**",
|
||||
"build/defaultPlugins/**",
|
||||
"build/pdf.worker.min.js",
|
||||
"build/tesseract.js/**",
|
||||
"build/tesseract.js-core/**",
|
||||
"build/7zip/**"
|
||||
],
|
||||
"afterAllArtifactBuild": "./generateSha512.js",
|
||||
|
@ -126,6 +129,7 @@
|
|||
"@types/react": "18.2.37",
|
||||
"@types/react-redux": "7.1.31",
|
||||
"@types/styled-components": "5.1.32",
|
||||
"@types/tesseract.js": "2.0.0",
|
||||
"electron": "26.5.0",
|
||||
"electron-builder": "24.8.0",
|
||||
"glob": "10.3.10",
|
||||
|
@ -167,6 +171,7 @@
|
|||
"node-fetch": "2.6.7",
|
||||
"node-notifier": "10.0.1",
|
||||
"node-rsa": "1.1.1",
|
||||
"pdfjs-dist": "3.11.174",
|
||||
"pretty-bytes": "5.6.0",
|
||||
"re-resizable": "6.9.11",
|
||||
"react": "18.2.0",
|
||||
|
@ -184,6 +189,7 @@
|
|||
"styled-components": "5.3.11",
|
||||
"styled-system": "5.1.5",
|
||||
"taboverride": "4.0.3",
|
||||
"tesseract.js": "4.1.2",
|
||||
"tinymce": "5.10.6"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ import { _ } from '@joplin/lib/locale';
|
|||
import { themeStyle } from '@joplin/lib/theme';
|
||||
import SearchEngine from '@joplin/lib/services/searchengine/SearchEngine';
|
||||
import gotoAnythingStyleQuery from '@joplin/lib/services/searchengine/gotoAnythingStyleQuery';
|
||||
import BaseModel from '@joplin/lib/BaseModel';
|
||||
import BaseModel, { ModelType } from '@joplin/lib/BaseModel';
|
||||
import Tag from '@joplin/lib/models/Tag';
|
||||
import Folder from '@joplin/lib/models/Folder';
|
||||
import Note from '@joplin/lib/models/Note';
|
||||
|
@ -19,13 +19,15 @@ import { mergeOverlappingIntervals } from '@joplin/lib/ArrayUtils';
|
|||
import markupLanguageUtils from '../utils/markupLanguageUtils';
|
||||
import focusEditorIfEditorCommand from '@joplin/lib/services/commands/focusEditorIfEditorCommand';
|
||||
import Logger from '@joplin/utils/Logger';
|
||||
import { MarkupToHtml } from '@joplin/renderer';
|
||||
import { MarkupLanguage, MarkupToHtml } from '@joplin/renderer';
|
||||
import Resource from '@joplin/lib/models/Resource';
|
||||
import { NoteEntity, ResourceEntity } from '@joplin/lib/services/database/types';
|
||||
|
||||
const logger = Logger.create('GotoAnything');
|
||||
|
||||
const PLUGIN_NAME = 'gotoAnything';
|
||||
|
||||
interface SearchResult {
|
||||
interface GotoAnythingSearchResult {
|
||||
id: string;
|
||||
title: string;
|
||||
parent_id: string;
|
||||
|
@ -33,6 +35,8 @@ interface SearchResult {
|
|||
fragments?: string;
|
||||
path?: string;
|
||||
type?: number;
|
||||
item_id?: string;
|
||||
item_type?: ModelType;
|
||||
}
|
||||
|
||||
interface Props {
|
||||
|
@ -46,7 +50,7 @@ interface Props {
|
|||
|
||||
interface State {
|
||||
query: string;
|
||||
results: SearchResult[];
|
||||
results: GotoAnythingSearchResult[];
|
||||
selectedItemId: string;
|
||||
keywords: string[];
|
||||
listType: number;
|
||||
|
@ -60,6 +64,35 @@ interface CommandQuery {
|
|||
args: string[];
|
||||
}
|
||||
|
||||
const getContentMarkupLanguageAndBody = (result: GotoAnythingSearchResult, notesById: Record<string, NoteEntity>, resources: ResourceEntity[]) => {
|
||||
if (result.item_type === ModelType.Resource) {
|
||||
const resource = resources.find(r => r.id === result.item_id);
|
||||
if (!resource) {
|
||||
logger.warn('Could not find resources associated with result:', result);
|
||||
return { markupLanguage: MarkupLanguage.Markdown, content: '' };
|
||||
} else {
|
||||
return { markupLanguage: MarkupLanguage.Markdown, content: resource.ocr_text };
|
||||
}
|
||||
} else { // a note
|
||||
const note = notesById[result.id];
|
||||
return { markupLanguage: note.markup_language, content: note.body };
|
||||
}
|
||||
};
|
||||
|
||||
// A result row contains an `id` property (the note ID) and, if the current row
|
||||
// is a resource, an `item_id` property, which is the resource ID. In that case,
|
||||
// the row also has an `id` property, which is the note that contains the
|
||||
// resource.
|
||||
//
|
||||
// It means a result set may include multiple results with the same `id`
|
||||
// property, if it contains one or more resources that are in a note that's
|
||||
// already in the result set. For that reason, when we need a unique ID for the
|
||||
// result, we use this function - which returns either the item_id, if present,
|
||||
// or the note ID.
|
||||
const getResultId = (result: GotoAnythingSearchResult) => {
|
||||
return result.item_id ? result.item_id : result.id;
|
||||
};
|
||||
|
||||
class GotoAnything {
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
|
@ -266,7 +299,7 @@ class Dialog extends React.PureComponent<Props, State> {
|
|||
if (!this.state.query) {
|
||||
this.setState({ results: [], keywords: [] });
|
||||
} else {
|
||||
let results: SearchResult[] = [];
|
||||
let results: GotoAnythingSearchResult[] = [];
|
||||
let listType = null;
|
||||
let searchQuery = '';
|
||||
let keywords = null;
|
||||
|
@ -311,6 +344,9 @@ class Dialog extends React.PureComponent<Props, State> {
|
|||
|
||||
resultsInBody = !!results.find((row: any) => row.fields.includes('body'));
|
||||
|
||||
const resourceIds = results.filter(r => r.item_type === ModelType.Resource).map(r => r.item_id);
|
||||
const resources = await Resource.resourceOcrTextsByIds(resourceIds);
|
||||
|
||||
if (!resultsInBody || this.state.query.length <= 1) {
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
const row = results[i];
|
||||
|
@ -336,9 +372,14 @@ class Dialog extends React.PureComponent<Props, State> {
|
|||
let fragments = '...';
|
||||
|
||||
if (i < limit) { // Display note fragments of search keyword matches
|
||||
const { markupLanguage, content } = getContentMarkupLanguageAndBody(
|
||||
row,
|
||||
notesById,
|
||||
resources,
|
||||
);
|
||||
|
||||
const indices = [];
|
||||
const note = notesById[row.id];
|
||||
const body = this.markupToHtml().stripMarkup(note.markup_language, note.body, { collapseWhiteSpaces: true });
|
||||
const body = this.markupToHtml().stripMarkup(markupLanguage, content, { collapseWhiteSpaces: true });
|
||||
|
||||
// Iterate over all matches in the body for each search keyword
|
||||
for (let { valueRegex } of searchKeywords) {
|
||||
|
@ -359,7 +400,6 @@ class Dialog extends React.PureComponent<Props, State> {
|
|||
fragments = mergedIndices.map((f: any) => body.slice(f[0], f[1])).join(' ... ');
|
||||
// Add trailing ellipsis if the final fragment doesn't end where the note is ending
|
||||
if (mergedIndices.length && mergedIndices[mergedIndices.length - 1][1] !== body.length) fragments += ' ...';
|
||||
|
||||
}
|
||||
|
||||
results[i] = { ...row, path, fragments };
|
||||
|
@ -381,7 +421,7 @@ class Dialog extends React.PureComponent<Props, State> {
|
|||
listType: listType,
|
||||
results: results,
|
||||
keywords: keywords ? keywords : await this.keywords(searchQuery),
|
||||
selectedItemId: results.length === 0 ? null : results[0].id,
|
||||
selectedItemId: results.length === 0 ? null : getResultId(results[0]),
|
||||
resultsInBody: resultsInBody,
|
||||
commandArgs: commandArgs,
|
||||
});
|
||||
|
@ -475,10 +515,10 @@ class Dialog extends React.PureComponent<Props, State> {
|
|||
});
|
||||
}
|
||||
|
||||
public renderItem(item: SearchResult) {
|
||||
public renderItem(item: GotoAnythingSearchResult) {
|
||||
const theme = themeStyle(this.props.themeId);
|
||||
const style = this.style();
|
||||
const isSelected = item.id === this.state.selectedItemId;
|
||||
const isSelected = getResultId(item) === this.state.selectedItemId;
|
||||
const rowStyle = isSelected ? style.rowSelected : style.row;
|
||||
const titleHtml = item.fragments
|
||||
? `<span style="font-weight: bold; color: ${theme.color};">${item.title}</span>`
|
||||
|
@ -491,7 +531,7 @@ class Dialog extends React.PureComponent<Props, State> {
|
|||
const fragmentComp = !fragmentsHtml ? null : <div style={style.rowFragments} dangerouslySetInnerHTML={{ __html: (fragmentsHtml) }}></div>;
|
||||
|
||||
return (
|
||||
<div key={item.id} className={isSelected ? 'selected' : null} style={rowStyle} onClick={this.listItem_onClick} data-id={item.id} data-parent-id={item.parent_id} data-type={item.type}>
|
||||
<div key={getResultId(item)} className={isSelected ? 'selected' : null} style={rowStyle} onClick={this.listItem_onClick} data-id={item.id} data-parent-id={item.parent_id} data-type={item.type}>
|
||||
<div style={style.rowTitle} dangerouslySetInnerHTML={{ __html: titleHtml }}></div>
|
||||
{fragmentComp}
|
||||
{pathComp}
|
||||
|
@ -504,7 +544,7 @@ class Dialog extends React.PureComponent<Props, State> {
|
|||
if (typeof itemId === 'undefined') itemId = this.state.selectedItemId;
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
const r = results[i];
|
||||
if (r.id === itemId) return i;
|
||||
if (getResultId(r) === itemId) return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
@ -529,7 +569,7 @@ class Dialog extends React.PureComponent<Props, State> {
|
|||
if (index < 0) index = 0;
|
||||
if (index >= this.state.results.length) index = this.state.results.length - 1;
|
||||
|
||||
const newId = this.state.results[index].id;
|
||||
const newId = getResultId(this.state.results[index]);
|
||||
|
||||
this.makeItemIndexVisible(index);
|
||||
|
||||
|
|
|
@ -62,6 +62,7 @@ const withRetry = async (fn) => {
|
|||
async function main() {
|
||||
const langSourceDir = resolve(__dirname, '../../../Assets/TinyMCE/langs');
|
||||
const buildLibDir = resolve(__dirname, '../vendor/lib');
|
||||
const buildDir = resolve(__dirname, '../build');
|
||||
|
||||
const dirs = [
|
||||
'tinymce',
|
||||
|
@ -72,10 +73,10 @@ async function main() {
|
|||
src: langSourceDir,
|
||||
dest: `${buildLibDir}/tinymce/langs`,
|
||||
},
|
||||
// {
|
||||
// src: resolve(__dirname, '../../pdf-viewer/dist'),
|
||||
// dest: `${buildLibDir}/@joplin/pdf-viewer`,
|
||||
// },
|
||||
{
|
||||
src: `${nodeModulesDir}/tesseract.js-core`,
|
||||
dest: `${buildDir}/tesseract.js-core`,
|
||||
},
|
||||
];
|
||||
|
||||
const files = [
|
||||
|
@ -93,10 +94,14 @@ async function main() {
|
|||
src: resolve(__dirname, '../../lib/services/plugins/sandboxProxy.js'),
|
||||
dest: `${buildLibDir}/@joplin/lib/services/plugins/sandboxProxy.js`,
|
||||
},
|
||||
// {
|
||||
// src: resolve(__dirname, '../../pdf-viewer/index.html'),
|
||||
// dest: `${buildLibDir}/@joplin/pdf-viewer/index.html`,
|
||||
// },
|
||||
{
|
||||
src: `${nodeModulesDir}/pdfjs-dist/build/pdf.worker.min.js`,
|
||||
dest: `${buildDir}/pdf.worker.min.js`,
|
||||
},
|
||||
{
|
||||
src: `${nodeModulesDir}/tesseract.js/dist/worker.min.js`,
|
||||
dest: `${buildDir}/tesseract.js/worker.min.js`,
|
||||
},
|
||||
];
|
||||
|
||||
// First we delete all the destination directories, then we copy the files.
|
||||
|
|
|
@ -16,6 +16,7 @@ const DialogBox = require('react-native-dialogbox').default;
|
|||
const { BaseScreenComponent } = require('../base-screen');
|
||||
const { BackButtonService } = require('../../services/back-button.js');
|
||||
import { AppState } from '../../utils/types';
|
||||
import { NoteEntity } from '@joplin/lib/services/database/types';
|
||||
const { ALL_NOTES_FILTER_ID } = require('@joplin/lib/reserved-ids.js');
|
||||
|
||||
class NotesScreenComponent extends BaseScreenComponent<any> {
|
||||
|
@ -134,7 +135,7 @@ class NotesScreenComponent extends BaseScreenComponent<any> {
|
|||
|
||||
if (source === props.notesSource) return;
|
||||
|
||||
let notes = [];
|
||||
let notes: NoteEntity[] = [];
|
||||
if (props.notesParentType === 'Folder') {
|
||||
notes = await Note.previews(props.selectedFolderId, options);
|
||||
} else if (props.notesParentType === 'Tag') {
|
||||
|
|
|
@ -13,6 +13,7 @@ const DialogBox = require('react-native-dialogbox').default;
|
|||
import SearchEngineUtils from '@joplin/lib/services/searchengine/SearchEngineUtils';
|
||||
import SearchEngine from '@joplin/lib/services/searchengine/SearchEngine';
|
||||
import { AppState } from '../../utils/types';
|
||||
import { NoteEntity } from '@joplin/lib/services/database/types';
|
||||
|
||||
// We need this to suppress the useless warning
|
||||
// https://github.com/oblador/react-native-vector-icons/issues/1465
|
||||
|
@ -94,11 +95,12 @@ class SearchScreenComponent extends BaseScreenComponent {
|
|||
public async refreshSearch(query: string = null) {
|
||||
if (!this.props.visible) return;
|
||||
|
||||
let notes = [];
|
||||
let notes: NoteEntity[] = [];
|
||||
|
||||
if (query) {
|
||||
if (this.props.settings['db.ftsEnabled']) {
|
||||
notes = await SearchEngineUtils.notesForQuery(query, true, { appendWildCards: true });
|
||||
const r = await SearchEngineUtils.notesForQuery(query, true, { appendWildCards: true });
|
||||
notes = r.notes;
|
||||
} else {
|
||||
const p = query.split(' ');
|
||||
const temp = [];
|
||||
|
|
|
@ -1,2 +1,3 @@
|
|||
plugin_types/
|
||||
markdownUtils.test.js
|
||||
*.traineddata
|
|
@ -39,7 +39,7 @@ const SyncTargetAmazonS3 = require('./SyncTargetAmazonS3.js');
|
|||
import EncryptionService from './services/e2ee/EncryptionService';
|
||||
import ResourceFetcher from './services/ResourceFetcher';
|
||||
import SearchEngineUtils from './services/searchengine/SearchEngineUtils';
|
||||
import SearchEngine from './services/searchengine/SearchEngine';
|
||||
import SearchEngine, { ProcessResultsRow } from './services/searchengine/SearchEngine';
|
||||
import RevisionService from './services/RevisionService';
|
||||
import ResourceService from './services/ResourceService';
|
||||
import DecryptionWorker from './services/DecryptionWorker';
|
||||
|
@ -60,6 +60,7 @@ import { ProfileConfig } from './services/profileConfig/types';
|
|||
import initProfile from './services/profileConfig/initProfile';
|
||||
import { parseShareCache } from './services/share/reducer';
|
||||
import RotatingLogs from './RotatingLogs';
|
||||
import { NoteEntity } from './services/database/types';
|
||||
import { join } from 'path';
|
||||
import processStartFlags, { MatchedStartFlags } from './utils/processStartFlags';
|
||||
|
||||
|
@ -227,8 +228,9 @@ export default class BaseApplication {
|
|||
parentId: parentId,
|
||||
});
|
||||
|
||||
let notes = [];
|
||||
let highlightedWords = [];
|
||||
let notes: NoteEntity[] = [];
|
||||
let highlightedWords: string[] = [];
|
||||
let searchResults: ProcessResultsRow[] = [];
|
||||
|
||||
if (parentId) {
|
||||
if (parentType === Folder.modelType()) {
|
||||
|
@ -237,7 +239,9 @@ export default class BaseApplication {
|
|||
notes = await Tag.notes(parentId, options);
|
||||
} else if (parentType === BaseModel.TYPE_SEARCH) {
|
||||
const search = BaseModel.byId(state.searches, parentId);
|
||||
notes = await SearchEngineUtils.notesForQuery(search.query_pattern, true, { appendWildCards: true });
|
||||
const response = await SearchEngineUtils.notesForQuery(search.query_pattern, true, { appendWildCards: true });
|
||||
notes = response.notes;
|
||||
searchResults = response.results;
|
||||
const parsedQuery = await SearchEngine.instance().parseQuery(search.query_pattern);
|
||||
highlightedWords = SearchEngine.instance().allParsedQueryTerms(parsedQuery);
|
||||
} else if (parentType === BaseModel.TYPE_SMART_FILTER) {
|
||||
|
@ -250,6 +254,11 @@ export default class BaseApplication {
|
|||
words: highlightedWords,
|
||||
});
|
||||
|
||||
this.store().dispatch({
|
||||
type: 'SEARCH_RESULTS_SET',
|
||||
value: searchResults,
|
||||
});
|
||||
|
||||
this.store().dispatch({
|
||||
type: 'NOTE_UPDATE_ALL',
|
||||
notes: notes,
|
||||
|
|
|
@ -346,16 +346,21 @@ class BaseModel {
|
|||
});
|
||||
}
|
||||
|
||||
public static modelSelectAll(sql: string, params: any[] = null) {
|
||||
public static modelSelectAll<T = any>(sql: string, params: any[] = null): Promise<T[]> {
|
||||
if (params === null) params = [];
|
||||
return this.db()
|
||||
.selectAll(sql, params)
|
||||
// eslint-disable-next-line promise/prefer-await-to-then -- Old code before rule was applied
|
||||
.then((models: any[]) => {
|
||||
return this.filterArray(this.addModelMd(models));
|
||||
return this.filterArray(this.addModelMd(models)) as T[];
|
||||
});
|
||||
}
|
||||
|
||||
protected static selectFields(options: LoadOptions): string {
|
||||
if (!options || !options.fields) return '*';
|
||||
return this.db().escapeFieldsToString(options.fields);
|
||||
}
|
||||
|
||||
public static loadByField(fieldName: string, fieldValue: any, options: LoadOptions = null) {
|
||||
if (!options) options = {};
|
||||
if (!('caseInsensitive' in options)) options.caseInsensitive = false;
|
||||
|
|
|
@ -4,10 +4,11 @@ import Database from './database';
|
|||
import migration42 from './services/database/migrations/42';
|
||||
import migration43 from './services/database/migrations/43';
|
||||
import migration44 from './services/database/migrations/44';
|
||||
import migration45 from './services/database/migrations/45';
|
||||
import { SqlQuery, Migration } from './services/database/types';
|
||||
import addMigrationFile from './services/database/addMigrationFile';
|
||||
import sqlStringToLines from './services/database/sqlStringToLines';
|
||||
|
||||
const { promiseChain } = require('./promise-utils.js');
|
||||
const { sprintf } = require('sprintf-js');
|
||||
|
||||
const structureSql = `
|
||||
|
@ -128,6 +129,7 @@ const migrations: Migration[] = [
|
|||
migration42,
|
||||
migration43,
|
||||
migration44,
|
||||
migration45,
|
||||
];
|
||||
|
||||
export interface TableField {
|
||||
|
@ -297,52 +299,56 @@ export default class JoplinDatabase extends Database {
|
|||
return d && d[fieldName] ? d[fieldName] : '';
|
||||
}
|
||||
|
||||
public refreshTableFields(newVersion: number) {
|
||||
private async countFields(tableName: string): Promise<number> {
|
||||
const pragmas = await this.selectAll(`PRAGMA table_info("${tableName}")`);
|
||||
if (!pragmas) throw new Error(`No such table: ${tableName}`);
|
||||
return pragmas.length;
|
||||
}
|
||||
|
||||
public async refreshTableFields(newVersion: number) {
|
||||
this.logger().info('Initializing tables...');
|
||||
const queries: SqlQuery[] = [];
|
||||
queries.push(this.wrapQuery('DELETE FROM table_fields'));
|
||||
|
||||
return this.selectAll('SELECT name FROM sqlite_master WHERE type="table"')
|
||||
// eslint-disable-next-line promise/prefer-await-to-then -- Old code before rule was applied
|
||||
.then(tableRows => {
|
||||
const chain = [];
|
||||
for (let i = 0; i < tableRows.length; i++) {
|
||||
const tableName = tableRows[i].name;
|
||||
if (tableName === 'android_metadata') continue;
|
||||
if (tableName === 'table_fields') continue;
|
||||
if (tableName === 'sqlite_sequence') continue;
|
||||
if (tableName.indexOf('notes_fts') === 0) continue;
|
||||
if (tableName === 'notes_spellfix') continue;
|
||||
if (tableName === 'search_aux') continue;
|
||||
chain.push(() => {
|
||||
// eslint-disable-next-line promise/prefer-await-to-then -- Old code before rule was applied
|
||||
return this.selectAll(`PRAGMA table_info("${tableName}")`).then(pragmas => {
|
||||
for (let i = 0; i < pragmas.length; i++) {
|
||||
const item = pragmas[i];
|
||||
// In SQLite, if the default value is a string it has double quotes around it, so remove them here
|
||||
let defaultValue = item.dflt_value;
|
||||
if (typeof defaultValue === 'string' && defaultValue.length >= 2 && defaultValue[0] === '"' && defaultValue[defaultValue.length - 1] === '"') {
|
||||
defaultValue = defaultValue.substr(1, defaultValue.length - 2);
|
||||
}
|
||||
const q = Database.insertQuery('table_fields', {
|
||||
table_name: tableName,
|
||||
field_name: item.name,
|
||||
field_type: Database.enumId('fieldType', item.type),
|
||||
field_default: defaultValue,
|
||||
});
|
||||
queries.push(q);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
const countFieldsNotesFts = await this.countFields('notes_fts');
|
||||
const countFieldsItemsFts = await this.countFields('items_fts');
|
||||
if (countFieldsNotesFts !== countFieldsItemsFts) {
|
||||
throw new Error(`\`notes_fts\` (${countFieldsNotesFts} fields) must have the same number of fields as \`items_fts\` (${countFieldsItemsFts} fields) for the search engine BM25 algorithm to work`);
|
||||
}
|
||||
|
||||
return promiseChain(chain);
|
||||
})
|
||||
// eslint-disable-next-line promise/prefer-await-to-then -- Old code before rule was applied
|
||||
.then(() => {
|
||||
queries.push({ sql: 'UPDATE version SET table_fields_version = ?', params: [newVersion] });
|
||||
return this.transactionExecBatch(queries);
|
||||
});
|
||||
const tableRows = await this.selectAll('SELECT name FROM sqlite_master WHERE type="table"');
|
||||
|
||||
for (let i = 0; i < tableRows.length; i++) {
|
||||
const tableName = tableRows[i].name;
|
||||
if (tableName === 'android_metadata') continue;
|
||||
if (tableName === 'table_fields') continue;
|
||||
if (tableName === 'sqlite_sequence') continue;
|
||||
if (tableName.indexOf('notes_fts') === 0) continue;
|
||||
if (tableName.indexOf('items_fts') === 0) continue;
|
||||
if (tableName === 'notes_spellfix') continue;
|
||||
if (tableName === 'search_aux') continue;
|
||||
|
||||
const pragmas = await this.selectAll(`PRAGMA table_info("${tableName}")`);
|
||||
|
||||
for (let i = 0; i < pragmas.length; i++) {
|
||||
const item = pragmas[i];
|
||||
// In SQLite, if the default value is a string it has double quotes around it, so remove them here
|
||||
let defaultValue = item.dflt_value;
|
||||
if (typeof defaultValue === 'string' && defaultValue.length >= 2 && defaultValue[0] === '"' && defaultValue[defaultValue.length - 1] === '"') {
|
||||
defaultValue = defaultValue.substr(1, defaultValue.length - 2);
|
||||
}
|
||||
const q = Database.insertQuery('table_fields', {
|
||||
table_name: tableName,
|
||||
field_name: item.name,
|
||||
field_type: Database.enumId('fieldType', item.type),
|
||||
field_default: defaultValue,
|
||||
});
|
||||
queries.push(q);
|
||||
}
|
||||
}
|
||||
|
||||
queries.push({ sql: 'UPDATE version SET table_fields_version = ?', params: [newVersion] });
|
||||
await this.transactionExecBatch(queries);
|
||||
}
|
||||
|
||||
public async upgradeDatabase(fromVersion: number) {
|
||||
|
@ -389,7 +395,7 @@ export default class JoplinDatabase extends Database {
|
|||
let queries: (SqlQuery|string)[] = [];
|
||||
|
||||
if (targetVersion === 1) {
|
||||
queries = this.wrapQueries(this.sqlStringToLines(structureSql));
|
||||
queries = this.wrapQueries(sqlStringToLines(structureSql));
|
||||
}
|
||||
|
||||
if (targetVersion === 2) {
|
||||
|
@ -404,7 +410,7 @@ export default class JoplinDatabase extends Database {
|
|||
`;
|
||||
|
||||
queries.push({ sql: 'DROP TABLE deleted_items' });
|
||||
queries.push({ sql: this.sqlStringToLines(newTableSql)[0] });
|
||||
queries.push({ sql: sqlStringToLines(newTableSql)[0] });
|
||||
queries.push({ sql: 'CREATE INDEX deleted_items_sync_target ON deleted_items (sync_target)' });
|
||||
}
|
||||
|
||||
|
@ -455,7 +461,7 @@ export default class JoplinDatabase extends Database {
|
|||
content TEXT NOT NULL
|
||||
);
|
||||
`;
|
||||
queries.push(this.sqlStringToLines(newTableSql)[0]);
|
||||
queries.push(sqlStringToLines(newTableSql)[0]);
|
||||
const tableNames = ['notes', 'folders', 'tags', 'note_tags', 'resources'];
|
||||
for (let i = 0; i < tableNames.length; i++) {
|
||||
const n = tableNames[i];
|
||||
|
@ -489,12 +495,12 @@ export default class JoplinDatabase extends Database {
|
|||
);
|
||||
`;
|
||||
|
||||
queries.push(this.sqlStringToLines(itemChangesTable)[0]);
|
||||
queries.push(sqlStringToLines(itemChangesTable)[0]);
|
||||
queries.push('CREATE INDEX item_changes_item_id ON item_changes (item_id)');
|
||||
queries.push('CREATE INDEX item_changes_created_time ON item_changes (created_time)');
|
||||
queries.push('CREATE INDEX item_changes_item_type ON item_changes (item_type)');
|
||||
|
||||
queries.push(this.sqlStringToLines(noteResourcesTable)[0]);
|
||||
queries.push(sqlStringToLines(noteResourcesTable)[0]);
|
||||
queries.push('CREATE INDEX note_resources_note_id ON note_resources (note_id)');
|
||||
queries.push('CREATE INDEX note_resources_resource_id ON note_resources (resource_id)');
|
||||
|
||||
|
@ -534,7 +540,7 @@ export default class JoplinDatabase extends Database {
|
|||
);
|
||||
`;
|
||||
|
||||
queries.push(this.sqlStringToLines(resourceLocalStates)[0]);
|
||||
queries.push(sqlStringToLines(resourceLocalStates)[0]);
|
||||
|
||||
queries.push('INSERT INTO resource_local_states SELECT null, id, fetch_status, fetch_error FROM resources');
|
||||
|
||||
|
@ -592,7 +598,7 @@ export default class JoplinDatabase extends Database {
|
|||
);
|
||||
`;
|
||||
|
||||
queries.push(this.sqlStringToLines(notesNormalized)[0]);
|
||||
queries.push(sqlStringToLines(notesNormalized)[0]);
|
||||
|
||||
queries.push('CREATE INDEX notes_normalized_id ON notes_normalized (id)');
|
||||
|
||||
|
@ -641,7 +647,7 @@ export default class JoplinDatabase extends Database {
|
|||
created_time INT NOT NULL
|
||||
);
|
||||
`;
|
||||
queries.push(this.sqlStringToLines(newTableSql)[0]);
|
||||
queries.push(sqlStringToLines(newTableSql)[0]);
|
||||
|
||||
queries.push('CREATE INDEX revisions_parent_id ON revisions (parent_id)');
|
||||
queries.push('CREATE INDEX revisions_item_type ON revisions (item_type)');
|
||||
|
@ -662,7 +668,7 @@ export default class JoplinDatabase extends Database {
|
|||
created_time INT NOT NULL
|
||||
);
|
||||
`;
|
||||
queries.push(this.sqlStringToLines(newTableSql)[0]);
|
||||
queries.push(sqlStringToLines(newTableSql)[0]);
|
||||
|
||||
queries.push('ALTER TABLE resources ADD COLUMN `size` INT NOT NULL DEFAULT -1');
|
||||
queries.push(addMigrationFile(20));
|
||||
|
@ -681,7 +687,7 @@ export default class JoplinDatabase extends Database {
|
|||
created_time INT NOT NULL
|
||||
);
|
||||
`;
|
||||
queries.push(this.sqlStringToLines(newTableSql)[0]);
|
||||
queries.push(sqlStringToLines(newTableSql)[0]);
|
||||
|
||||
queries.push('CREATE INDEX resources_to_download_resource_id ON resources_to_download (resource_id)');
|
||||
queries.push('CREATE INDEX resources_to_download_updated_time ON resources_to_download (updated_time)');
|
||||
|
@ -697,7 +703,7 @@ export default class JoplinDatabase extends Database {
|
|||
updated_time INT NOT NULL
|
||||
);
|
||||
`;
|
||||
queries.push(this.sqlStringToLines(newTableSql)[0]);
|
||||
queries.push(sqlStringToLines(newTableSql)[0]);
|
||||
|
||||
queries.push('CREATE UNIQUE INDEX key_values_key ON key_values (key)');
|
||||
}
|
||||
|
@ -823,7 +829,7 @@ export default class JoplinDatabase extends Database {
|
|||
);
|
||||
`;
|
||||
|
||||
queries.push(this.sqlStringToLines(notesNormalized)[0]);
|
||||
queries.push(sqlStringToLines(notesNormalized)[0]);
|
||||
|
||||
queries.push('CREATE INDEX notes_normalized_id ON notes_normalized (id)');
|
||||
|
||||
|
@ -857,7 +863,7 @@ export default class JoplinDatabase extends Database {
|
|||
);`
|
||||
;
|
||||
|
||||
queries.push(this.sqlStringToLines(newVirtualTableSql)[0]);
|
||||
queries.push(sqlStringToLines(newVirtualTableSql)[0]);
|
||||
|
||||
queries.push(`
|
||||
CREATE TRIGGER notes_fts_before_update BEFORE UPDATE ON notes_normalized BEGIN
|
||||
|
|
|
@ -3,7 +3,7 @@ import LockHandler, { appTypeToLockType, hasActiveLock, LockClientType, LockType
|
|||
import Setting, { AppType } from './models/Setting';
|
||||
import shim from './shim';
|
||||
import MigrationHandler from './services/synchronizer/MigrationHandler';
|
||||
import eventManager from './eventManager';
|
||||
import eventManager, { EventName } from './eventManager';
|
||||
import { _ } from './locale';
|
||||
import BaseItem from './models/BaseItem';
|
||||
import Folder from './models/Folder';
|
||||
|
@ -399,7 +399,7 @@ export default class Synchronizer {
|
|||
this.progressReport_.startTime = time.unixMs();
|
||||
|
||||
this.dispatch({ type: 'SYNC_STARTED' });
|
||||
eventManager.emit('syncStart');
|
||||
eventManager.emit(EventName.SyncStart);
|
||||
|
||||
this.logSyncOperation('starting', null, null, `Starting synchronisation to target ${syncTargetId}... supportsAccurateTimestamp = ${this.api().supportsAccurateTimestamp}; supportsMultiPut = ${this.api().supportsMultiPut} [${synchronizationId}]`);
|
||||
|
||||
|
@ -452,7 +452,7 @@ export default class Synchronizer {
|
|||
try {
|
||||
let remoteInfo = await fetchSyncInfo(this.api());
|
||||
logger.info('Sync target remote info:', remoteInfo);
|
||||
eventManager.emit('sessionEstablished');
|
||||
eventManager.emit(EventName.SessionEstablished);
|
||||
|
||||
let syncTargetIsNew = false;
|
||||
|
||||
|
@ -1103,7 +1103,7 @@ export default class Synchronizer {
|
|||
|
||||
await this.logSyncSummary(this.progressReport_);
|
||||
|
||||
eventManager.emit('syncComplete', {
|
||||
eventManager.emit(EventName.SyncComplete, {
|
||||
withErrors: Synchronizer.reportHasErrors(this.progressReport_),
|
||||
});
|
||||
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
import time from './time';
|
||||
import Setting from './models/Setting';
|
||||
import Logger from '@joplin/utils/Logger';
|
||||
import Logger, { LoggerWrapper } from '@joplin/utils/Logger';
|
||||
|
||||
type TaskCallback = ()=> Promise<any>;
|
||||
|
||||
interface Task {
|
||||
id: string;
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
callback: Function;
|
||||
callback: TaskCallback;
|
||||
}
|
||||
|
||||
interface TaskResult {
|
||||
|
@ -22,19 +23,38 @@ export default class TaskQueue {
|
|||
private stopping_ = false;
|
||||
private results_: Record<string, TaskResult> = {};
|
||||
private name_: string;
|
||||
private logger_: Logger;
|
||||
private logger_: Logger | LoggerWrapper;
|
||||
private concurrency_: number = null;
|
||||
private keepTaskResults_ = true;
|
||||
|
||||
public constructor(name: string, logger: Logger = null) {
|
||||
public constructor(name: string, logger: Logger | LoggerWrapper = null) {
|
||||
this.name_ = name;
|
||||
this.logger_ = logger ? logger : new Logger();
|
||||
}
|
||||
|
||||
public concurrency() {
|
||||
return Setting.value('sync.maxConcurrentConnections');
|
||||
if (this.concurrency_ === null) {
|
||||
return Setting.value('sync.maxConcurrentConnections');
|
||||
} else {
|
||||
return this.concurrency_;
|
||||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public push(id: string, callback: Function) {
|
||||
public setConcurrency(v: number) {
|
||||
this.concurrency_ = v;
|
||||
}
|
||||
|
||||
public get keepTaskResults() {
|
||||
return this.keepTaskResults_;
|
||||
}
|
||||
|
||||
public set keepTaskResults(v: boolean) {
|
||||
this.keepTaskResults_ = v;
|
||||
}
|
||||
|
||||
// Using `push`, an unlimited number of tasks can be pushed, although only
|
||||
// up to `concurrency` will run in parallel.
|
||||
public push(id: string, callback: TaskCallback) {
|
||||
if (this.stopping_) throw new Error('Cannot push task when queue is stopping');
|
||||
|
||||
this.waitingTasks_.push({
|
||||
|
@ -44,6 +64,13 @@ export default class TaskQueue {
|
|||
this.processQueue_();
|
||||
}
|
||||
|
||||
// Using `push`, only up to `concurrency` tasks can be pushed to the queue.
|
||||
// Beyond this, the call will wait until a slot is available.
|
||||
public async pushAsync(id: string, callback: TaskCallback) {
|
||||
await this.waitForOneSlot();
|
||||
this.push(id, callback);
|
||||
}
|
||||
|
||||
private processQueue_() {
|
||||
if (this.processingQueue_ || this.stopping_) return;
|
||||
|
||||
|
@ -52,14 +79,16 @@ export default class TaskQueue {
|
|||
const completeTask = (task: Task, result: any, error: Error) => {
|
||||
delete this.processingTasks_[task.id];
|
||||
|
||||
const r: TaskResult = {
|
||||
id: task.id,
|
||||
result: result,
|
||||
};
|
||||
if (this.keepTaskResults) {
|
||||
const r: TaskResult = {
|
||||
id: task.id,
|
||||
result: result,
|
||||
};
|
||||
|
||||
if (error) r.error = error;
|
||||
if (error) r.error = error;
|
||||
|
||||
this.results_[task.id] = r;
|
||||
this.results_[task.id] = r;
|
||||
}
|
||||
|
||||
this.processQueue_();
|
||||
};
|
||||
|
@ -70,13 +99,15 @@ export default class TaskQueue {
|
|||
const task = this.waitingTasks_.splice(0, 1)[0];
|
||||
this.processingTasks_[task.id] = task;
|
||||
|
||||
// We want to use then/catch here because we don't want to wait for
|
||||
// the task to complete, but still want to capture the result.
|
||||
task
|
||||
.callback()
|
||||
// eslint-disable-next-line promise/prefer-await-to-then -- Old code before rule was applied
|
||||
// eslint-disable-next-line promise/prefer-await-to-then
|
||||
.then((result: any) => {
|
||||
completeTask(task, result, null);
|
||||
})
|
||||
// eslint-disable-next-line promise/prefer-await-to-then -- Old code before rule was applied
|
||||
// eslint-disable-next-line promise/prefer-await-to-then
|
||||
.catch((error: Error) => {
|
||||
if (!error) error = new Error('Unknown error');
|
||||
completeTask(task, null, error);
|
||||
|
@ -102,7 +133,17 @@ export default class TaskQueue {
|
|||
return new Promise((resolve) => {
|
||||
const checkIID = setInterval(() => {
|
||||
if (this.waitingTasks_.length) return;
|
||||
if (this.processingTasks_.length) return;
|
||||
if (Object.keys(this.processingTasks_).length) return;
|
||||
clearInterval(checkIID);
|
||||
resolve(null);
|
||||
}, 100);
|
||||
});
|
||||
}
|
||||
|
||||
public async waitForOneSlot() {
|
||||
return new Promise((resolve) => {
|
||||
const checkIID = setInterval(() => {
|
||||
if (Object.keys(this.processingTasks_).length >= this.concurrency()) return;
|
||||
clearInterval(checkIID);
|
||||
resolve(null);
|
||||
}, 100);
|
||||
|
@ -118,14 +159,25 @@ export default class TaskQueue {
|
|||
return this.results_[taskId];
|
||||
}
|
||||
|
||||
public async waitForResult(taskId: string) {
|
||||
public async waitForResult(taskId: string): Promise<TaskResult> {
|
||||
if (!this.taskExists(taskId)) throw new Error(`No such task: ${taskId}`);
|
||||
|
||||
while (true) {
|
||||
const task = this.results_[taskId];
|
||||
if (task) return task;
|
||||
await time.sleep(0.1);
|
||||
}
|
||||
return new Promise(resolve => {
|
||||
const check = () => {
|
||||
const result = this.results_[taskId];
|
||||
if (result) {
|
||||
resolve(result);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
if (check()) return;
|
||||
|
||||
const checkIID = setInterval(() => {
|
||||
if (check()) clearInterval(checkIID);
|
||||
}, 100);
|
||||
});
|
||||
}
|
||||
|
||||
public async stop() {
|
||||
|
|
|
@ -80,7 +80,10 @@ export default class Database {
|
|||
}
|
||||
|
||||
public escapeFieldsToString(fields: string[] | string): string {
|
||||
if (fields === '*') return '*';
|
||||
if (typeof fields === 'string') {
|
||||
if (fields === '*') return '*';
|
||||
throw new Error(`Invalid field value (only "*" is supported): ${fields}`);
|
||||
}
|
||||
|
||||
const output = [];
|
||||
for (let i = 0; i < fields.length; i++) {
|
||||
|
@ -165,7 +168,7 @@ export default class Database {
|
|||
// }
|
||||
}
|
||||
|
||||
public async selectAll(sql: string, params: SqlParams = null): Promise<Row[]> {
|
||||
public async selectAll<T = Row>(sql: string, params: SqlParams = null): Promise<T[]> {
|
||||
return this.tryCall('selectAll', sql, params);
|
||||
}
|
||||
|
||||
|
@ -253,24 +256,6 @@ export default class Database {
|
|||
throw new Error(`Unknown type: ${type}`);
|
||||
}
|
||||
|
||||
public sqlStringToLines(sql: string) {
|
||||
const output = [];
|
||||
const lines = sql.split('\n');
|
||||
let statement = '';
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (line === '') continue;
|
||||
if (line.substr(0, 2) === '--') continue;
|
||||
statement += line.trim();
|
||||
if (line[line.length - 1] === ',') statement += ' ';
|
||||
if (line[line.length - 1] === ';') {
|
||||
output.push(statement);
|
||||
statement = '';
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
public logQuery(sql: string, params: SqlParams = null) {
|
||||
if (!this.sqlQueryLogEnabled_) return;
|
||||
|
||||
|
|
|
@ -3,4 +3,5 @@
|
|||
export enum ErrorCode {
|
||||
IsReadOnly = 'isReadOnly',
|
||||
NotFound = 'notFound',
|
||||
UnsupportedMimeType = 'unsupportedMimeType',
|
||||
}
|
||||
|
|
|
@ -2,6 +2,23 @@ const fastDeepEqual = require('fast-deep-equal');
|
|||
|
||||
const events = require('events');
|
||||
|
||||
export enum EventName {
|
||||
ResourceCreate = 'resourceCreate',
|
||||
ResourceChange = 'resourceChange',
|
||||
SettingsChange = 'settingsChange',
|
||||
TodoToggle = 'todoToggle',
|
||||
NoteTypeToggle = 'noteTypeToggle',
|
||||
SyncStart = 'syncStart',
|
||||
SessionEstablished = 'sessionEstablished',
|
||||
SyncComplete = 'syncComplete',
|
||||
ItemChange = 'itemChange',
|
||||
NoteAlarmTrigger = 'noteAlarmTrigger',
|
||||
AlarmChange = 'alarmChange',
|
||||
KeymapChange = 'keymapChange',
|
||||
NoteContentChange = 'noteContentChange',
|
||||
OcrServiceResourcesProcessed = 'ocrServiceResourcesProcessed',
|
||||
}
|
||||
|
||||
export class EventManager {
|
||||
|
||||
private emitter_: any;
|
||||
|
@ -22,11 +39,11 @@ export class EventManager {
|
|||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public on(eventName: string, callback: Function) {
|
||||
public on(eventName: EventName, callback: Function) {
|
||||
return this.emitter_.on(eventName, callback);
|
||||
}
|
||||
|
||||
public emit(eventName: string, object: any = null) {
|
||||
public emit(eventName: EventName, object: any = null) {
|
||||
return this.emitter_.emit(eventName, object);
|
||||
}
|
||||
|
||||
|
@ -36,7 +53,7 @@ export class EventManager {
|
|||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public off(eventName: string, callback: Function) {
|
||||
public off(eventName: EventName, callback: Function) {
|
||||
return this.removeListener(eventName, callback);
|
||||
}
|
||||
|
||||
|
|
|
@ -2,8 +2,9 @@ const { afterEachCleanUp } = require('./testing/test-utils.js');
|
|||
const { shimInit } = require('./shim-init-node.js');
|
||||
const sharp = require('sharp');
|
||||
const nodeSqlite = require('sqlite3');
|
||||
const pdfJs = require('pdfjs-dist');
|
||||
|
||||
shimInit({ sharp, nodeSqlite });
|
||||
shimInit({ sharp, nodeSqlite, pdfJs });
|
||||
|
||||
global.afterEach(async () => {
|
||||
await afterEachCleanUp();
|
||||
|
|
|
@ -182,6 +182,174 @@ codeToLanguage_['et'] = 'Eesti Keel';
|
|||
codeToLanguage_['vi'] = 'Tiếng Việt';
|
||||
codeToLanguage_['hu'] = 'Magyar';
|
||||
|
||||
const iso639Map_ = [
|
||||
['aar', 'aa'],
|
||||
['abk', 'ab'],
|
||||
['afr', 'af'],
|
||||
['aka', 'ak'],
|
||||
['amh', 'am'],
|
||||
['ara', 'ar'],
|
||||
['arg', 'an'],
|
||||
['asm', 'as'],
|
||||
['ava', 'av'],
|
||||
['ave', 'ae'],
|
||||
['aym', 'ay'],
|
||||
['aze', 'az'],
|
||||
['bak', 'ba'],
|
||||
['bam', 'bm'],
|
||||
['bel', 'be'],
|
||||
['ben', 'bn'],
|
||||
['bih', 'bh'],
|
||||
['bis', 'bi'],
|
||||
['bos', 'bs'],
|
||||
['bre', 'br'],
|
||||
['bul', 'bg'],
|
||||
['cat', 'ca'],
|
||||
['cha', 'ch'],
|
||||
['che', 'ce'],
|
||||
['chu', 'cu'],
|
||||
['chv', 'cv'],
|
||||
['cor', 'kw'],
|
||||
['cos', 'co'],
|
||||
['cre', 'cr'],
|
||||
['dan', 'da'],
|
||||
['div', 'dv'],
|
||||
['dzo', 'dz'],
|
||||
['eng', 'en'],
|
||||
['epo', 'eo'],
|
||||
['est', 'et'],
|
||||
['ewe', 'ee'],
|
||||
['fao', 'fo'],
|
||||
['fij', 'fj'],
|
||||
['fin', 'fi'],
|
||||
['fra', 'fr'],
|
||||
['fry', 'fy'],
|
||||
['ful', 'ff'],
|
||||
['gla', 'gd'],
|
||||
['gle', 'ga'],
|
||||
['glg', 'gl'],
|
||||
['glv', 'gv'],
|
||||
['grn', 'gn'],
|
||||
['guj', 'gu'],
|
||||
['hat', 'ht'],
|
||||
['hau', 'ha'],
|
||||
['heb', 'he'],
|
||||
['her', 'hz'],
|
||||
['hin', 'hi'],
|
||||
['hmo', 'ho'],
|
||||
['hrv', 'hr'],
|
||||
['hun', 'hu'],
|
||||
['ibo', 'ig'],
|
||||
['ido', 'io'],
|
||||
['iii', 'ii'],
|
||||
['iku', 'iu'],
|
||||
['ile', 'ie'],
|
||||
['ina', 'ia'],
|
||||
['ind', 'id'],
|
||||
['ipk', 'ik'],
|
||||
['ita', 'it'],
|
||||
['jav', 'jv'],
|
||||
['jpn', 'ja'],
|
||||
['kal', 'kl'],
|
||||
['kan', 'kn'],
|
||||
['kas', 'ks'],
|
||||
['kau', 'kr'],
|
||||
['kaz', 'kk'],
|
||||
['khm', 'km'],
|
||||
['kik', 'ki'],
|
||||
['kin', 'rw'],
|
||||
['kir', 'ky'],
|
||||
['kom', 'kv'],
|
||||
['kon', 'kg'],
|
||||
['kor', 'ko'],
|
||||
['kua', 'kj'],
|
||||
['kur', 'ku'],
|
||||
['lao', 'lo'],
|
||||
['lat', 'la'],
|
||||
['lav', 'lv'],
|
||||
['lim', 'li'],
|
||||
['lin', 'ln'],
|
||||
['lit', 'lt'],
|
||||
['ltz', 'lb'],
|
||||
['lub', 'lu'],
|
||||
['lug', 'lg'],
|
||||
['mah', 'mh'],
|
||||
['mal', 'ml'],
|
||||
['mar', 'mr'],
|
||||
['mlg', 'mg'],
|
||||
['mlt', 'mt'],
|
||||
['mon', 'mn'],
|
||||
['nau', 'na'],
|
||||
['nav', 'nv'],
|
||||
['nbl', 'nr'],
|
||||
['nde', 'nd'],
|
||||
['ndo', 'ng'],
|
||||
['nep', 'ne'],
|
||||
['nno', 'nn'],
|
||||
['nob', 'nb'],
|
||||
['nor', 'no'],
|
||||
['nya', 'ny'],
|
||||
['oci', 'oc'],
|
||||
['oji', 'oj'],
|
||||
['ori', 'or'],
|
||||
['orm', 'om'],
|
||||
['oss', 'os'],
|
||||
['pan', 'pa'],
|
||||
['pli', 'pi'],
|
||||
['pol', 'pl'],
|
||||
['por', 'pt'],
|
||||
['pus', 'ps'],
|
||||
['que', 'qu'],
|
||||
['roh', 'rm'],
|
||||
['run', 'rn'],
|
||||
['rus', 'ru'],
|
||||
['sag', 'sg'],
|
||||
['san', 'sa'],
|
||||
['sin', 'si'],
|
||||
['slv', 'sl'],
|
||||
['sme', 'se'],
|
||||
['smo', 'sm'],
|
||||
['sna', 'sn'],
|
||||
['snd', 'sd'],
|
||||
['som', 'so'],
|
||||
['sot', 'st'],
|
||||
['spa', 'es'],
|
||||
['srd', 'sc'],
|
||||
['srp', 'sr'],
|
||||
['ssw', 'ss'],
|
||||
['sun', 'su'],
|
||||
['swa', 'sw'],
|
||||
['swe', 'sv'],
|
||||
['tah', 'ty'],
|
||||
['tam', 'ta'],
|
||||
['tat', 'tt'],
|
||||
['tel', 'te'],
|
||||
['tgk', 'tg'],
|
||||
['tgl', 'tl'],
|
||||
['tha', 'th'],
|
||||
['tir', 'ti'],
|
||||
['ton', 'to'],
|
||||
['tsn', 'tn'],
|
||||
['tso', 'ts'],
|
||||
['tuk', 'tk'],
|
||||
['tur', 'tr'],
|
||||
['twi', 'tw'],
|
||||
['uig', 'ug'],
|
||||
['ukr', 'uk'],
|
||||
['urd', 'ur'],
|
||||
['uzb', 'uz'],
|
||||
['ven', 've'],
|
||||
['vie', 'vi'],
|
||||
['vol', 'vo'],
|
||||
['wln', 'wa'],
|
||||
['wol', 'wo'],
|
||||
['xho', 'xh'],
|
||||
['yid', 'yi'],
|
||||
['yor', 'yo'],
|
||||
['zha', 'za'],
|
||||
['zul', 'zu'],
|
||||
];
|
||||
|
||||
const codeToCountry_: CodeToCountryMap = {
|
||||
AD: ['Andorra', 'Andorra'],
|
||||
AE: ['United Arab Emirates', 'دولة الإمارات العربيّة المتّحدة'],
|
||||
|
@ -624,6 +792,16 @@ function localesFromLanguageCode(languageCode: string, locales: string[]): strin
|
|||
});
|
||||
}
|
||||
|
||||
export const toIso639 = (code: string) => {
|
||||
if (code.includes('_')) {
|
||||
const s = code.split('_');
|
||||
code = s[0];
|
||||
}
|
||||
const line = iso639Map_.find(l => l[1] === code);
|
||||
if (!line) throw new Error(`Cannot convert to ISO-639 code: ${code}`);
|
||||
return line[0];
|
||||
};
|
||||
|
||||
function _(s: string, ...args: any[]): string {
|
||||
return stringByLocale(currentLocale_, s, ...args);
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import BaseModel, { ModelType } from '../BaseModel';
|
||||
import shim from '../shim';
|
||||
import eventManager from '../eventManager';
|
||||
import eventManager, { EventName } from '../eventManager';
|
||||
import { ItemChangeEntity } from '../services/database/types';
|
||||
const Mutex = require('async-mutex').Mutex;
|
||||
|
||||
|
@ -56,7 +56,7 @@ export default class ItemChange extends BaseModel {
|
|||
release();
|
||||
ItemChange.saveCalls_.pop();
|
||||
|
||||
eventManager.emit('itemChange', {
|
||||
eventManager.emit(EventName.ItemChange, {
|
||||
itemType: itemType,
|
||||
itemId: itemId,
|
||||
eventType: type,
|
||||
|
|
|
@ -436,7 +436,7 @@ export default class Note extends BaseItem {
|
|||
return this.modelSelectOne(`SELECT ${this.previewFieldsSql(options.fields)} FROM notes WHERE is_conflict = 0 AND id = ?`, [noteId]);
|
||||
}
|
||||
|
||||
public static async search(options: any = null) {
|
||||
public static async search(options: any = null): Promise<NoteEntity[]> {
|
||||
if (!options) options = {};
|
||||
if (!options.conditions) options.conditions = [];
|
||||
if (!options.conditionsParams) options.conditionsParams = [];
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import BaseModel from '../BaseModel';
|
||||
import { SqlQuery } from '../services/database/types';
|
||||
import { NoteEntity, SqlQuery } from '../services/database/types';
|
||||
import BaseItem from './BaseItem';
|
||||
import { LoadOptions } from './utils/types';
|
||||
|
||||
// - If is_associated = 1, note_resources indicates which note_id is currently associated with the given resource_id
|
||||
// - If is_associated = 0, note_resources indicates which note_id *was* associated with the given resource_id
|
||||
|
@ -76,6 +77,30 @@ export default class NoteResource extends BaseModel {
|
|||
return rows.map((r: any) => r.note_id);
|
||||
}
|
||||
|
||||
public static async associatedResourceNotes(resourceIds: string[], options: LoadOptions = null): Promise<Record<string, any>> {
|
||||
if (!resourceIds.length) return {};
|
||||
|
||||
const fields: string[] = options && options.fields ? (options.fields as string[]).slice() : [];
|
||||
fields.push('resource_id');
|
||||
fields.push('note_id');
|
||||
|
||||
const rows = await this.modelSelectAll(`
|
||||
SELECT ${this.selectFields({ ...options, fields })}
|
||||
FROM note_resources
|
||||
LEFT JOIN notes
|
||||
ON notes.id = note_resources.note_id
|
||||
WHERE resource_id IN ("${resourceIds.join('", "')}") AND is_associated = 1
|
||||
`);
|
||||
|
||||
const output: Record<string, NoteEntity[]> = {};
|
||||
for (const row of rows) {
|
||||
if (!output[row.resource_id]) output[row.resource_id] = [];
|
||||
output[row.resource_id].push(row);
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
public static async setAssociatedResources(noteId: string, resourceIds: string[]) {
|
||||
const existingRows = await this.modelSelectAll('SELECT * FROM note_resources WHERE note_id = ?', [noteId]);
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@ import Resource from '../models/Resource';
|
|||
import shim from '../shim';
|
||||
import { ErrorCode } from '../errors';
|
||||
import { remove, pathExists } from 'fs-extra';
|
||||
import { ResourceEntity } from '../services/database/types';
|
||||
import { ResourceEntity, ResourceOcrStatus } from '../services/database/types';
|
||||
|
||||
const testImagePath = `${supportDir}/photo.jpg`;
|
||||
|
||||
|
@ -152,4 +152,43 @@ describe('models/Resource', () => {
|
|||
cleanup();
|
||||
});
|
||||
|
||||
it('should return resources since a certain time and ID', async () => {
|
||||
expect((await Resource.allForNormalization(0, '')).length).toBe(0);
|
||||
|
||||
const testData: [string, number][] = [
|
||||
['00000000000000000000000000000001', 1536700000000],
|
||||
['ddddddddddddddddddddddddddddddd1', 1536700000001],
|
||||
['ddddddddddddddddddddddddddddddd3', 1536700000001],
|
||||
['ddddddddddddddddddddddddddddddd2', 1536700000001],
|
||||
['bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb1', 1536700000002],
|
||||
];
|
||||
|
||||
for (const [id, updatedTime] of testData) {
|
||||
await Resource.save({
|
||||
id,
|
||||
created_time: updatedTime,
|
||||
updated_time: updatedTime,
|
||||
user_updated_time: updatedTime,
|
||||
user_created_time: updatedTime,
|
||||
mime: 'application/octet-stream',
|
||||
ocr_text: 'test',
|
||||
ocr_status: ResourceOcrStatus.Done,
|
||||
}, { isNew: true, autoTimestamp: false });
|
||||
}
|
||||
|
||||
expect((await Resource.allForNormalization(0, '')).length).toBe(testData.length);
|
||||
|
||||
{
|
||||
const resources = await Resource.allForNormalization(1536700000001, 'ddddddddddddddddddddddddddddddd2');
|
||||
expect(resources.length).toBe(2);
|
||||
expect(resources.map(r => r.id)).toEqual(['ddddddddddddddddddddddddddddddd3', 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb1']);
|
||||
}
|
||||
|
||||
{
|
||||
const resources = await Resource.allForNormalization(1536700000000, '00000000000000000000000000000001');
|
||||
expect(resources.length).toBe(4);
|
||||
expect(resources.map(r => r.id)).toEqual(['ddddddddddddddddddddddddddddddd1', 'ddddddddddddddddddddddddddddddd2', 'ddddddddddddddddddddddddddddddd3', 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb1']);
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
|
|
|
@ -5,7 +5,7 @@ import NoteResource from './NoteResource';
|
|||
import Setting from './Setting';
|
||||
import markdownUtils from '../markdownUtils';
|
||||
import { _ } from '../locale';
|
||||
import { ResourceEntity, ResourceLocalStateEntity } from '../services/database/types';
|
||||
import { ResourceEntity, ResourceLocalStateEntity, ResourceOcrStatus, SqlQuery } from '../services/database/types';
|
||||
import ResourceLocalState from './ResourceLocalState';
|
||||
const pathUtils = require('../path-utils');
|
||||
const { mime } = require('../mime-utils.js');
|
||||
|
@ -15,9 +15,13 @@ import JoplinError from '../JoplinError';
|
|||
import itemCanBeEncrypted from './utils/itemCanBeEncrypted';
|
||||
import { getEncryptionEnabled } from '../services/synchronizer/syncInfoUtils';
|
||||
import ShareService from '../services/share/ShareService';
|
||||
import { LoadOptions } from './utils/types';
|
||||
import { SaveOptions } from './utils/types';
|
||||
import { MarkupLanguage } from '@joplin/renderer';
|
||||
import { htmlentities } from '@joplin/utils/html';
|
||||
import { RecognizeResultLine } from '../services/ocr/utils/types';
|
||||
import eventManager, { EventName } from '../eventManager';
|
||||
import { unique } from '../array';
|
||||
|
||||
export default class Resource extends BaseItem {
|
||||
|
||||
|
@ -87,8 +91,9 @@ export default class Resource extends BaseItem {
|
|||
return await this.db().exec('UPDATE resource_local_states SET fetch_status = ? WHERE fetch_status = ?', [Resource.FETCH_STATUS_IDLE, Resource.FETCH_STATUS_STARTED]);
|
||||
}
|
||||
|
||||
public static resetErrorStatus(resourceId: string) {
|
||||
return this.db().exec('UPDATE resource_local_states SET fetch_status = ?, fetch_error = "" WHERE resource_id = ?', [Resource.FETCH_STATUS_IDLE, resourceId]);
|
||||
public static async resetFetchErrorStatus(resourceId: string) {
|
||||
await this.db().exec('UPDATE resource_local_states SET fetch_status = ?, fetch_error = "" WHERE resource_id = ?', [Resource.FETCH_STATUS_IDLE, resourceId]);
|
||||
await this.resetOcrStatus(resourceId);
|
||||
}
|
||||
|
||||
public static fsDriver() {
|
||||
|
@ -284,7 +289,7 @@ export default class Resource extends BaseItem {
|
|||
return url.substr(2);
|
||||
}
|
||||
|
||||
public static async localState(resourceOrId: any) {
|
||||
public static async localState(resourceOrId: any): Promise<ResourceLocalStateEntity> {
|
||||
return ResourceLocalState.byResourceId(typeof resourceOrId === 'object' ? resourceOrId.id : resourceOrId);
|
||||
}
|
||||
|
||||
|
@ -323,6 +328,7 @@ export default class Resource extends BaseItem {
|
|||
await super.batchDelete([id], options);
|
||||
await this.fsDriver().remove(path);
|
||||
await NoteResource.deleteByResource(id); // Clean up note/resource relationships
|
||||
await this.db().exec('DELETE FROM items_normalized WHERE item_id = ?', [id]);
|
||||
}
|
||||
|
||||
await ResourceLocalState.batchDelete(ids);
|
||||
|
@ -454,6 +460,21 @@ export default class Resource extends BaseItem {
|
|||
return folder;
|
||||
}
|
||||
|
||||
public static mustHandleConflict(local: ResourceEntity, remote: ResourceEntity) {
|
||||
// That shouldn't happen so throw an exception
|
||||
if (local.id !== remote.id) throw new Error('Cannot handle conflict for two different resources');
|
||||
|
||||
// If the content has changed, we need to handle the conflict
|
||||
if (local.blob_updated_time !== remote.blob_updated_time) return true;
|
||||
|
||||
// If nothing has been changed, or if only the metadata has been
|
||||
// changed, we just keep the remote version. Most of the resource
|
||||
// metadata is not user-editable so there won't be any data loss. Such a
|
||||
// conflict might happen for example if a resource is OCRed by two
|
||||
// different clients.
|
||||
return false;
|
||||
}
|
||||
|
||||
public static async createConflictResourceNote(resource: ResourceEntity) {
|
||||
const Note = this.getClass('Note');
|
||||
const conflictResource = await Resource.duplicateResource(resource.id);
|
||||
|
@ -465,10 +486,90 @@ export default class Resource extends BaseItem {
|
|||
}, { changeSource: ItemChange.SOURCE_SYNC });
|
||||
}
|
||||
|
||||
private static baseNeedOcrQuery(selectSql: string, supportedMimeTypes: string[]): SqlQuery {
|
||||
return {
|
||||
sql: `
|
||||
SELECT ${selectSql}
|
||||
FROM resources
|
||||
WHERE
|
||||
ocr_status = ? AND
|
||||
encryption_applied = 0 AND
|
||||
mime IN ("${supportedMimeTypes.join('","')}")
|
||||
`,
|
||||
params: [
|
||||
ResourceOcrStatus.Todo,
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
public static async needOcrCount(supportedMimeTypes: string[]): Promise<number> {
|
||||
const query = this.baseNeedOcrQuery('count(*) as total', supportedMimeTypes);
|
||||
const r = await this.db().selectOne(query.sql, query.params);
|
||||
return r ? r['total'] : 0;
|
||||
}
|
||||
|
||||
public static async needOcr(supportedMimeTypes: string[], skippedResourceIds: string[], limit: number, options: LoadOptions): Promise<ResourceEntity[]> {
|
||||
const query = this.baseNeedOcrQuery(this.selectFields(options), supportedMimeTypes);
|
||||
const skippedResourcesSql = skippedResourceIds.length ? `AND resources.id NOT IN ("${skippedResourceIds.join('","')}")` : '';
|
||||
|
||||
return await this.db().selectAll(`
|
||||
${query.sql}
|
||||
${skippedResourcesSql}
|
||||
ORDER BY updated_time DESC
|
||||
LIMIT ${limit}
|
||||
`, query.params);
|
||||
}
|
||||
|
||||
private static async resetOcrStatus(resourceId: string) {
|
||||
await Resource.save({
|
||||
id: resourceId,
|
||||
ocr_error: '',
|
||||
ocr_text: '',
|
||||
ocr_status: ResourceOcrStatus.Todo,
|
||||
});
|
||||
}
|
||||
|
||||
public static serializeOcrDetails(details: RecognizeResultLine[]) {
|
||||
if (!details || !details.length) return '';
|
||||
return JSON.stringify(details);
|
||||
}
|
||||
|
||||
public static unserializeOcrDetails(s: string): RecognizeResultLine[] | null {
|
||||
if (!s) return null;
|
||||
try {
|
||||
const r = JSON.parse(s);
|
||||
if (!r) return null;
|
||||
if (!Array.isArray(r)) throw new Error('OCR details are not valid (not an array');
|
||||
return r;
|
||||
} catch (error) {
|
||||
error.message = `Could not unserialized OCR data: ${error.message}`;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
public static async resourceOcrTextsByIds(ids: string[]): Promise<ResourceEntity[]> {
|
||||
if (!ids.length) return [];
|
||||
ids = unique(ids);
|
||||
return this.modelSelectAll(`SELECT id, ocr_text FROM resources WHERE id IN ("${ids.join('","')}")`);
|
||||
}
|
||||
|
||||
public static allForNormalization(updatedTime: number, id: string, limit = 100, options: LoadOptions = null) {
|
||||
return this.modelSelectAll<ResourceEntity>(`
|
||||
SELECT ${this.selectFields(options)} FROM resources
|
||||
WHERE (updated_time, id) > (?, ?)
|
||||
AND ocr_text != ""
|
||||
AND ocr_status = ?
|
||||
ORDER BY updated_time ASC, id ASC
|
||||
LIMIT ?
|
||||
`, [updatedTime, id, ResourceOcrStatus.Done, limit]);
|
||||
}
|
||||
|
||||
public static async save(o: ResourceEntity, options: SaveOptions = null): Promise<ResourceEntity> {
|
||||
const resource = { ...o };
|
||||
|
||||
if (this.isNew(o, options)) {
|
||||
const isNew = this.isNew(o, options);
|
||||
|
||||
if (isNew) {
|
||||
const now = Date.now();
|
||||
options = { ...options, autoTimestamp: false };
|
||||
if (!resource.created_time) resource.created_time = now;
|
||||
|
@ -476,7 +577,9 @@ export default class Resource extends BaseItem {
|
|||
if (!resource.blob_updated_time) resource.blob_updated_time = now;
|
||||
}
|
||||
|
||||
return await super.save(resource, options);
|
||||
const output = await super.save(resource, options);
|
||||
if (isNew) eventManager.emit(EventName.ResourceCreate);
|
||||
return output;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import shim from '../shim';
|
||||
import { _, supportedLocalesToLanguages, defaultLocale } from '../locale';
|
||||
import eventManager from '../eventManager';
|
||||
import eventManager, { EventName } from '../eventManager';
|
||||
import BaseModel from '../BaseModel';
|
||||
import Database from '../database';
|
||||
import SyncTargetRegistry from '../SyncTargetRegistry';
|
||||
|
@ -837,6 +837,17 @@ class Setting extends BaseModel {
|
|||
isGlobal: true,
|
||||
},
|
||||
|
||||
'ocr.enabled': {
|
||||
value: false,
|
||||
type: SettingItemType.Bool,
|
||||
public: true,
|
||||
appTypes: [AppType.Desktop],
|
||||
label: () => _('Enable optical character recognition (OCR)'),
|
||||
description: () => _('When enabled, the application will scan your attachments and extract the text from it. This will allow you to search for text in these attachments.'),
|
||||
storage: SettingStorage.File,
|
||||
isGlobal: true,
|
||||
},
|
||||
|
||||
theme: {
|
||||
value: Setting.THEME_LIGHT,
|
||||
type: SettingItemType.Int,
|
||||
|
@ -1592,6 +1603,7 @@ class Setting extends BaseModel {
|
|||
'revisionService.lastProcessedChangeId': { value: 0, type: SettingItemType.Int, public: false },
|
||||
|
||||
'searchEngine.initialIndexingDone': { value: false, type: SettingItemType.Bool, public: false },
|
||||
'searchEngine.lastProcessedResource': { value: '', type: SettingItemType.String, public: false },
|
||||
|
||||
'revisionService.enabled': { section: 'revisionService', storage: SettingStorage.File, value: true, type: SettingItemType.Bool, public: true, label: () => _('Enable note history') },
|
||||
'revisionService.ttlDays': {
|
||||
|
@ -2490,7 +2502,7 @@ class Setting extends BaseModel {
|
|||
|
||||
const keys = this.changedKeys_.slice();
|
||||
this.changedKeys_ = [];
|
||||
eventManager.emit('settingsChange', { keys });
|
||||
eventManager.emit(EventName.SettingsChange, { keys });
|
||||
}
|
||||
|
||||
public static scheduleSave() {
|
||||
|
|
|
@ -23,9 +23,12 @@
|
|||
"@types/node-rsa": "1.1.4",
|
||||
"@types/react": "18.2.37",
|
||||
"@types/uuid": "9.0.7",
|
||||
"canvas": "2.11.2",
|
||||
"clean-html": "1.5.0",
|
||||
"jest": "29.7.0",
|
||||
"pdfjs-dist": "3.11.174",
|
||||
"sharp": "0.33.0",
|
||||
"tesseract.js": "4.1.2",
|
||||
"typescript": "5.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
function promiseChain(chain, defaultValue = null) {
|
||||
let output = new Promise((resolve) => {
|
||||
resolve(defaultValue);
|
||||
});
|
||||
for (let i = 0; i < chain.length; i++) {
|
||||
const f = chain[i];
|
||||
// eslint-disable-next-line promise/prefer-await-to-then -- Old code before rule was applied
|
||||
output = output.then(f);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
module.exports = { promiseChain };
|
|
@ -9,6 +9,7 @@ import { ProfileConfig } from './services/profileConfig/types';
|
|||
import * as ArrayUtils from './ArrayUtils';
|
||||
import { FolderEntity } from './services/database/types';
|
||||
import { getListRendererIds } from './services/noteList/renderers';
|
||||
import { ProcessResultsRow } from './services/searchengine/SearchEngine';
|
||||
const fastDeepEqual = require('fast-deep-equal');
|
||||
const { ALL_NOTES_FILTER_ID } = require('./reserved-ids');
|
||||
const { createSelectorCreator, defaultMemoize } = require('reselect');
|
||||
|
@ -77,6 +78,7 @@ export interface State {
|
|||
syncStarted: boolean;
|
||||
syncReport: any;
|
||||
searchQuery: string;
|
||||
searchResults: ProcessResultsRow[];
|
||||
settings: Record<string, any>;
|
||||
sharedData: any;
|
||||
appState: string;
|
||||
|
@ -134,6 +136,7 @@ export const defaultState: State = {
|
|||
syncStarted: false,
|
||||
syncReport: {},
|
||||
searchQuery: '',
|
||||
searchResults: [],
|
||||
settings: {},
|
||||
sharedData: null,
|
||||
appState: 'starting',
|
||||
|
@ -703,6 +706,11 @@ function handleHistory(draft: Draft<State>, action: any) {
|
|||
draft.backwardHistoryNotes = draft.backwardHistoryNotes.concat(currentNote).slice(-MAX_HISTORY);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'SEARCH_RESULTS_SET':
|
||||
draft.searchResults = action.value;
|
||||
break;
|
||||
|
||||
case 'FOLDER_DELETE':
|
||||
draft.backwardHistoryNotes = draft.backwardHistoryNotes.filter(note => note.parent_id !== action.id);
|
||||
draft.forwardHistoryNotes = draft.forwardHistoryNotes.filter(note => note.parent_id !== action.id);
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import eventManager from '../eventManager';
|
||||
import eventManager, { EventName } from '../eventManager';
|
||||
import { Notification } from '../models/Alarm';
|
||||
import shim from '../shim';
|
||||
import Setting from '../models/Setting';
|
||||
|
@ -173,7 +173,7 @@ export default class AlarmServiceDriverNode {
|
|||
|
||||
this.clearNotification(notification.id);
|
||||
|
||||
eventManager.emit('noteAlarmTrigger', { noteId: notification.noteId });
|
||||
eventManager.emit(EventName.NoteAlarmTrigger, { noteId: notification.noteId });
|
||||
}, interval);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { State } from '../reducer';
|
||||
import eventManager from '../eventManager';
|
||||
import eventManager, { EventName } from '../eventManager';
|
||||
import BaseService from './BaseService';
|
||||
import shim from '../shim';
|
||||
import WhenClause from './WhenClause';
|
||||
|
@ -110,12 +110,12 @@ export default class CommandService extends BaseService {
|
|||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public on(eventName: string, callback: Function) {
|
||||
public on(eventName: EventName, callback: Function) {
|
||||
eventManager.on(eventName, callback);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public off(eventName: string, callback: Function) {
|
||||
public off(eventName: EventName, callback: Function) {
|
||||
eventManager.off(eventName, callback);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import eventManager from '../eventManager';
|
||||
import eventManager, { EventName } from '../eventManager';
|
||||
import shim from '../shim';
|
||||
import { _ } from '../locale';
|
||||
import keysRegExp from './KeymapService_keysRegExp';
|
||||
|
@ -199,7 +199,7 @@ export default class KeymapService extends BaseService {
|
|||
this.lastSaveTime_ = Date.now();
|
||||
|
||||
// Refresh the menu items so that the changes are reflected
|
||||
eventManager.emit('keymapChange');
|
||||
eventManager.emit(EventName.KeymapChange);
|
||||
} catch (error) {
|
||||
const message = error.message || '';
|
||||
throw new Error(_('Error: %s', message));
|
||||
|
@ -411,12 +411,12 @@ export default class KeymapService extends BaseService {
|
|||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public on(eventName: string, callback: Function) {
|
||||
public on(eventName: EventName, callback: Function) {
|
||||
eventManager.on(eventName, callback);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public off(eventName: string, callback: Function) {
|
||||
public off(eventName: EventName, callback: Function) {
|
||||
eventManager.off(eventName, callback);
|
||||
}
|
||||
|
||||
|
|
|
@ -268,7 +268,7 @@ export default class ReportService {
|
|||
canRetry: true,
|
||||
canRetryType: CanRetryType.ResourceDownload,
|
||||
retryHandler: async () => {
|
||||
await Resource.resetErrorStatus(row.resource_id);
|
||||
await Resource.resetFetchErrorStatus(row.resource_id);
|
||||
void ResourceFetcher.instance().autoAddResources();
|
||||
},
|
||||
});
|
||||
|
|
|
@ -259,6 +259,11 @@ export default class ResourceFetcher extends BaseService {
|
|||
void this.autoAddResources(10);
|
||||
}
|
||||
|
||||
public async startAndWait() {
|
||||
await this.start();
|
||||
await this.waitForAllFinished();
|
||||
}
|
||||
|
||||
public scheduleQueueProcess() {
|
||||
if (this.scheduleQueueProcessIID_) {
|
||||
shim.clearTimeout(this.scheduleQueueProcessIID_);
|
||||
|
|
|
@ -0,0 +1,74 @@
|
|||
import sqlStringToLines from '../sqlStringToLines';
|
||||
import { SqlQuery } from '../types';
|
||||
|
||||
export default (): (SqlQuery|string)[] => {
|
||||
const queries: (SqlQuery|string)[] = [];
|
||||
|
||||
queries.push('ALTER TABLE `resources` ADD COLUMN `ocr_text` TEXT NOT NULL DEFAULT ""');
|
||||
queries.push('ALTER TABLE `resources` ADD COLUMN `ocr_details` TEXT NOT NULL DEFAULT ""');
|
||||
queries.push('ALTER TABLE `resources` ADD COLUMN `ocr_status` INT NOT NULL DEFAULT 0');
|
||||
queries.push('ALTER TABLE `resources` ADD COLUMN `ocr_error` TEXT NOT NULL DEFAULT ""');
|
||||
|
||||
const itemsNormalized = `
|
||||
CREATE TABLE items_normalized (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
title TEXT NOT NULL DEFAULT "",
|
||||
body TEXT NOT NULL DEFAULT "",
|
||||
item_id TEXT NOT NULL,
|
||||
item_type INT NOT NULL,
|
||||
user_updated_time INT NOT NULL DEFAULT 0,
|
||||
reserved1 INT NULL,
|
||||
reserved2 INT NULL,
|
||||
reserved3 INT NULL,
|
||||
reserved4 INT NULL,
|
||||
reserved5 INT NULL,
|
||||
reserved6 INT NULL
|
||||
);
|
||||
`;
|
||||
|
||||
queries.push(sqlStringToLines(itemsNormalized)[0]);
|
||||
|
||||
queries.push('CREATE INDEX items_normalized_id ON items_normalized (id)');
|
||||
queries.push('CREATE INDEX items_normalized_item_id ON items_normalized (item_id)');
|
||||
queries.push('CREATE INDEX items_normalized_item_type ON items_normalized (item_type)');
|
||||
|
||||
const tableFields = 'id, title, body, item_id, item_type, user_updated_time, reserved1, reserved2, reserved3, reserved4, reserved5, reserved6';
|
||||
|
||||
const newVirtualTableSql = `
|
||||
CREATE VIRTUAL TABLE items_fts USING fts4(
|
||||
content="items_normalized",
|
||||
notindexed="id",
|
||||
notindexed="item_id",
|
||||
notindexed="item_type",
|
||||
notindexed="user_updated_time",
|
||||
notindexed="reserved1",
|
||||
notindexed="reserved2",
|
||||
notindexed="reserved3",
|
||||
notindexed="reserved4",
|
||||
notindexed="reserved5",
|
||||
notindexed="reserved6",
|
||||
${tableFields}
|
||||
);`
|
||||
;
|
||||
|
||||
queries.push(sqlStringToLines(newVirtualTableSql)[0]);
|
||||
|
||||
queries.push(`
|
||||
CREATE TRIGGER items_fts_before_update BEFORE UPDATE ON items_normalized BEGIN
|
||||
DELETE FROM items_fts WHERE docid=old.rowid;
|
||||
END;`);
|
||||
queries.push(`
|
||||
CREATE TRIGGER items_fts_before_delete BEFORE DELETE ON items_normalized BEGIN
|
||||
DELETE FROM items_fts WHERE docid=old.rowid;
|
||||
END;`);
|
||||
queries.push(`
|
||||
CREATE TRIGGER items_after_update AFTER UPDATE ON items_normalized BEGIN
|
||||
INSERT INTO items_fts(docid, ${tableFields}) SELECT rowid, ${tableFields} FROM items_normalized WHERE new.rowid = items_normalized.rowid;
|
||||
END;`);
|
||||
queries.push(`
|
||||
CREATE TRIGGER items_after_insert AFTER INSERT ON items_normalized BEGIN
|
||||
INSERT INTO items_fts(docid, ${tableFields}) SELECT rowid, ${tableFields} FROM items_normalized WHERE new.rowid = items_normalized.rowid;
|
||||
END;`);
|
||||
|
||||
return queries;
|
||||
};
|
|
@ -0,0 +1,17 @@
|
|||
export default (sql: string) => {
|
||||
const output = [];
|
||||
const lines = sql.split('\n');
|
||||
let statement = '';
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (line === '') continue;
|
||||
if (line.substr(0, 2) === '--') continue;
|
||||
statement += line.trim();
|
||||
if (line[line.length - 1] === ',') statement += ' ';
|
||||
if (line[line.length - 1] === ';') {
|
||||
output.push(statement);
|
||||
statement = '';
|
||||
}
|
||||
}
|
||||
return output;
|
||||
};
|
|
@ -55,6 +55,13 @@ export interface UserDataValue {
|
|||
d?: Number; // deleted - 0 or 1 (default = 0)
|
||||
}
|
||||
|
||||
export enum ResourceOcrStatus {
|
||||
Todo = 0,
|
||||
Processing = 1,
|
||||
Done = 2,
|
||||
Error = 3,
|
||||
}
|
||||
|
||||
export type UserData = Record<string, Record<string, UserDataValue>>;
|
||||
|
||||
interface DatabaseTableColumn {
|
||||
|
@ -112,6 +119,10 @@ interface DatabaseTables {
|
|||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
@ -164,6 +175,60 @@ export interface ItemChangeEntity {
|
|||
'type'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ItemsFtEntity {
|
||||
'body'?: any | null;
|
||||
'id'?: any | null;
|
||||
'item_id'?: any | null;
|
||||
'item_type'?: any | null;
|
||||
'reserved1'?: any | null;
|
||||
'reserved2'?: any | null;
|
||||
'reserved3'?: any | null;
|
||||
'reserved4'?: any | null;
|
||||
'reserved5'?: any | null;
|
||||
'reserved6'?: any | null;
|
||||
'title'?: any | null;
|
||||
'user_updated_time'?: any | null;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ItemsFtsDocsizeEntity {
|
||||
'docid'?: number | null;
|
||||
'size'?: any | null;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ItemsFtsSegdirEntity {
|
||||
'end_block'?: number | null;
|
||||
'idx'?: number | null;
|
||||
'leaves_end_block'?: number | null;
|
||||
'level'?: number | null;
|
||||
'root'?: any | null;
|
||||
'start_block'?: number | null;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ItemsFtsSegmentEntity {
|
||||
'block'?: any | null;
|
||||
'blockid'?: number | null;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ItemsFtsStatEntity {
|
||||
'id'?: number | null;
|
||||
'value'?: any | null;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface ItemsNormalizedEntity {
|
||||
'body'?: string;
|
||||
'id'?: number | null;
|
||||
'item_id'?: string;
|
||||
'item_type'?: number;
|
||||
'reserved1'?: number | null;
|
||||
'reserved2'?: number | null;
|
||||
'reserved3'?: number | null;
|
||||
'reserved4'?: number | null;
|
||||
'reserved5'?: number | null;
|
||||
'reserved6'?: number | null;
|
||||
'title'?: string;
|
||||
'user_updated_time'?: number;
|
||||
'type_'?: number;
|
||||
}
|
||||
export interface KeyValueEntity {
|
||||
'id'?: number | null;
|
||||
'key'?: string;
|
||||
|
@ -267,6 +332,10 @@ export interface ResourceEntity {
|
|||
'is_shared'?: number;
|
||||
'master_key_id'?: string;
|
||||
'mime'?: string;
|
||||
'ocr_details'?: string;
|
||||
'ocr_error'?: string;
|
||||
'ocr_status'?: number;
|
||||
'ocr_text'?: string;
|
||||
'share_id'?: string;
|
||||
'size'?: number;
|
||||
'title'?: string;
|
||||
|
@ -479,6 +548,10 @@ export const databaseSchema: DatabaseTables = {
|
|||
is_shared: { type: 'number' },
|
||||
master_key_id: { type: 'string' },
|
||||
mime: { type: 'string' },
|
||||
ocr_details: { type: 'string' },
|
||||
ocr_error: { type: 'string' },
|
||||
ocr_status: { type: 'number' },
|
||||
ocr_text: { type: 'string' },
|
||||
share_id: { type: 'string' },
|
||||
size: { type: 'number' },
|
||||
title: { type: 'string' },
|
||||
|
@ -582,4 +655,58 @@ export const databaseSchema: DatabaseTables = {
|
|||
updated_time: { type: 'number' },
|
||||
type_: { type: 'number' },
|
||||
},
|
||||
items_normalized: {
|
||||
body: { type: 'string' },
|
||||
id: { type: 'number' },
|
||||
item_id: { type: 'string' },
|
||||
item_type: { type: 'number' },
|
||||
reserved1: { type: 'number' },
|
||||
reserved2: { type: 'number' },
|
||||
reserved3: { type: 'number' },
|
||||
reserved4: { type: 'number' },
|
||||
reserved5: { type: 'number' },
|
||||
reserved6: { type: 'number' },
|
||||
title: { type: 'string' },
|
||||
user_updated_time: { type: 'number' },
|
||||
type_: { type: 'number' },
|
||||
},
|
||||
items_fts: {
|
||||
body: { type: 'any' },
|
||||
id: { type: 'any' },
|
||||
item_id: { type: 'any' },
|
||||
item_type: { type: 'any' },
|
||||
reserved1: { type: 'any' },
|
||||
reserved2: { type: 'any' },
|
||||
reserved3: { type: 'any' },
|
||||
reserved4: { type: 'any' },
|
||||
reserved5: { type: 'any' },
|
||||
reserved6: { type: 'any' },
|
||||
title: { type: 'any' },
|
||||
user_updated_time: { type: 'any' },
|
||||
type_: { type: 'number' },
|
||||
},
|
||||
items_fts_segments: {
|
||||
block: { type: 'any' },
|
||||
blockid: { type: 'number' },
|
||||
type_: { type: 'number' },
|
||||
},
|
||||
items_fts_segdir: {
|
||||
end_block: { type: 'number' },
|
||||
idx: { type: 'number' },
|
||||
leaves_end_block: { type: 'number' },
|
||||
level: { type: 'number' },
|
||||
root: { type: 'any' },
|
||||
start_block: { type: 'number' },
|
||||
type_: { type: 'number' },
|
||||
},
|
||||
items_fts_docsize: {
|
||||
docid: { type: 'number' },
|
||||
size: { type: 'any' },
|
||||
type_: { type: 'number' },
|
||||
},
|
||||
items_fts_stat: {
|
||||
id: { type: 'number' },
|
||||
value: { type: 'any' },
|
||||
type_: { type: 'number' },
|
||||
},
|
||||
};
|
|
@ -156,18 +156,23 @@ export default class InteropService_Importer_Md_frontmatter extends InteropServi
|
|||
}
|
||||
|
||||
// Imports a Markdown file with YAML front matter: the note body is imported by
// the base class, the front-matter metadata is merged into the note, and any
// listed tags are attached. Returns the saved note.
//
// Any error is re-thrown with the file path prepended so that a failure in a
// bulk import can be traced back to the offending file.
public async importFile(filePath: string, parentFolderId: string) {
	try {
		const note = await super.importFile(filePath, parentFolderId);
		const { metadata, tags } = this.parseYamlNote(note.body);

		// Front-matter fields (title, dates, etc.) take precedence over the
		// values derived from the file itself.
		const updatedNote = { ...note, ...metadata };

		// autoTimestamp is disabled so the timestamps from the front matter
		// are preserved.
		const noteItem = await Note.save(updatedNote, { isNew: false, autoTimestamp: false });

		const resolvedPath = shim.fsDriver().resolve(filePath);
		this.importedNotes[resolvedPath] = noteItem;

		for (const tag of tags) { await Tag.addNoteTagByTitle(noteItem.id, tag); }

		return noteItem;
	} catch (error) {
		error.message = `On ${filePath}: ${error.message}`;
		throw error;
	}
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
import { RecognizeResult } from './utils/types';
|
||||
|
||||
// Abstract base class for OCR engines. A concrete driver (e.g.
// OcrDriverTesseract) overrides `recognize()` to extract text from an image
// file, and `dispose()` to release whatever resources it holds.
export default class OcrDriverBase {

	// Recognizes the text contained in the image at `_filePath`, using
	// `_language` as the recognition language hint. Must be overridden.
	public async recognize(_language: string, _filePath: string): Promise<RecognizeResult> {
		throw new Error('Not implemented');
	}

	// Releases driver resources (worker threads, native handles, ...).
	// Default implementation is a no-op.
	public async dispose(): Promise<void> {}

}
|
|
@ -0,0 +1,247 @@
|
|||
import { createNoteAndResource, ocrSampleDir, resourceFetcher, setupDatabaseAndSynchronizer, supportDir, switchClient, synchronizerStart } from '../../testing/test-utils';
|
||||
import OcrDriverTesseract from './drivers/OcrDriverTesseract';
|
||||
import OcrService from './OcrService';
|
||||
import { supportedMimeTypes } from './OcrService';
|
||||
import { createWorker } from 'tesseract.js';
|
||||
import Resource from '../../models/Resource';
|
||||
import { ResourceEntity, ResourceOcrStatus } from '../database/types';
|
||||
import { msleep } from '@joplin/utils/time';
|
||||
import Logger from '@joplin/utils/Logger';
|
||||
|
||||
const newService = () => {
|
||||
const driver = new OcrDriverTesseract({ createWorker });
|
||||
return new OcrService(driver);
|
||||
};
|
||||
|
||||
describe('OcrService', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
await setupDatabaseAndSynchronizer(1);
|
||||
await setupDatabaseAndSynchronizer(2);
|
||||
await switchClient(1);
|
||||
});
|
||||
|
||||
it('should process resources', async () => {
|
||||
const { resource: resource1 } = await createNoteAndResource({ path: `${ocrSampleDir}/testocr.png` });
|
||||
const { resource: resource2 } = await createNoteAndResource({ path: `${supportDir}/photo.jpg` });
|
||||
const { resource: resource3 } = await createNoteAndResource({ path: `${ocrSampleDir}/with_bullets.png` });
|
||||
|
||||
// Wait to make sure that updated_time is updated
|
||||
await msleep(1);
|
||||
|
||||
expect(await Resource.needOcrCount(supportedMimeTypes)).toBe(3);
|
||||
|
||||
const service = newService();
|
||||
await service.processResources();
|
||||
|
||||
const expectedText = 'This is a lot of 12 point text to test the\n' +
|
||||
'ocr code and see if it works on all types\n' +
|
||||
'of file format.\n' +
|
||||
'The quick brown dog jumped over the\n' +
|
||||
'lazy fox. The quick brown dog jumped\n' +
|
||||
'over the lazy fox. The quick brown dog\n' +
|
||||
'jumped over the lazy fox. The quick\n' +
|
||||
'brown dog jumped over the lazy fox.';
|
||||
const processedResource1: ResourceEntity = await Resource.load(resource1.id);
|
||||
expect(processedResource1.ocr_text).toBe(expectedText);
|
||||
expect(processedResource1.ocr_status).toBe(ResourceOcrStatus.Done);
|
||||
expect(processedResource1.ocr_error).toBe('');
|
||||
|
||||
const details = Resource.unserializeOcrDetails(processedResource1.ocr_details);
|
||||
const lines = details.map(l => l.words.map(w => w.t).join(' ')).join('\n');
|
||||
expect(lines).toBe(expectedText);
|
||||
expect(details[0].words[0].t).toBe('This');
|
||||
expect(details[0].words[0]).toEqual({ 't': 'This', 'bb': [36, 96, 92, 116], 'bl': [36, 96, 116, 116] });
|
||||
|
||||
// Also check that the resource blob has not been updated
|
||||
expect(processedResource1.blob_updated_time).toBe(resource1.blob_updated_time);
|
||||
expect(processedResource1.updated_time).toBeGreaterThan(resource1.updated_time);
|
||||
|
||||
const processedResource2: ResourceEntity = await Resource.load(resource2.id);
|
||||
expect(processedResource2.ocr_text).toBe('');
|
||||
expect(processedResource2.ocr_status).toBe(ResourceOcrStatus.Done);
|
||||
expect(processedResource2.ocr_error).toBe('');
|
||||
|
||||
const processedResource3: ResourceEntity = await Resource.load(resource3.id);
|
||||
expect(processedResource3.ocr_text).toBe('Declaration\n' +
|
||||
'| declare that:\n' +
|
||||
'® | will arrive in the UK within the next 48 hours\n' +
|
||||
'® | understand | have to provide proof of a negative COVID 19 test prior to departure to the UK (unless\n' +
|
||||
'exempt)\n' +
|
||||
'® | have provided my seat number, if relevant\n' +
|
||||
'® The information | have entered in this form is correct\n' +
|
||||
'® | understand it could be a criminal offence to provide false details and | may be prosecuted\n' +
|
||||
'If any of your information changes once you have submitted your details, such as travel details, seat number, or\n' +
|
||||
'contact information, you must complete a new form.\n' +
|
||||
'| confirm that | understand and agree with the above declarations.',
|
||||
);
|
||||
expect(processedResource3.ocr_status).toBe(ResourceOcrStatus.Done);
|
||||
expect(processedResource3.ocr_error).toBe('');
|
||||
|
||||
// Also check that the resource blob has not been updated
|
||||
expect(processedResource2.blob_updated_time).toBe(resource2.blob_updated_time);
|
||||
expect(processedResource2.updated_time).toBeGreaterThan(resource2.updated_time);
|
||||
|
||||
await service.dispose();
|
||||
});
|
||||
|
||||
it('should process PDF resources', async () => {
|
||||
const { resource } = await createNoteAndResource({ path: `${ocrSampleDir}/dummy.pdf` });
|
||||
|
||||
const service = newService();
|
||||
|
||||
await service.processResources();
|
||||
|
||||
const processedResource: ResourceEntity = await Resource.load(resource.id);
|
||||
expect(processedResource.ocr_text).toBe('Dummy PDF file');
|
||||
expect(processedResource.ocr_status).toBe(ResourceOcrStatus.Done);
|
||||
expect(processedResource.ocr_error).toBe('');
|
||||
|
||||
await service.dispose();
|
||||
});
|
||||
|
||||
it('should handle case where resource blob has not yet been downloaded', async () => {
|
||||
await createNoteAndResource({ path: `${ocrSampleDir}/dummy.pdf` });
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await msleep(1);
|
||||
|
||||
const service = newService();
|
||||
|
||||
await service.processResources();
|
||||
|
||||
{
|
||||
const resource: ResourceEntity = (await Resource.all())[0];
|
||||
expect(resource.ocr_text).toBe('');
|
||||
expect(resource.ocr_error).toBe('');
|
||||
expect(resource.ocr_status).toBe(ResourceOcrStatus.Todo);
|
||||
}
|
||||
|
||||
await resourceFetcher().startAndWait();
|
||||
|
||||
await service.processResources();
|
||||
|
||||
{
|
||||
const resource: ResourceEntity = (await Resource.all())[0];
|
||||
expect(resource.ocr_text).toBe('Dummy PDF file');
|
||||
expect(resource.ocr_error).toBe('');
|
||||
expect(resource.ocr_status).toBe(ResourceOcrStatus.Done);
|
||||
}
|
||||
|
||||
await service.dispose();
|
||||
});
|
||||
|
||||
it('should handle case where resource blob cannot be downloaded', async () => {
|
||||
await createNoteAndResource({ path: `${ocrSampleDir}/dummy.pdf` });
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
const resource: ResourceEntity = (await Resource.all())[0];
|
||||
|
||||
// ----------------------------------------------------------------
|
||||
// Fetch status is an error so OCR status will be an error too
|
||||
// ----------------------------------------------------------------
|
||||
|
||||
await Resource.setLocalState(resource.id, {
|
||||
resource_id: resource.id,
|
||||
fetch_status: Resource.FETCH_STATUS_ERROR,
|
||||
fetch_error: 'cannot be downloaded',
|
||||
});
|
||||
|
||||
const service = newService();
|
||||
|
||||
// The service will print a warnign so we disable it in tests
|
||||
Logger.globalLogger.enabled = false;
|
||||
await service.processResources();
|
||||
Logger.globalLogger.enabled = true;
|
||||
|
||||
{
|
||||
const resource: ResourceEntity = (await Resource.all())[0];
|
||||
expect(resource.ocr_text).toBe('');
|
||||
expect(resource.ocr_error).toContain('Cannot process resource');
|
||||
expect(resource.ocr_error).toContain('cannot be downloaded');
|
||||
expect(resource.ocr_status).toBe(ResourceOcrStatus.Error);
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------
|
||||
// After the fetch status is reset and the resource downloaded, it
|
||||
// should also retry OCR and succeed.
|
||||
// ----------------------------------------------------------------
|
||||
|
||||
await Resource.resetFetchErrorStatus(resource.id);
|
||||
|
||||
await resourceFetcher().startAndWait();
|
||||
|
||||
await service.processResources();
|
||||
|
||||
{
|
||||
const resource: ResourceEntity = (await Resource.all())[0];
|
||||
expect(resource.ocr_text).toBe('Dummy PDF file');
|
||||
expect(resource.ocr_error).toBe('');
|
||||
expect(resource.ocr_status).toBe(ResourceOcrStatus.Done);
|
||||
}
|
||||
|
||||
await service.dispose();
|
||||
});
|
||||
|
||||
it('should handle conflicts if two clients process the same resource then sync', async () => {
|
||||
await createNoteAndResource({ path: `${ocrSampleDir}/dummy.pdf` });
|
||||
|
||||
const service1 = newService();
|
||||
await synchronizerStart();
|
||||
await service1.processResources();
|
||||
|
||||
await switchClient(2);
|
||||
|
||||
await synchronizerStart();
|
||||
await msleep(1);
|
||||
await resourceFetcher().startAndWait();
|
||||
const service2 = newService();
|
||||
await service2.processResources();
|
||||
await synchronizerStart();
|
||||
const expectedResouceUpatedTime = (await Resource.all())[0].updated_time;
|
||||
|
||||
await switchClient(1);
|
||||
|
||||
await synchronizerStart();
|
||||
|
||||
// A conflict happened during sync, but it is resolved by keeping the
|
||||
// remote version.
|
||||
|
||||
expect((await Resource.all()).length).toBe(1);
|
||||
|
||||
{
|
||||
const resource: ResourceEntity = (await Resource.all())[0];
|
||||
expect(resource.ocr_text).toBe('Dummy PDF file');
|
||||
expect(resource.ocr_error).toBe('');
|
||||
expect(resource.ocr_status).toBe(ResourceOcrStatus.Done);
|
||||
expect(resource.updated_time).toBe(expectedResouceUpatedTime);
|
||||
}
|
||||
|
||||
await service1.dispose();
|
||||
await service2.dispose();
|
||||
});
|
||||
|
||||
// Use this to quickly test with specific images:
|
||||
|
||||
// it('should process resources 2', async () => {
|
||||
// await createNoteAndResource({ path: `${require('os').homedir()}/Desktop/AllClients.png` });
|
||||
|
||||
// const service = newService();
|
||||
// await service.processResources();
|
||||
|
||||
// console.info(await Resource.all());
|
||||
|
||||
// await service.dispose();
|
||||
// });
|
||||
|
||||
});
|
|
@ -0,0 +1,206 @@
|
|||
import { toIso639 } from '../../locale';
|
||||
import Resource from '../../models/Resource';
|
||||
import Setting from '../../models/Setting';
|
||||
import shim from '../../shim';
|
||||
import { ResourceEntity, ResourceOcrStatus } from '../database/types';
|
||||
import OcrDriverBase from './OcrDriverBase';
|
||||
import { RecognizeResult } from './utils/types';
|
||||
import { Minute } from '@joplin/utils/time';
|
||||
import Logger from '@joplin/utils/Logger';
|
||||
import filterOcrText from './utils/filterOcrText';
|
||||
import TaskQueue from '../../TaskQueue';
|
||||
import eventManager, { EventName } from '../../eventManager';
|
||||
|
||||
const logger = Logger.create('OcrService');

// From: https://github.com/naptha/tesseract.js/blob/master/docs/image-format.md
// PDFs are not handled by Tesseract directly — OcrService first splits them
// into one image per page before recognition.
export const supportedMimeTypes = [
	'application/pdf',
	'image/bmp',
	'image/jpeg',
	'image/jpg',
	'image/png',
	'image/webp',
	'image/x-portable-bitmap',
];

// Short human-readable description of a resource, used in log messages.
const resourceInfo = (resource: ResourceEntity) => {
	return `${resource.id} (type ${resource.mime})`;
};
|
||||
|
||||
export default class OcrService {
|
||||
|
||||
private driver_: OcrDriverBase;
|
||||
private isRunningInBackground_ = false;
|
||||
private maintenanceTimer_: any = null;
|
||||
private pdfExtractDir_: string = null;
|
||||
private isProcessingResources_ = false;
|
||||
private recognizeQueue_: TaskQueue = null;
|
||||
|
||||
public constructor(driver: OcrDriverBase) {
|
||||
this.driver_ = driver;
|
||||
this.recognizeQueue_ = new TaskQueue('recognize', logger);
|
||||
this.recognizeQueue_.setConcurrency(5);
|
||||
this.recognizeQueue_.keepTaskResults = false;
|
||||
}
|
||||
|
||||
private async pdfExtractDir(): Promise<string> {
|
||||
if (this.pdfExtractDir_ !== null) return this.pdfExtractDir_;
|
||||
const p = `${Setting.value('tempDir')}/ocr_pdf_extract`;
|
||||
await shim.fsDriver().mkdir(p);
|
||||
this.pdfExtractDir_ = p;
|
||||
return this.pdfExtractDir_;
|
||||
}
|
||||
|
||||
public get running() {
|
||||
return this.runInBackground;
|
||||
}
|
||||
|
||||
private async recognize(language: string, resource: ResourceEntity): Promise<RecognizeResult> {
|
||||
if (resource.encryption_applied) throw new Error(`Cannot OCR encrypted resource: ${resource.id}`);
|
||||
|
||||
const resourceFilePath = Resource.fullPath(resource);
|
||||
|
||||
if (resource.mime === 'application/pdf') {
|
||||
const imageFilePaths = await shim.pdfToImages(resourceFilePath, await this.pdfExtractDir());
|
||||
const results: RecognizeResult[] = [];
|
||||
|
||||
let pageIndex = 0;
|
||||
for (const imageFilePath of imageFilePaths) {
|
||||
logger.info(`Recognize: ${resourceInfo(resource)}: Processing PDF page ${pageIndex + 1} / ${imageFilePaths.length}...`);
|
||||
results.push(await this.driver_.recognize(language, imageFilePath));
|
||||
pageIndex++;
|
||||
}
|
||||
|
||||
for (const imageFilePath of imageFilePaths) {
|
||||
await shim.fsDriver().remove(imageFilePath);
|
||||
}
|
||||
|
||||
return {
|
||||
text: results.map(r => r.text).join('\n'),
|
||||
};
|
||||
} else {
|
||||
return this.driver_.recognize(language, resourceFilePath);
|
||||
}
|
||||
}
|
||||
|
||||
public async dispose() {
|
||||
await this.driver_.dispose();
|
||||
}
|
||||
|
||||
public async processResources() {
|
||||
if (this.isProcessingResources_) return;
|
||||
|
||||
this.isProcessingResources_ = true;
|
||||
|
||||
const totalResourcesToProcess = await Resource.needOcrCount(supportedMimeTypes);
|
||||
const inProcessResourceIds: string[] = [];
|
||||
const skippedResourceIds: string[] = [];
|
||||
|
||||
logger.info(`Found ${totalResourcesToProcess} resources to process...`);
|
||||
|
||||
const makeQueueAction = (totalProcessed: number, language: string, resource: ResourceEntity) => {
|
||||
return async () => {
|
||||
logger.info(`Processing resource ${totalProcessed + 1} / ${totalResourcesToProcess}: ${resourceInfo(resource)}...`);
|
||||
|
||||
const toSave: ResourceEntity = {
|
||||
id: resource.id,
|
||||
};
|
||||
|
||||
try {
|
||||
const fetchStatus = await Resource.localState(resource.id);
|
||||
|
||||
if (fetchStatus.fetch_status === Resource.FETCH_STATUS_ERROR) {
|
||||
throw new Error(`Cannot process resource ${resourceInfo(resource)} because it cannot be fetched from the server: ${fetchStatus.fetch_error}`);
|
||||
}
|
||||
|
||||
if (fetchStatus.fetch_status !== Resource.FETCH_STATUS_DONE) {
|
||||
skippedResourceIds.push(resource.id);
|
||||
logger.info(`Skipping resource ${resourceInfo(resource)} because it has not been downloaded yet`);
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await this.recognize(language, resource);
|
||||
toSave.ocr_status = ResourceOcrStatus.Done;
|
||||
toSave.ocr_text = filterOcrText(result.text);
|
||||
toSave.ocr_details = Resource.serializeOcrDetails(result.lines),
|
||||
toSave.ocr_error = '';
|
||||
} catch (error) {
|
||||
const errorMessage = typeof error === 'string' ? error : error?.message;
|
||||
logger.warn(`Could not process resource ${resourceInfo(resource)}`, error);
|
||||
toSave.ocr_status = ResourceOcrStatus.Error;
|
||||
toSave.ocr_text = '';
|
||||
toSave.ocr_details = '';
|
||||
toSave.ocr_error = errorMessage || 'Unknown error';
|
||||
}
|
||||
|
||||
await Resource.save(toSave);
|
||||
};
|
||||
};
|
||||
|
||||
try {
|
||||
const language = toIso639(Setting.value('locale'));
|
||||
|
||||
let totalProcessed = 0;
|
||||
|
||||
while (true) {
|
||||
const resources = await Resource.needOcr(supportedMimeTypes, skippedResourceIds.concat(inProcessResourceIds), 100, {
|
||||
fields: [
|
||||
'id',
|
||||
'mime',
|
||||
'file_extension',
|
||||
'encryption_applied',
|
||||
],
|
||||
});
|
||||
|
||||
if (!resources.length) break;
|
||||
|
||||
for (const resource of resources) {
|
||||
inProcessResourceIds.push(resource.id);
|
||||
await this.recognizeQueue_.pushAsync(resource.id, makeQueueAction(totalProcessed++, language, resource));
|
||||
}
|
||||
}
|
||||
|
||||
await this.recognizeQueue_.waitForAll();
|
||||
|
||||
if (totalProcessed) {
|
||||
eventManager.emit(EventName.OcrServiceResourcesProcessed);
|
||||
}
|
||||
|
||||
logger.info(`${totalProcessed} resources have been processed.`);
|
||||
} finally {
|
||||
this.isProcessingResources_ = false;
|
||||
}
|
||||
}
|
||||
|
||||
public async maintenance() {
|
||||
await this.processResources();
|
||||
}
|
||||
|
||||
public async runInBackground() {
|
||||
if (this.isRunningInBackground_) return;
|
||||
|
||||
this.isRunningInBackground_ = true;
|
||||
|
||||
if (this.maintenanceTimer_) return;
|
||||
|
||||
logger.info('Starting background service...');
|
||||
|
||||
await this.maintenance();
|
||||
|
||||
this.maintenanceTimer_ = shim.setInterval(async () => {
|
||||
await this.maintenance();
|
||||
this.maintenanceTimer_ = null;
|
||||
}, 5 * Minute);
|
||||
}
|
||||
|
||||
public async stopRunInBackground() {
|
||||
logger.info('Stopping background service...');
|
||||
|
||||
if (this.maintenanceTimer_) shim.clearInterval(this.maintenanceTimer_);
|
||||
this.maintenanceTimer_ = null;
|
||||
this.isRunningInBackground_ = false;
|
||||
await this.recognizeQueue_.stop();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,187 @@
|
|||
import { RecognizeResult, RecognizeResultBoundingBox, RecognizeResultLine, RecognizeResultWord } from '../utils/types';
|
||||
import { Worker, WorkerOptions, createWorker, RecognizeResult as TesseractRecognizeResult } from 'tesseract.js';
|
||||
import OcrDriverBase from '../OcrDriverBase';
|
||||
import { Minute } from '@joplin/utils/time';
|
||||
import shim from '../../../shim';
|
||||
import Logger from '@joplin/utils/Logger';
|
||||
|
||||
const logger = Logger.create('OcrDriverTesseract');
|
||||
|
||||
// Minimal subset of the tesseract.js module used by this driver — allows
// injecting a mock in tests.
interface Tesseract {
	createWorker: typeof createWorker;
}

// A pooled Tesseract worker plus the bookkeeping needed to reuse or terminate
// it.
interface WorkerWrapper {
	id: number; // Unique id, used in logs and for targeted termination
	busy: boolean; // True while a recognize() call is using this worker
	instance: Worker;
}

// Monotonically increasing id assigned to each created worker.
let workerId_ = 1;

// Converts a tesseract.js bounding box object into the flat array form used
// by RecognizeResultBoundingBox.
//
// NOTE(review): the order produced here is [x0, x1, y0, y1], but the comment
// on RecognizeResultBoundingBox declares [x0, y0, x1, y1]. One of the two is
// wrong — verify against consumers of `bb`/`bl` before relying on the order.
const formatTesseractBoundingBox = (boundingBox: Tesseract.Bbox): RecognizeResultBoundingBox => {
	return [boundingBox.x0, boundingBox.x1, boundingBox.y0, boundingBox.y1];
};

// Empirically, it seems anything below 70 is not usable. Between 70 and 75 it's
// hit and miss, but often it's good enough that we should keep the result.
// Above this is usually reliable.
const minConfidence = 70;
|
||||
|
||||
// OCR driver backed by tesseract.js. Maintains a pool of workers per
// language; workers are created lazily, reused when idle, and terminated
// either on dispose() or when a recognition times out.
export default class OcrDriverTesseract extends OcrDriverBase {

	private tesseract_: Tesseract = null;
	// Optional overrides for where tesseract.js loads its worker script and
	// WASM core from (used by the desktop app to point at bundled files).
	private workerPath_: string|null = null;
	private corePath_: string|null = null;
	// Worker pool, keyed by language code.
	private workers_: Record<string, WorkerWrapper[]> = {};

	public constructor(tesseract: Tesseract, workerPath: string|null = null, corePath: string|null = null) {
		super();
		this.tesseract_ = tesseract;
		this.workerPath_ = workerPath;
		this.corePath_ = corePath;
	}

	// Returns an idle worker for `language`, creating and initializing a new
	// one when none is free. The returned worker is marked busy; callers must
	// release it via releaseWorker() when done.
	private async acquireWorker(language: string) {
		if (!this.workers_[language]) this.workers_[language] = [];

		const existingWorker = this.workers_[language].find(w => !w.busy);

		if (existingWorker) {
			existingWorker.busy = true;
			return existingWorker;
		}

		const createWorkerOptions: Partial<WorkerOptions> = {
			// Load the worker script from a real path rather than a blob URL.
			workerBlobURL: false,
		};

		if (this.workerPath_) createWorkerOptions.workerPath = this.workerPath_;
		if (this.corePath_) createWorkerOptions.corePath = this.corePath_;

		const worker = await this.tesseract_.createWorker(createWorkerOptions);

		await worker.loadLanguage(language);
		await worker.initialize(language);

		const output: WorkerWrapper = {
			id: workerId_++,
			instance: worker,
			busy: true,
		};

		logger.info(`Created worker: ${output.id}`);

		this.workers_[language].push(output);

		return output;
	}

	// Terminates every pooled worker and empties the pool.
	public async dispose() {
		for (const [language, workers] of Object.entries(this.workers_)) {
			for (const w of workers) {
				await w.instance.terminate();
			}
			this.workers_[language] = [];
		}
	}

	// Terminates the worker with the given id (whatever language pool it is
	// in) and removes it from the pool. No-op when the id is not found.
	private async terminateWorker(id: number) {
		for (const [, workers] of Object.entries(this.workers_)) {
			const idx = workers.findIndex(w => w.id === id);
			if (idx < 0) continue;

			await workers[idx].instance.terminate();
			workers.splice(idx, 1);
			break;
		}
	}

	// Returns a worker to the pool so acquireWorker() can hand it out again.
	private async releaseWorker(worker: WorkerWrapper) {
		worker.busy = false;
	}

	// Recognizes the text in `filePath`. Lines below the confidence threshold
	// are dropped; the returned `text` is rebuilt from the kept lines only.
	// The whole operation is aborted (worker terminated, promise rejected)
	// after a 10-minute timeout.
	//
	// NOTE(review): on the failure path (catch below) the worker is not
	// released and the timeout is not cleared, so a failed recognition appears
	// to leak a busy worker and leave the timer pending — confirm whether this
	// is intentional (e.g. the worker is assumed unusable after an error).
	public async recognize(language: string, filePath: string): Promise<RecognizeResult> {
		// eslint-disable-next-line no-async-promise-executor -- can't think of any way to handle the timeout without using `new Promise`
		return new Promise(async (resolve, reject) => {
			const worker = await this.acquireWorker(language);

			let hasTimedOut = false;
			const terminateTimeout_ = shim.setTimeout(async () => {
				await this.terminateWorker(worker.id);
				hasTimedOut = true;
				reject(new Error(`Recognize operation timed out on: ${filePath}`));
			}, 10 * Minute);

			let result: TesseractRecognizeResult = null;

			try {
				// Only block-level output is requested; the other formats are
				// disabled to keep the result small.
				result = await worker.instance.recognize(filePath, {}, {
					text: false,
					blocks: true,
					hocr: false,
					tsv: false,
				});
			} catch (error) {
				error.message = `Recognition failed on: ${filePath}: ${error.message}`;
				// If the timeout already fired, the promise was rejected there.
				if (!hasTimedOut) reject(error);
				return;
			}

			if (hasTimedOut) return;

			shim.clearTimeout(terminateTimeout_);

			await this.releaseWorker(worker);

			interface GoodParagraph {
				text: string;
			}

			const goodParagraphs: GoodParagraph[] = [];
			let goodLines: RecognizeResultLine[] = [];

			for (const paragraph of result.data.paragraphs) {
				const lines: RecognizeResultLine[] = [];

				for (const line of paragraph.lines) {
					// If the line confidence is above the threshold we keep the
					// whole text. The confidence of individual words will vary
					// and may be below the threshold, but there's a chance they
					// will still be correct if the line as a whole is well
					// recognised.
					if (line.confidence < minConfidence) continue;

					const goodWords: RecognizeResultWord[] = line.words.map(w => {
						const output: RecognizeResultWord = {
							t: w.text,
							bb: formatTesseractBoundingBox(w.bbox),
						};

						// Baseline is optional — only attach it when Tesseract
						// actually detected one.
						if (w.baseline && w.baseline.has_baseline) output.bl = formatTesseractBoundingBox(w.baseline);

						return output;
					});

					lines.push({
						words: goodWords,
					});
				}

				goodParagraphs.push({
					text: lines.map(l => l.words.map(w => w.t).join(' ')).join('\n'),
				});

				goodLines = goodLines.concat(lines);
			}

			resolve({
				// Note that Tesseract provides a `.text` property too, but it's the
				// concatenation of all lines, even those with a low confidence
				// score, so we recreate it here based on the good lines.
				text: goodParagraphs.map(p => p.text).join('\n'),
				lines: goodLines,
			});
		});
	}

}
|
|
@ -0,0 +1,28 @@
|
|||
import filterOcrText from './filterOcrText';
|
||||
|
||||
// Each entry is [input, expected output]. Inputs made only of punctuation and
// symbols are expected to be filtered out entirely.
const testData: string[][] = [
	['— !',
		'',
	],

	[
		`- = = — ‘ =
—`,
		'',
	],

	['', ''],

	[' testing ', 'testing'],

];

describe('filterOcrText', () => {

	it('should filter text', () => {
		for (const [input, expected] of testData) {
			expect(filterOcrText(input)).toBe(expected);
		}
	});

});
|
|
@ -0,0 +1,8 @@
|
|||
export default (text: string) => {
|
||||
// Remove all non-letter characters from the string
|
||||
const filtered = text.replace(/\P{Letter}/ug, '');
|
||||
// If there's nothing left, this is most likely an invalid detection, so we
|
||||
// clear the string.
|
||||
if (!filtered.trim()) return '';
|
||||
return text.trim();
|
||||
};
|
|
@ -0,0 +1,23 @@
|
|||
export const emptyRecognizeResult = (): RecognizeResult => {
|
||||
return {
|
||||
text: '',
|
||||
lines: [],
|
||||
};
|
||||
};
|
||||
|
||||
// Flat bounding box attached to recognized words.
//
// NOTE(review): the inline comment below declares the order x0, y0, x1, y1,
// but OcrDriverTesseract's formatTesseractBoundingBox builds [x0, x1, y0, y1]
// — one of the two is wrong; confirm which order consumers expect.
export type RecognizeResultBoundingBox = [number, number, number, number]; // x0, y0, x1, y1

// A single recognized word and its geometry. Short key names because this is
// serialized into the resource's `ocr_details` column.
export interface RecognizeResultWord {
	t: string; // Recognized text of the word
	bb: RecognizeResultBoundingBox; // Bounding box;
	bl?: RecognizeResultBoundingBox; // Baseline — only present when the engine reports one
}

// One recognized line: its words, in reading order.
export interface RecognizeResultLine {
	words: RecognizeResultWord[];
}

// Result of an OCR pass over a single file.
export interface RecognizeResult {
	text: string; // Full recognized text, lines joined with newlines
	lines?: RecognizeResultLine[]; // We do not store detailed data for PDFs
}
|
|
@ -1,6 +1,6 @@
|
|||
/* eslint-disable multiline-comment-style */
|
||||
|
||||
import eventManager from '../../../eventManager';
|
||||
import eventManager, { EventName } from '../../../eventManager';
|
||||
import Setting, { SettingItem as InternalSettingItem, SettingSectionSource } from '../../../models/Setting';
|
||||
import Plugin from '../Plugin';
|
||||
import { SettingItem, SettingSection } from './types';
|
||||
|
@ -186,7 +186,7 @@ export default class JoplinSettings {
|
|||
*/
|
||||
public async onChange(handler: ChangeHandler): Promise<void> {
|
||||
// Filter out keys that are not related to this plugin
|
||||
eventManager.on('settingsChange', (event: ChangeEvent) => {
|
||||
eventManager.on(EventName.SettingsChange, (event: ChangeEvent) => {
|
||||
const keys = event.keys
|
||||
.filter(k => k.indexOf(keyPrefix(this.plugin_.id)) === 0)
|
||||
.map(k => k.substr(keyPrefix(this.plugin_.id).length));
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* eslint-disable multiline-comment-style */
|
||||
|
||||
import { ModelType } from '../../../BaseModel';
|
||||
import eventManager from '../../../eventManager';
|
||||
import eventManager, { EventName } from '../../../eventManager';
|
||||
import Setting from '../../../models/Setting';
|
||||
import { FolderEntity } from '../../database/types';
|
||||
import makeListener from '../utils/makeListener';
|
||||
|
@ -87,7 +87,7 @@ export default class JoplinWorkspace {
|
|||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public async onNoteContentChange(callback: Function) {
|
||||
eventManager.on('noteContentChange', callback);
|
||||
eventManager.on(EventName.NoteContentChange, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -104,7 +104,7 @@ export default class JoplinWorkspace {
|
|||
});
|
||||
};
|
||||
|
||||
return makeListener(eventManager, 'itemChange', wrapperHandler);
|
||||
return makeListener(eventManager, EventName.ItemChange, wrapperHandler);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -112,7 +112,7 @@ export default class JoplinWorkspace {
|
|||
* called when a resource is added or deleted.
|
||||
*/
|
||||
public async onResourceChange(handler: ResourceChangeHandler): Promise<void> {
|
||||
makeListener(eventManager, 'resourceChange', handler);
|
||||
makeListener(eventManager, EventName.ResourceChange, handler);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -120,14 +120,14 @@ export default class JoplinWorkspace {
|
|||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public async onNoteAlarmTrigger(handler: Function): Promise<Disposable> {
|
||||
return makeListener(eventManager, 'noteAlarmTrigger', handler);
|
||||
return makeListener(eventManager, EventName.NoteAlarmTrigger, handler);
|
||||
}
|
||||
|
||||
/**
|
||||
* Called when the synchronisation process is starting.
|
||||
*/
|
||||
public async onSyncStart(handler: SyncStartHandler): Promise<Disposable> {
|
||||
return makeListener(eventManager, 'syncStart', handler);
|
||||
return makeListener(eventManager, EventName.SyncStart, handler);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -135,7 +135,7 @@ export default class JoplinWorkspace {
|
|||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
public async onSyncComplete(callback: Function): Promise<Disposable> {
|
||||
return makeListener(eventManager, 'syncComplete', callback);
|
||||
return makeListener(eventManager, EventName.SyncComplete, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import { EventManager } from '../../../eventManager';
|
||||
import { EventManager, EventName } from '../../../eventManager';
|
||||
import { Disposable } from '../api/types';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
export default function(eventManager: EventManager, eventName: string, callback: Function): Disposable {
|
||||
export default function(eventManager: EventManager, eventName: EventName, callback: Function): Disposable {
|
||||
eventManager.on(eventName, callback);
|
||||
|
||||
return {};
|
||||
|
|
|
@ -5,6 +5,7 @@ import { ErrorBadRequest, ErrorMethodNotAllowed } from '../utils/errors';
|
|||
import requestFields from '../utils/requestFields';
|
||||
import collectionToPaginatedResults from '../utils/collectionToPaginatedResults';
|
||||
import BaseItem from '../../../models/BaseItem';
|
||||
import { NoteEntity } from '../../database/types';
|
||||
import SearchEngineUtils, { NotesForQueryOptions } from '../../searchengine/SearchEngineUtils';
|
||||
|
||||
export default async function(request: Request) {
|
||||
|
@ -15,7 +16,7 @@ export default async function(request: Request) {
|
|||
|
||||
const modelType = request.query.type ? BaseModel.modelNameToType(request.query.type) : BaseModel.TYPE_NOTE;
|
||||
|
||||
let results = [];
|
||||
let results: NoteEntity[] = [];
|
||||
|
||||
if (modelType !== BaseItem.TYPE_NOTE) {
|
||||
const ModelClass = BaseItem.getClassByItemType(modelType);
|
||||
|
@ -32,7 +33,7 @@ export default async function(request: Request) {
|
|||
...defaultLoadOptions(request, ModelType.Note),
|
||||
appendWildCards: true,
|
||||
};
|
||||
results = await SearchEngineUtils.notesForQuery(query, false, options);
|
||||
results = (await SearchEngineUtils.notesForQuery(query, false, options)).notes;
|
||||
}
|
||||
|
||||
return collectionToPaginatedResults(modelType, results, request);
|
||||
|
|
|
@ -0,0 +1,84 @@
|
|||
import Resource from '../../models/Resource';
|
||||
import { db, msleep, setupDatabaseAndSynchronizer, switchClient } from '../../testing/test-utils';
|
||||
import { ResourceOcrStatus } from '../database/types';
|
||||
import SearchEngine from './SearchEngine';
|
||||
|
||||
const newSearchEngine = () => {
|
||||
const engine = new SearchEngine();
|
||||
engine.setDb(db());
|
||||
return engine;
|
||||
};
|
||||
|
||||
describe('SearchEngine.resources', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
global.console = require('console');
|
||||
await setupDatabaseAndSynchronizer(1);
|
||||
await switchClient(1);
|
||||
});
|
||||
|
||||
it('should index resources', async () => {
|
||||
const engine = newSearchEngine();
|
||||
|
||||
await Resource.save({
|
||||
id: '00000000000000000000000000000001',
|
||||
mime: 'image/jpeg',
|
||||
title: 'Bonjour ça va ?',
|
||||
ocr_status: ResourceOcrStatus.Done,
|
||||
ocr_text: 'héllô, hôw äre yoù ?',
|
||||
}, { isNew: true });
|
||||
|
||||
await engine.syncTables();
|
||||
|
||||
const normalized = await db().selectAll('select * from items_normalized');
|
||||
expect(normalized[0].title).toBe('bonjour ca va ?');
|
||||
expect(normalized[0].body).toBe('hello, how are you ?');
|
||||
});
|
||||
|
||||
it('should delete normalized data when a resource is deleted', async () => {
|
||||
const engine = newSearchEngine();
|
||||
|
||||
const resource = await Resource.save({
|
||||
id: '00000000000000000000000000000001',
|
||||
mime: 'image/jpeg',
|
||||
title: 'hello',
|
||||
ocr_status: ResourceOcrStatus.Done,
|
||||
ocr_text: 'hi',
|
||||
}, { isNew: true });
|
||||
|
||||
await engine.syncTables();
|
||||
|
||||
expect((await db().selectAll('select * from items_normalized')).length).toBe(1);
|
||||
|
||||
await Resource.delete(resource.id);
|
||||
|
||||
expect((await db().selectAll('select * from items_normalized')).length).toBe(0);
|
||||
});
|
||||
|
||||
it('should sort resources', async () => {
|
||||
const engine = newSearchEngine();
|
||||
|
||||
const resourceData = [
|
||||
['abcd abcd abcd', 'efgh'],
|
||||
['abcd', 'ijkl'],
|
||||
['ijkl', 'mnop'],
|
||||
];
|
||||
|
||||
for (const [title, body] of resourceData) {
|
||||
await Resource.save({
|
||||
mime: 'image/jpeg',
|
||||
title,
|
||||
ocr_status: ResourceOcrStatus.Done,
|
||||
ocr_text: body,
|
||||
});
|
||||
await msleep(1);
|
||||
}
|
||||
|
||||
await engine.syncTables();
|
||||
|
||||
const results = await engine.search('abcd', { includeOrphanedResources: true });
|
||||
expect(results[0].title).toBe('abcd abcd abcd');
|
||||
expect(results[1].title).toBe('abcd');
|
||||
});
|
||||
|
||||
});
|
|
@ -7,8 +7,10 @@ import ItemChangeUtils from '../ItemChangeUtils';
|
|||
import shim from '../../shim';
|
||||
import filterParser, { Term } from './filterParser';
|
||||
import queryBuilder from './queryBuilder';
|
||||
import { ItemChangeEntity, NoteEntity } from '../database/types';
|
||||
import { ItemChangeEntity, NoteEntity, SqlQuery } from '../database/types';
|
||||
import Resource from '../../models/Resource';
|
||||
import JoplinDatabase from '../../JoplinDatabase';
|
||||
import NoteResource from '../../models/NoteResource';
|
||||
import isItemId from '../../models/utils/isItemId';
|
||||
import BaseItem from '../../models/BaseItem';
|
||||
import { isCallbackUrl, parseCallbackUrl } from '../../callbackUrlUtils';
|
||||
|
@ -23,13 +25,16 @@ enum SearchType {
|
|||
}
|
||||
|
||||
interface SearchOptions {
|
||||
searchType: SearchType;
|
||||
searchType?: SearchType;
|
||||
|
||||
// When this is on, the search engine automatically appends "*" to each word
|
||||
// of the query. So "hello world" is turned into "hello* world*". This
|
||||
// allows returning results quickly, in particular on mobile, and it seems
|
||||
// to be what users generally expect.
|
||||
appendWildCards?: boolean;
|
||||
|
||||
// Include resources that are not associated with any notes.
|
||||
includeOrphanedResources?: boolean;
|
||||
}
|
||||
|
||||
export interface ProcessResultsRow {
|
||||
|
@ -37,6 +42,7 @@ export interface ProcessResultsRow {
|
|||
parent_id: string;
|
||||
title: string;
|
||||
offsets: string;
|
||||
item_id: string;
|
||||
user_updated_time: number;
|
||||
user_created_time: number;
|
||||
matchinfo: Buffer;
|
||||
|
@ -111,8 +117,8 @@ export default class SearchEngine {
|
|||
return null;
|
||||
}
|
||||
|
||||
private async rebuildIndex_() {
|
||||
const notes = await this.db().selectAll('SELECT id FROM notes WHERE is_conflict = 0 AND encryption_applied = 0');
|
||||
private async doInitialNoteIndexing_() {
|
||||
const notes = await this.db().selectAll<NoteEntity>('SELECT id FROM notes WHERE is_conflict = 0 AND encryption_applied = 0');
|
||||
const noteIds = notes.map(n => n.id);
|
||||
|
||||
const lastChangeId = await ItemChange.lastChangeId();
|
||||
|
@ -160,6 +166,7 @@ export default class SearchEngine {
|
|||
public async rebuildIndex() {
|
||||
Setting.setValue('searchEngine.lastProcessedChangeId', 0);
|
||||
Setting.setValue('searchEngine.initialIndexingDone', false);
|
||||
Setting.setValue('searchEngine.lastProcessedResource', '');
|
||||
return this.syncTables();
|
||||
}
|
||||
|
||||
|
@ -173,10 +180,8 @@ export default class SearchEngine {
|
|||
await ItemChange.waitForAllSaved();
|
||||
|
||||
if (!Setting.value('searchEngine.initialIndexingDone')) {
|
||||
await this.rebuildIndex_();
|
||||
await this.doInitialNoteIndexing_();
|
||||
Setting.setValue('searchEngine.initialIndexingDone', true);
|
||||
this.isIndexing_ = false;
|
||||
return;
|
||||
}
|
||||
|
||||
const startTime = Date.now();
|
||||
|
@ -246,6 +251,66 @@ export default class SearchEngine {
|
|||
|
||||
await ItemChangeUtils.deleteProcessedChanges();
|
||||
|
||||
interface LastProcessedResource {
|
||||
id: string;
|
||||
updated_time: number;
|
||||
}
|
||||
|
||||
const lastProcessedResource: LastProcessedResource = !Setting.value('searchEngine.lastProcessedResource') ? { updated_time: 0, id: '' } : JSON.parse(Setting.value('searchEngine.lastProcessedResource'));
|
||||
|
||||
this.logger().info('Updating items_normalized from', lastProcessedResource);
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const resources = await Resource.allForNormalization(
|
||||
lastProcessedResource.updated_time,
|
||||
lastProcessedResource.id,
|
||||
100,
|
||||
{
|
||||
fields: ['id', 'title', 'ocr_text', 'updated_time'],
|
||||
},
|
||||
);
|
||||
|
||||
if (!resources.length) break;
|
||||
|
||||
const queries: SqlQuery[] = [];
|
||||
|
||||
for (const resource of resources) {
|
||||
queries.push({
|
||||
sql: 'DELETE FROM items_normalized WHERE item_id = ? AND item_type = ?',
|
||||
params: [
|
||||
resource.id,
|
||||
ModelType.Resource,
|
||||
],
|
||||
});
|
||||
|
||||
queries.push({
|
||||
sql: `
|
||||
INSERT INTO items_normalized(item_id, item_type, title, body, user_updated_time)
|
||||
VALUES (?, ?, ?, ?, ?)`,
|
||||
params: [
|
||||
resource.id,
|
||||
ModelType.Resource,
|
||||
this.normalizeText_(resource.title),
|
||||
this.normalizeText_(resource.ocr_text),
|
||||
resource.updated_time,
|
||||
],
|
||||
});
|
||||
|
||||
report.inserted++;
|
||||
|
||||
lastProcessedResource.id = resource.id;
|
||||
lastProcessedResource.updated_time = resource.updated_time;
|
||||
}
|
||||
|
||||
await this.db().transactionExecBatch(queries);
|
||||
Setting.setValue('searchEngine.lastProcessedResource', JSON.stringify(lastProcessedResource));
|
||||
await Setting.saveAll();
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger().error('SearchEngine: Error while processing resources:', error);
|
||||
}
|
||||
|
||||
this.logger().info(sprintf('SearchEngine: Updated FTS table in %dms. Inserted: %d. Deleted: %d', Date.now() - startTime, report.inserted, report.deleted));
|
||||
|
||||
this.isIndexing_ = false;
|
||||
|
@ -279,38 +344,6 @@ export default class SearchEngine {
|
|||
return output;
|
||||
}
|
||||
|
||||
// protected calculateWeight_(offsets: any[], termCount: number) {
|
||||
// // Offset doc: https://www.sqlite.org/fts3.html#offsets
|
||||
|
||||
// // - If there's only one term in the query string, the content with the most matches goes on top
|
||||
// // - If there are multiple terms, the result with the most occurences that are closest to each others go on top.
|
||||
// // eg. if query is "abcd efgh", "abcd efgh" will go before "abcd XX efgh".
|
||||
|
||||
// const occurenceCount = Math.floor(offsets.length / 4);
|
||||
|
||||
// if (termCount === 1) return occurenceCount;
|
||||
|
||||
// let spread = 0;
|
||||
// let previousDist = null;
|
||||
// for (let i = 0; i < occurenceCount; i++) {
|
||||
// const dist = offsets[i * 4 + 2];
|
||||
|
||||
// if (previousDist !== null) {
|
||||
// const delta = dist - previousDist;
|
||||
// spread += delta;
|
||||
// }
|
||||
|
||||
// previousDist = dist;
|
||||
// }
|
||||
|
||||
// // Divide the number of occurences by the spread so even if a note has many times the searched terms
|
||||
// // but these terms are very spread appart, they'll be given a lower weight than a note that has the
|
||||
// // terms once or twice but just next to each others.
|
||||
// return occurenceCount / spread;
|
||||
// }
|
||||
|
||||
|
||||
|
||||
private calculateWeightBM25_(rows: ProcessResultsRow[]) {
|
||||
// https://www.sqlite.org/fts3.html#matchinfo
|
||||
// pcnalx are the arguments passed to matchinfo
|
||||
|
@ -438,12 +471,19 @@ export default class SearchEngine {
|
|||
}
|
||||
|
||||
rows.sort((a, b) => {
|
||||
const aIsNote = a.item_type === ModelType.Note;
|
||||
const bIsNote = b.item_type === ModelType.Note;
|
||||
|
||||
if (a.fields.includes('title') && !b.fields.includes('title')) return -1;
|
||||
if (!a.fields.includes('title') && b.fields.includes('title')) return +1;
|
||||
if (a.weight < b.weight) return +1;
|
||||
if (a.weight > b.weight) return -1;
|
||||
if (a.is_todo && a.todo_completed) return +1;
|
||||
if (b.is_todo && b.todo_completed) return -1;
|
||||
|
||||
if (aIsNote && bIsNote) {
|
||||
if (a.is_todo && a.todo_completed) return +1;
|
||||
if (b.is_todo && b.todo_completed) return -1;
|
||||
}
|
||||
|
||||
if (a.user_updated_time < b.user_updated_time) return +1;
|
||||
if (a.user_updated_time > b.user_updated_time) return -1;
|
||||
return 0;
|
||||
|
@ -637,6 +677,7 @@ export default class SearchEngine {
|
|||
return [
|
||||
{
|
||||
id: item.id,
|
||||
item_id: item.id,
|
||||
parent_id: item.parent_id || '',
|
||||
matchinfo: Buffer.from(''),
|
||||
offsets: '',
|
||||
|
@ -658,6 +699,7 @@ export default class SearchEngine {
|
|||
options = {
|
||||
searchType: SearchEngine.SEARCH_TYPE_AUTO,
|
||||
appendWildCards: false,
|
||||
includeOrphanedResources: false,
|
||||
...options,
|
||||
};
|
||||
|
||||
|
@ -668,7 +710,7 @@ export default class SearchEngine {
|
|||
|
||||
if (searchType === SearchEngine.SEARCH_TYPE_BASIC) {
|
||||
searchString = this.normalizeText_(searchString);
|
||||
rows = await this.basicSearch(searchString);
|
||||
rows = (await this.basicSearch(searchString)) as any[];
|
||||
this.processResults_(rows, parsedQuery, true);
|
||||
} else {
|
||||
// SEARCH_TYPE_FTS
|
||||
|
@ -694,8 +736,48 @@ export default class SearchEngine {
|
|||
const useFts = searchType === SearchEngine.SEARCH_TYPE_FTS;
|
||||
try {
|
||||
const { query, params } = queryBuilder(parsedQuery.allTerms, useFts);
|
||||
rows = (await this.db().selectAll(query, params)) as ProcessResultsRow[];
|
||||
this.processResults_(rows, parsedQuery, !useFts);
|
||||
|
||||
rows = await this.db().selectAll<ProcessResultsRow>(query, params);
|
||||
const queryHasFilters = !!parsedQuery.allTerms.find(t => t.name !== 'text');
|
||||
|
||||
rows = rows.map(r => {
|
||||
return {
|
||||
...r,
|
||||
item_type: ModelType.Note,
|
||||
};
|
||||
});
|
||||
|
||||
if (!queryHasFilters) {
|
||||
const toSearch = parsedQuery.allTerms.map(t => t.value).join(' ');
|
||||
|
||||
let itemRows = await this.db().selectAll<ProcessResultsRow>(`
|
||||
SELECT
|
||||
id,
|
||||
title,
|
||||
user_updated_time,
|
||||
offsets(items_fts) AS offsets,
|
||||
matchinfo(items_fts, 'pcnalx') AS matchinfo,
|
||||
item_id,
|
||||
item_type
|
||||
FROM items_fts
|
||||
WHERE title MATCH ? OR body MATCH ?
|
||||
`, [toSearch, toSearch]);
|
||||
|
||||
const resourcesToNotes = await NoteResource.associatedResourceNotes(itemRows.map(r => r.item_id), { fields: ['note_id', 'parent_id'] });
|
||||
|
||||
for (const itemRow of itemRows) {
|
||||
const notes = resourcesToNotes[itemRow.item_id];
|
||||
const note = notes && notes.length ? notes[0] : null;
|
||||
itemRow.id = note ? note.note_id : null;
|
||||
itemRow.parent_id = note ? note.parent_id : null;
|
||||
}
|
||||
|
||||
if (!options.includeOrphanedResources) itemRows = itemRows.filter(r => !!r.id);
|
||||
|
||||
rows = rows.concat(itemRows);
|
||||
}
|
||||
|
||||
this.processResults_(rows as ProcessResultsRow[], parsedQuery, !useFts);
|
||||
} catch (error) {
|
||||
this.logger().warn(`Cannot execute MATCH query: ${searchString}: ${error.message}`);
|
||||
rows = [];
|
||||
|
|
|
@ -25,7 +25,7 @@ describe('services_SearchEngineUtils', () => {
|
|||
|
||||
Setting.setValue('showCompletedTodos', true);
|
||||
|
||||
const rows = await SearchEngineUtils.notesForQuery('abcd', true, null, searchEngine);
|
||||
const rows = (await SearchEngineUtils.notesForQuery('abcd', true, null, searchEngine)).notes;
|
||||
|
||||
expect(rows.length).toBe(3);
|
||||
expect(rows.map(r=>r.id)).toContain(note1.id);
|
||||
|
@ -35,7 +35,7 @@ describe('services_SearchEngineUtils', () => {
|
|||
const options: any = {};
|
||||
options.fields = ['id', 'title'];
|
||||
|
||||
const rows2 = await SearchEngineUtils.notesForQuery('abcd', true, options, searchEngine);
|
||||
const rows2 = (await SearchEngineUtils.notesForQuery('abcd', true, options, searchEngine)).notes;
|
||||
expect(rows2.length).toBe(3);
|
||||
expect(rows2.map(r=>r.id)).toContain(note1.id);
|
||||
expect(rows2.map(r=>r.id)).toContain(todo1.id);
|
||||
|
@ -51,7 +51,7 @@ describe('services_SearchEngineUtils', () => {
|
|||
|
||||
Setting.setValue('showCompletedTodos', false);
|
||||
|
||||
const rows = await SearchEngineUtils.notesForQuery('abcd', true, null, searchEngine);
|
||||
const rows = (await SearchEngineUtils.notesForQuery('abcd', true, null, searchEngine)).notes;
|
||||
|
||||
expect(rows.length).toBe(2);
|
||||
expect(rows.map(r=>r.id)).toContain(note1.id);
|
||||
|
@ -59,7 +59,7 @@ describe('services_SearchEngineUtils', () => {
|
|||
|
||||
const options: any = {};
|
||||
options.fields = ['id', 'title'];
|
||||
const rows2 = await SearchEngineUtils.notesForQuery('abcd', true, options, searchEngine);
|
||||
const rows2 = (await SearchEngineUtils.notesForQuery('abcd', true, options, searchEngine)).notes;
|
||||
expect(rows2.length).toBe(2);
|
||||
expect(rows2.map(r=>r.id)).toContain(note1.id);
|
||||
expect(rows2.map(r=>r.id)).toContain(todo1.id);
|
||||
|
@ -74,7 +74,7 @@ describe('services_SearchEngineUtils', () => {
|
|||
|
||||
Setting.setValue('showCompletedTodos', false);
|
||||
|
||||
const rows = await SearchEngineUtils.notesForQuery('abcd', false, null, searchEngine);
|
||||
const rows = (await SearchEngineUtils.notesForQuery('abcd', false, null, searchEngine)).notes;
|
||||
|
||||
expect(rows.length).toBe(3);
|
||||
expect(rows.map(r=>r.id)).toContain(note1.id);
|
||||
|
@ -95,7 +95,7 @@ describe('services_SearchEngineUtils', () => {
|
|||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const rows = await SearchEngineUtils.notesForQuery('abcd', false, { fields: [...testCase] }, searchEngine);
|
||||
const rows = (await SearchEngineUtils.notesForQuery('abcd', false, { fields: [...testCase] }, searchEngine)).notes;
|
||||
testCase.push('type_');
|
||||
expect(Object.keys(rows[0]).length).toBe(testCase.length);
|
||||
for (const field of testCase) {
|
||||
|
|
|
@ -29,7 +29,7 @@ export default class SearchEngineUtils {
|
|||
appendWildCards: options.appendWildCards,
|
||||
});
|
||||
|
||||
const noteIds = results.map((n: any) => n.id);
|
||||
const noteIds = results.map(n => n.id);
|
||||
|
||||
// We need at least the note ID to be able to sort them below so if not
|
||||
// present in field list, add it.L Also remember it was auto-added so that
|
||||
|
@ -83,6 +83,9 @@ export default class SearchEngineUtils {
|
|||
// contain references to notes that don't exist. Not clear how it can
|
||||
// happen, but anyway handle it here. Was causing this issue:
|
||||
// https://discourse.joplinapp.org/t/how-to-recover-corrupted-database/9367
|
||||
return sortedNotes.filter(n => n);
|
||||
return {
|
||||
notes: sortedNotes.filter(n => n),
|
||||
results,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -287,8 +287,7 @@ describe('Synchronizer.resources', () => {
|
|||
|
||||
{
|
||||
await synchronizerStart();
|
||||
await resourceFetcher().start();
|
||||
await resourceFetcher().waitForAllFinished();
|
||||
await resourceFetcher().startAndWait();
|
||||
}
|
||||
|
||||
await switchClient(1);
|
||||
|
|
|
@ -52,20 +52,18 @@ export default async (action: ConflictAction, ItemClass: any, remoteExists: bool
|
|||
await Note.createConflictNote(local, ItemChange.SOURCE_SYNC);
|
||||
}
|
||||
} else if (action === 'resourceConflict') {
|
||||
// ------------------------------------------------------------------------------
|
||||
// Unlike notes we always handle the conflict for resources
|
||||
// ------------------------------------------------------------------------------
|
||||
if (!remoteContent || Resource.mustHandleConflict(local, remoteContent)) {
|
||||
await Resource.createConflictResourceNote(local);
|
||||
|
||||
await Resource.createConflictResourceNote(local);
|
||||
if (remoteExists) {
|
||||
// The local content we have is no longer valid and should be re-downloaded
|
||||
await Resource.setLocalState(local.id, {
|
||||
fetch_status: Resource.FETCH_STATUS_IDLE,
|
||||
});
|
||||
}
|
||||
|
||||
if (remoteExists) {
|
||||
// The local content we have is no longer valid and should be re-downloaded
|
||||
await Resource.setLocalState(local.id, {
|
||||
fetch_status: Resource.FETCH_STATUS_IDLE,
|
||||
});
|
||||
dispatch({ type: 'SYNC_CREATED_OR_UPDATED_RESOURCE', id: local.id });
|
||||
}
|
||||
|
||||
dispatch({ type: 'SYNC_CREATED_OR_UPDATED_RESOURCE', id: local.id });
|
||||
}
|
||||
|
||||
if (['noteConflict', 'resourceConflict'].includes(action)) {
|
||||
|
|
|
@ -1,14 +1,16 @@
|
|||
'use strict';
|
||||
import shim from './shim';
|
||||
import GeolocationNode from './geolocation-node';
|
||||
import { setLocale, defaultLocale, closestSupportedLocale } from './locale';
|
||||
import FsDriverNode from './fs-driver-node';
|
||||
import Note from './models/Note';
|
||||
import Resource from './models/Resource';
|
||||
import { basename, fileExtension, safeFileExtension } from './path-utils';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as pdfJsNamespace from 'pdfjs-dist';
|
||||
import { writeFile } from 'fs/promises';
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const shim = require('./shim').default;
|
||||
const GeolocationNode = require('./geolocation-node').default;
|
||||
const { FileApiDriverLocal } = require('./file-api-driver-local');
|
||||
const { setLocale, defaultLocale, closestSupportedLocale } = require('./locale');
|
||||
const FsDriverNode = require('./fs-driver-node').default;
|
||||
const mimeUtils = require('./mime-utils.js').mime;
|
||||
const Note = require('./models/Note').default;
|
||||
const Resource = require('./models/Resource').default;
|
||||
const { _ } = require('./locale');
|
||||
const http = require('http');
|
||||
const https = require('https');
|
||||
|
@ -17,11 +19,10 @@ const toRelative = require('relative');
|
|||
const timers = require('timers');
|
||||
const zlib = require('zlib');
|
||||
const dgram = require('dgram');
|
||||
const { basename, fileExtension, safeFileExtension } = require('./path-utils');
|
||||
|
||||
const proxySettings = {};
|
||||
const proxySettings: any = {};
|
||||
|
||||
function fileExists(filePath) {
|
||||
function fileExists(filePath: string) {
|
||||
try {
|
||||
return fs.statSync(filePath).isFile();
|
||||
} catch (error) {
|
||||
|
@ -29,11 +30,11 @@ function fileExists(filePath) {
|
|||
}
|
||||
}
|
||||
|
||||
function isUrlHttps(url) {
|
||||
function isUrlHttps(url: string) {
|
||||
return url.startsWith('https');
|
||||
}
|
||||
|
||||
function resolveProxyUrl(proxyUrl) {
|
||||
function resolveProxyUrl(proxyUrl: string) {
|
||||
return (
|
||||
proxyUrl ||
|
||||
process.env['http_proxy'] ||
|
||||
|
@ -52,7 +53,7 @@ function callsites() {
|
|||
return stack;
|
||||
}
|
||||
|
||||
const gunzipFile = function(source, destination) {
|
||||
const gunzipFile = function(source: string, destination: string) {
|
||||
if (!fileExists(source)) {
|
||||
throw new Error(`No such file: ${source}`);
|
||||
}
|
||||
|
@ -67,7 +68,7 @@ const gunzipFile = function(source, destination) {
|
|||
|
||||
// callback on extract completion
|
||||
dest.on('close', () => {
|
||||
resolve();
|
||||
resolve(null);
|
||||
});
|
||||
|
||||
src.on('error', () => {
|
||||
|
@ -80,14 +81,24 @@ const gunzipFile = function(source, destination) {
|
|||
});
|
||||
};
|
||||
|
||||
function setupProxySettings(options) {
|
||||
function setupProxySettings(options: any) {
|
||||
proxySettings.maxConcurrentConnections = options.maxConcurrentConnections;
|
||||
proxySettings.proxyTimeout = options.proxyTimeout;
|
||||
proxySettings.proxyEnabled = options.proxyEnabled;
|
||||
proxySettings.proxyUrl = options.proxyUrl;
|
||||
}
|
||||
|
||||
function shimInit(options = null) {
|
||||
interface ShimInitOptions {
|
||||
sharp: any;
|
||||
keytar: any;
|
||||
React: any;
|
||||
appVersion: any;
|
||||
electronBridge: any;
|
||||
nodeSqlite: any;
|
||||
pdfJs: typeof pdfJsNamespace;
|
||||
}
|
||||
|
||||
function shimInit(options: ShimInitOptions = null) {
|
||||
options = {
|
||||
sharp: null,
|
||||
keytar: null,
|
||||
|
@ -95,13 +106,14 @@ function shimInit(options = null) {
|
|||
appVersion: null,
|
||||
electronBridge: null,
|
||||
nodeSqlite: null,
|
||||
pdfJs: null,
|
||||
...options,
|
||||
};
|
||||
|
||||
const sharp = options.sharp;
|
||||
const keytar = (shim.isWindows() || shim.isMac()) && !shim.isPortable() ? options.keytar : null;
|
||||
const appVersion = options.appVersion;
|
||||
|
||||
const pdfJs = options.pdfJs;
|
||||
|
||||
shim.setNodeSqlite(options.nodeSqlite);
|
||||
|
||||
|
@ -138,7 +150,7 @@ function shimInit(options = null) {
|
|||
return Array.from(buffer);
|
||||
};
|
||||
|
||||
shim.detectAndSetLocale = function(Setting) {
|
||||
shim.detectAndSetLocale = function(Setting: any) {
|
||||
let locale = shim.isElectron() ? shim.electronBridge().getLocale() : process.env.LANG;
|
||||
if (!locale) locale = defaultLocale();
|
||||
locale = locale.split('.');
|
||||
|
@ -178,7 +190,7 @@ function shimInit(options = null) {
|
|||
}
|
||||
};
|
||||
|
||||
const handleResizeImage_ = async function(filePath, targetPath, mime, resizeLargeImages) {
|
||||
const handleResizeImage_ = async function(filePath: string, targetPath: string, mime: string, resizeLargeImages: string) {
|
||||
const maxDim = Resource.IMAGE_MAX_DIMENSION;
|
||||
|
||||
if (shim.isElectron()) {
|
||||
|
@ -193,7 +205,7 @@ function shimInit(options = null) {
|
|||
return true;
|
||||
};
|
||||
const saveResizedImage = async () => {
|
||||
const options = {};
|
||||
const options: any = {};
|
||||
if (size.width > size.height) {
|
||||
options.width = maxDim;
|
||||
} else {
|
||||
|
@ -226,7 +238,7 @@ function shimInit(options = null) {
|
|||
const md = await image.metadata();
|
||||
|
||||
if (md.width <= maxDim && md.height <= maxDim) {
|
||||
shim.fsDriver().copy(filePath, targetPath);
|
||||
await shim.fsDriver().copy(filePath, targetPath);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -236,7 +248,7 @@ function shimInit(options = null) {
|
|||
fit: 'inside',
|
||||
withoutEnlargement: true,
|
||||
})
|
||||
.toFile(targetPath, (error, info) => {
|
||||
.toFile(targetPath, (error: any, info: any) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
|
@ -315,7 +327,7 @@ function shimInit(options = null) {
|
|||
const fileStat = await shim.fsDriver().stat(targetPath);
|
||||
resource.size = fileStat.size;
|
||||
|
||||
const saveOptions = { isNew: true };
|
||||
const saveOptions: any = { isNew: true };
|
||||
if (options.userSideValidation) saveOptions.userSideValidation = true;
|
||||
|
||||
if (isUpdate) {
|
||||
|
@ -365,7 +377,7 @@ function shimInit(options = null) {
|
|||
return newBody.join('\n\n');
|
||||
};
|
||||
|
||||
shim.attachFileToNote = async function(note, filePath, position = null, options = null) {
|
||||
shim.attachFileToNote = async function(note, filePath, position: number = null, options: any = null) {
|
||||
if (!options) options = {};
|
||||
if (note.markup_language) options.markupLanguage = note.markup_language;
|
||||
const newBody = await shim.attachFileToNoteBody(note.body, filePath, position, options);
|
||||
|
@ -390,7 +402,7 @@ function shimInit(options = null) {
|
|||
if (size.width > maxSize || size.height > maxSize) {
|
||||
console.warn(`Image is over ${maxSize}px - resizing it: ${filePath}`);
|
||||
|
||||
const options = {};
|
||||
const options: any = {};
|
||||
if (size.width > size.height) {
|
||||
options.width = maxSize;
|
||||
} else {
|
||||
|
@ -455,7 +467,7 @@ function shimInit(options = null) {
|
|||
}, options);
|
||||
};
|
||||
|
||||
shim.fetchBlob = async function(url, options) {
|
||||
shim.fetchBlob = async function(url: any, options) {
|
||||
if (!options || !options.path) throw new Error('fetchBlob: target file path is missing');
|
||||
if (!options.method) options.method = 'GET';
|
||||
// if (!('maxRetry' in options)) options.maxRetry = 5;
|
||||
|
@ -473,7 +485,7 @@ function shimInit(options = null) {
|
|||
const headers = options.headers ? options.headers : {};
|
||||
const filePath = options.path;
|
||||
|
||||
function makeResponse(response) {
|
||||
function makeResponse(response: any) {
|
||||
return {
|
||||
ok: response.statusCode < 400,
|
||||
path: filePath,
|
||||
|
@ -488,7 +500,7 @@ function shimInit(options = null) {
|
|||
};
|
||||
}
|
||||
|
||||
const requestOptions = {
|
||||
const requestOptions: any = {
|
||||
protocol: url.protocol,
|
||||
host: url.hostname,
|
||||
port: url.port,
|
||||
|
@ -504,11 +516,11 @@ function shimInit(options = null) {
|
|||
|
||||
const doFetchOperation = async () => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let file = null;
|
||||
let file: any = null;
|
||||
|
||||
const cleanUpOnError = error => {
|
||||
const cleanUpOnError = (error: any) => {
|
||||
// We ignore any unlink error as we only want to report on the main error
|
||||
fs.unlink(filePath)
|
||||
void fs.unlink(filePath)
|
||||
// eslint-disable-next-line promise/prefer-await-to-then -- Old code before rule was applied
|
||||
.catch(() => {})
|
||||
// eslint-disable-next-line promise/prefer-await-to-then -- Old code before rule was applied
|
||||
|
@ -528,11 +540,11 @@ function shimInit(options = null) {
|
|||
// Note: relative paths aren't supported
|
||||
file = fs.createWriteStream(filePath);
|
||||
|
||||
file.on('error', (error) => {
|
||||
file.on('error', (error: any) => {
|
||||
cleanUpOnError(error);
|
||||
});
|
||||
|
||||
const request = http.request(requestOptions, (response) => {
|
||||
const request = http.request(requestOptions, (response: any) => {
|
||||
response.pipe(file);
|
||||
|
||||
const isGzipped = response.headers['content-encoding'] === 'gzip';
|
||||
|
@ -550,7 +562,7 @@ function shimInit(options = null) {
|
|||
cleanUpOnError(error);
|
||||
}
|
||||
|
||||
shim.fsDriver().remove(gzipFilePath);
|
||||
await shim.fsDriver().remove(gzipFilePath);
|
||||
} else {
|
||||
resolve(makeResponse(response));
|
||||
}
|
||||
|
@ -562,7 +574,7 @@ function shimInit(options = null) {
|
|||
request.destroy(new Error(`Request timed out. Timeout value: ${requestOptions.timeout}ms.`));
|
||||
});
|
||||
|
||||
request.on('error', (error) => {
|
||||
request.on('error', (error: any) => {
|
||||
cleanUpOnError(error);
|
||||
});
|
||||
|
||||
|
@ -612,7 +624,7 @@ function shimInit(options = null) {
|
|||
return url.startsWith('https') ? shim.httpAgent_.https : shim.httpAgent_.http;
|
||||
};
|
||||
|
||||
shim.proxyAgent = (serverUrl, proxyUrl) => {
|
||||
shim.proxyAgent = (serverUrl: string, proxyUrl: string) => {
|
||||
const proxyAgentConfig = {
|
||||
keepAlive: true,
|
||||
maxSockets: proxySettings.maxConcurrentConnections,
|
||||
|
@ -686,7 +698,7 @@ function shimInit(options = null) {
|
|||
|
||||
shim.requireDynamic = (path) => {
|
||||
if (path.indexOf('.') === 0) {
|
||||
const sites = callsites();
|
||||
const sites: any = callsites();
|
||||
if (sites.length <= 1) throw new Error(`Cannot require file (1) ${path}`);
|
||||
const filename = sites[1].getFileName();
|
||||
if (!filename) throw new Error(`Cannot require file (2) ${path}`);
|
||||
|
@ -697,6 +709,60 @@ function shimInit(options = null) {
|
|||
return require(path);
|
||||
}
|
||||
};
|
||||
|
||||
shim.pdfToImages = async (pdfPath: string, outputDirectoryPath: string): Promise<string[]> => {
|
||||
// We handle both the Electron app and testing framework. Potentially
|
||||
// the same code could be use to support the CLI app.
|
||||
const isTesting = !shim.isElectron();
|
||||
|
||||
const createCanvas = () => {
|
||||
if (isTesting) {
|
||||
return require('canvas').createCanvas();
|
||||
}
|
||||
return document.createElement('canvas');
|
||||
};
|
||||
|
||||
const canvasToBuffer = async (canvas: any): Promise<Buffer> => {
|
||||
if (isTesting) {
|
||||
return canvas.toBuffer('image/jpeg', { quality: 0.8 });
|
||||
} else {
|
||||
const canvasToBlob = async (canvas: HTMLCanvasElement): Promise<Blob> => {
|
||||
return new Promise(resolve => {
|
||||
canvas.toBlob(blob => resolve(blob), 'image/jpg', 0.8);
|
||||
});
|
||||
};
|
||||
|
||||
const blob = await canvasToBlob(canvas);
|
||||
return Buffer.from(await blob.arrayBuffer());
|
||||
}
|
||||
};
|
||||
|
||||
const filePrefix = `page_${Date.now()}`;
|
||||
const output: string[] = [];
|
||||
const loadingTask = pdfJs.getDocument(pdfPath);
|
||||
const doc = await loadingTask.promise;
|
||||
|
||||
for (let pageNum = 1; pageNum <= doc.numPages; pageNum++) {
|
||||
const page = await doc.getPage(pageNum);
|
||||
const viewport = page.getViewport({ scale: 2 });
|
||||
const canvas = createCanvas();
|
||||
const ctx = canvas.getContext('2d');
|
||||
|
||||
canvas.height = viewport.height;
|
||||
canvas.width = viewport.width;
|
||||
|
||||
const renderTask = page.render({ canvasContext: ctx, viewport: viewport });
|
||||
await renderTask.promise;
|
||||
|
||||
const buffer = await canvasToBuffer(canvas);
|
||||
const filePath = `${outputDirectoryPath}/${filePrefix}_${pageNum.toString().padStart(4, '0')}.jpg`;
|
||||
output.push(filePath);
|
||||
await writeFile(filePath, buffer, 'binary');
|
||||
if (!(await shim.fsDriver().exists(filePath))) throw new Error(`Could not write to file: ${filePath}`);
|
||||
}
|
||||
|
||||
return output;
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { shimInit, setupProxySettings };
|
|
@ -26,6 +26,10 @@ let nodeSqlite_: any = null;
|
|||
|
||||
const shim = {
|
||||
Geolocation: null as any,
|
||||
electronBridge_: null as any,
|
||||
fsDriver_: null as any,
|
||||
httpAgent_: null as any,
|
||||
proxyAgent: null as any,
|
||||
|
||||
electronBridge: (): any => {
|
||||
throw new Error('Not implemented');
|
||||
|
@ -210,21 +214,21 @@ const shim = {
|
|||
|
||||
FileApiDriverLocal: null as any,
|
||||
|
||||
readLocalFileBase64: (_path: string) => {
|
||||
readLocalFileBase64: (_path: string): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
uploadBlob: (_url: string, _options: any) => {
|
||||
uploadBlob: (_url: string, _options: any): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
sjclModule: null as any,
|
||||
|
||||
randomBytes: async (_count: number) => {
|
||||
randomBytes: async (_count: number): Promise<any> => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
stringByteLength: (_s: string) => {
|
||||
stringByteLength: (_s: string): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
|
@ -243,7 +247,7 @@ const shim = {
|
|||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
imageFromDataUrl: async (_imageDataUrl: string, _filePath: string, _options: any = null) => {
|
||||
imageFromDataUrl: async (_imageDataUrl: string, _filePath: string, _options: any = null): Promise<any> => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
|
@ -251,25 +255,29 @@ const shim = {
|
|||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
pdfToImages: async (_pdfPath: string, _outputDirectoryPath: string): Promise<string[]> => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
Buffer: null as any,
|
||||
|
||||
openUrl: () => {
|
||||
openUrl: (_url: string): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
httpAgent: () => {
|
||||
httpAgent: (_url: string): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
openOrCreateFile: (_path: string, _defaultContents: any) => {
|
||||
openOrCreateFile: (_path: string, _defaultContents: any): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
waitForFrame: () => {
|
||||
waitForFrame: (): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
appVersion: () => {
|
||||
appVersion: (): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
|
@ -283,15 +291,15 @@ const shim = {
|
|||
isTestingEnv_ = v;
|
||||
},
|
||||
|
||||
pathRelativeToCwd: (_path: string) => {
|
||||
pathRelativeToCwd: (_path: string): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
showMessageBox: (_message: string, _options: any = null) => {
|
||||
showMessageBox: (_message: string, _options: any = null): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
writeImageToFile: (_image: any, _format: any, _filePath: string) => {
|
||||
writeImageToFile: (_image: any, _format: any, _filePath: string): void => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
|
@ -315,20 +323,20 @@ const shim = {
|
|||
// Having the timers wrapped in that way would also make it easier to debug timing issue and
|
||||
// find out what timers have been fired or not.
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
setTimeout: (_fn: Function, _interval: number) => {
|
||||
setTimeout: (_fn: Function, _interval: number): any=> {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types -- Old code before rule was applied
|
||||
setInterval: (_fn: Function, _interval: number) => {
|
||||
setInterval: (_fn: Function, _interval: number): any=> {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
clearTimeout: (_id: any) => {
|
||||
clearTimeout: (_id: any): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
clearInterval: (_id: any) => {
|
||||
clearInterval: (_id: any): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
|
@ -350,7 +358,7 @@ const shim = {
|
|||
return react_;
|
||||
},
|
||||
|
||||
dgram: () => {
|
||||
dgram: (): any => {
|
||||
throw new Error('Not implemented');
|
||||
},
|
||||
|
||||
|
|
|
@ -49,7 +49,7 @@ import ResourceFetcher from '../services/ResourceFetcher';
|
|||
const WebDavApi = require('../WebDavApi');
|
||||
const DropboxApi = require('../DropboxApi');
|
||||
import JoplinServerApi from '../JoplinServerApi';
|
||||
import { FolderEntity } from '../services/database/types';
|
||||
import { FolderEntity, ResourceEntity } from '../services/database/types';
|
||||
import { credentialFile, readCredentialFile } from '../utils/credentialFiles';
|
||||
import SyncTargetJoplinCloud from '../SyncTargetJoplinCloud';
|
||||
import KeychainService from '../services/keychain/KeychainService';
|
||||
|
@ -102,6 +102,7 @@ const oldTestDir = `${__dirname}/../../app-cli/tests`;
|
|||
const logDir = `${oldTestDir}/logs`;
|
||||
const baseTempDir = `${oldTestDir}/tmp/${suiteName_}`;
|
||||
const supportDir = `${oldTestDir}/support`;
|
||||
export const ocrSampleDir = `${oldTestDir}/ocr_samples`;
|
||||
|
||||
// We add a space in the data directory path as that will help uncover
|
||||
// various space-in-path issues.
|
||||
|
@ -422,6 +423,23 @@ function pluginDir(id: number = null) {
|
|||
return `${dataDir}/plugins-${id}`;
|
||||
}
|
||||
|
||||
export interface CreateNoteAndResourceOptions {
|
||||
path?: string;
|
||||
}
|
||||
|
||||
const createNoteAndResource = async (options: CreateNoteAndResourceOptions = null) => {
|
||||
options = {
|
||||
path: `${supportDir}/photo.jpg`,
|
||||
...options,
|
||||
};
|
||||
|
||||
let note = await Note.save({});
|
||||
note = await shim.attachFileToNote(note, options.path);
|
||||
const resourceIds = await Note.linkedItemIds(note.body);
|
||||
const resource: ResourceEntity = await Resource.load(resourceIds[0]);
|
||||
return { note, resource };
|
||||
};
|
||||
|
||||
async function setupDatabaseAndSynchronizer(id: number, options: any = null) {
|
||||
if (id === null) id = currentClient_;
|
||||
|
||||
|
@ -1009,4 +1027,4 @@ const simulateReadOnlyShareEnv = (shareId: string) => {
|
|||
};
|
||||
};
|
||||
|
||||
export { supportDir, createTempFile, createTestShareData, simulateReadOnlyShareEnv, waitForFolderCount, afterAllCleanUp, exportDir, synchronizerStart, afterEachCleanUp, syncTargetName, setSyncTargetName, syncDir, createTempDir, isNetworkSyncTarget, kvStore, expectThrow, logger, expectNotThrow, resourceService, resourceFetcher, tempFilePath, allSyncTargetItemsEncrypted, msleep, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, checkThrow, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, currentClientId, id, ids, sortedIds, at, createNTestNotes, createNTestFolders, createNTestTags, TestApp };
|
||||
export { supportDir, createNoteAndResource, createTempFile, createTestShareData, simulateReadOnlyShareEnv, waitForFolderCount, afterAllCleanUp, exportDir, synchronizerStart, afterEachCleanUp, syncTargetName, setSyncTargetName, syncDir, createTempDir, isNetworkSyncTarget, kvStore, expectThrow, logger, expectNotThrow, resourceService, resourceFetcher, tempFilePath, allSyncTargetItemsEncrypted, msleep, setupDatabase, revisionService, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync, checkThrow, encryptionService, loadEncryptionMasterKey, fileContentEqual, decryptionWorker, currentClientId, id, ids, sortedIds, at, createNTestNotes, createNTestFolders, createNTestTags, TestApp };
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import { execCommand } from '@joplin/utils';
|
||||
import { insertContentIntoFile, rootDir } from './tool-utils';
|
||||
import { remove } from 'fs-extra';
|
||||
|
||||
const sqlts = require('@rmp135/sql-ts').default;
|
||||
const fs = require('fs-extra');
|
||||
|
@ -38,84 +39,89 @@ const generateListRenderDependencyType = (tables: any[]) => {
|
|||
async function main() {
|
||||
// Run the CLI app once so as to generate the database file
|
||||
process.chdir(`${rootDir}/packages/app-cli`);
|
||||
await execCommand('yarn start version');
|
||||
const profileDir = `${__dirname}/__generateTypesProfile`;
|
||||
await execCommand(['yarn', 'start', '--profile', profileDir, 'version']);
|
||||
|
||||
const sqlTsConfig = {
|
||||
'client': 'sqlite3',
|
||||
'connection': {
|
||||
'filename': `${require('os').homedir()}/.config/joplindev/database.sqlite`,
|
||||
},
|
||||
'tableNameCasing': 'pascal',
|
||||
'singularTableNames': true,
|
||||
'useNullAsDefault': true, // To disable warning "sqlite does not support inserting default values"
|
||||
'excludedTables': [
|
||||
'main.notes_fts',
|
||||
'main.notes_fts_segments',
|
||||
'main.notes_fts_segdir',
|
||||
'main.notes_fts_docsize',
|
||||
'main.notes_fts_stat',
|
||||
'main.master_keys',
|
||||
],
|
||||
};
|
||||
try {
|
||||
const sqlTsConfig = {
|
||||
'client': 'sqlite3',
|
||||
'connection': {
|
||||
'filename': `${profileDir}/database.sqlite`,
|
||||
},
|
||||
'tableNameCasing': 'pascal',
|
||||
'singularTableNames': true,
|
||||
'useNullAsDefault': true, // To disable warning "sqlite does not support inserting default values"
|
||||
'excludedTables': [
|
||||
'main.notes_fts',
|
||||
'main.notes_fts_segments',
|
||||
'main.notes_fts_segdir',
|
||||
'main.notes_fts_docsize',
|
||||
'main.notes_fts_stat',
|
||||
'main.master_keys',
|
||||
],
|
||||
};
|
||||
|
||||
const definitions = await sqlts.toObject(sqlTsConfig);
|
||||
const definitions = await sqlts.toObject(sqlTsConfig);
|
||||
|
||||
definitions.tables = definitions.tables.map((t: any) => {
|
||||
t.columns.push({
|
||||
nullable: false,
|
||||
name: 'type_',
|
||||
type: 'int',
|
||||
optional: true,
|
||||
isEnum: false,
|
||||
propertyName: 'type_',
|
||||
propertyType: 'number',
|
||||
});
|
||||
|
||||
return t;
|
||||
});
|
||||
|
||||
definitions.tables = definitions.tables.map((table: any) => {
|
||||
table.columns = table.columns.map((column: any) => {
|
||||
return {
|
||||
...column,
|
||||
definitions.tables = definitions.tables.map((t: any) => {
|
||||
t.columns.push({
|
||||
nullable: false,
|
||||
name: 'type_',
|
||||
type: 'int',
|
||||
optional: true,
|
||||
};
|
||||
isEnum: false,
|
||||
propertyName: 'type_',
|
||||
propertyType: 'number',
|
||||
});
|
||||
|
||||
return t;
|
||||
});
|
||||
|
||||
return table;
|
||||
});
|
||||
definitions.tables = definitions.tables.map((table: any) => {
|
||||
table.columns = table.columns.map((column: any) => {
|
||||
return {
|
||||
...column,
|
||||
optional: true,
|
||||
};
|
||||
});
|
||||
|
||||
const tableStrings = [];
|
||||
for (const table of definitions.tables) {
|
||||
tableStrings.push(createRuntimeObject(table));
|
||||
return table;
|
||||
});
|
||||
|
||||
const tableStrings = [];
|
||||
for (const table of definitions.tables) {
|
||||
tableStrings.push(createRuntimeObject(table));
|
||||
}
|
||||
|
||||
const tsString = sqlts.fromObject(definitions, sqlTsConfig)
|
||||
.replace(/": /g, '"?: ');
|
||||
const header = `// AUTO-GENERATED BY ${__filename.substr(rootDir.length + 1)}`;
|
||||
|
||||
const targetFile = `${rootDir}/packages/lib/services/database/types.ts`;
|
||||
console.info(`Writing type definitions to ${targetFile}...`);
|
||||
|
||||
const existingContent = (await fs.pathExists(targetFile)) ? await fs.readFile(targetFile, 'utf8') : '';
|
||||
const splitted = existingContent.split('// AUTO-GENERATED BY');
|
||||
const staticContent = splitted[0];
|
||||
|
||||
const runtimeContent = `export const databaseSchema: DatabaseTables = {\n${tableStrings.join('\n')}\n};`;
|
||||
|
||||
const listRendererDependency = `type ListRendererDatabaseDependency = ${generateListRenderDependencyType(definitions.tables)};`;
|
||||
const noteListTypeFilePath = `${rootDir}/packages/lib/services/plugins/api/noteListType.ts`;
|
||||
|
||||
await fs.writeFile(targetFile, `${staticContent}\n\n${header}\n\n${tsString}\n\n${runtimeContent}`, 'utf8');
|
||||
|
||||
console.info(`Writing ListRendererDatabaseDependency type to ${noteListTypeFilePath}...`);
|
||||
|
||||
await insertContentIntoFile(
|
||||
noteListTypeFilePath,
|
||||
'// AUTO-GENERATED by generate-database-type\n',
|
||||
'\n// AUTO-GENERATED by generate-database-type',
|
||||
listRendererDependency,
|
||||
);
|
||||
} finally {
|
||||
await remove(profileDir);
|
||||
}
|
||||
|
||||
const tsString = sqlts.fromObject(definitions, sqlTsConfig)
|
||||
.replace(/": /g, '"?: ');
|
||||
const header = `// AUTO-GENERATED BY ${__filename.substr(rootDir.length + 1)}`;
|
||||
|
||||
const targetFile = `${rootDir}/packages/lib/services/database/types.ts`;
|
||||
console.info(`Writing type definitions to ${targetFile}...`);
|
||||
|
||||
const existingContent = (await fs.pathExists(targetFile)) ? await fs.readFile(targetFile, 'utf8') : '';
|
||||
const splitted = existingContent.split('// AUTO-GENERATED BY');
|
||||
const staticContent = splitted[0];
|
||||
|
||||
const runtimeContent = `export const databaseSchema: DatabaseTables = {\n${tableStrings.join('\n')}\n};`;
|
||||
|
||||
const listRendererDependency = `type ListRendererDatabaseDependency = ${generateListRenderDependencyType(definitions.tables)};`;
|
||||
const noteListTypeFilePath = `${rootDir}/packages/lib/services/plugins/api/noteListType.ts`;
|
||||
|
||||
await fs.writeFile(targetFile, `${staticContent}\n\n${header}\n\n${tsString}\n\n${runtimeContent}`, 'utf8');
|
||||
|
||||
console.info(`Writing ListRendererDatabaseDependency type to ${noteListTypeFilePath}...`);
|
||||
|
||||
await insertContentIntoFile(
|
||||
noteListTypeFilePath,
|
||||
'// AUTO-GENERATED by generate-database-type\n',
|
||||
'\n// AUTO-GENERATED by generate-database-type',
|
||||
listRendererDependency,
|
||||
);
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
|
|
|
@ -1,3 +1,10 @@
|
|||
export const Second = 1000;
|
||||
export const Minute = 60 * Second;
|
||||
export const Hour = 60 * Minute;
|
||||
export const Day = 24 * Hour;
|
||||
export const Week = 7 * Day;
|
||||
export const Month = 30 * Day;
|
||||
|
||||
export const msleep = (ms: number) => {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
};
|
||||
|
|
|
@ -46,6 +46,7 @@
|
|||
"@codemirror/state",
|
||||
"@codemirror/view",
|
||||
"@lezer/highlight",
|
||||
"@lezer/markdown",
|
||||
"@fortawesome/fontawesome-svg-core",
|
||||
"@fortawesome/free-solid-svg-icons",
|
||||
"@svgr/webpack",
|
||||
|
|
183
yarn.lock
183
yarn.lock
|
@ -6417,6 +6417,7 @@ __metadata:
|
|||
"@types/react": 18.2.37
|
||||
"@types/react-redux": 7.1.31
|
||||
"@types/styled-components": 5.1.32
|
||||
"@types/tesseract.js": 2.0.0
|
||||
async-mutex: 0.4.0
|
||||
codemirror: 5.65.9
|
||||
color: 3.2.1
|
||||
|
@ -6444,6 +6445,7 @@ __metadata:
|
|||
node-fetch: 2.6.7
|
||||
node-notifier: 10.0.1
|
||||
node-rsa: 1.1.1
|
||||
pdfjs-dist: 3.11.174
|
||||
pretty-bytes: 5.6.0
|
||||
re-resizable: 6.9.11
|
||||
react: 18.2.0
|
||||
|
@ -6462,6 +6464,7 @@ __metadata:
|
|||
styled-components: 5.3.11
|
||||
styled-system: 5.1.5
|
||||
taboverride: 4.0.3
|
||||
tesseract.js: 4.1.2
|
||||
tinymce: 5.10.6
|
||||
ts-node: 10.9.1
|
||||
typescript: 5.2.2
|
||||
|
@ -6720,6 +6723,7 @@ __metadata:
|
|||
base-64: 1.0.0
|
||||
base64-stream: 1.0.0
|
||||
builtin-modules: 3.3.0
|
||||
canvas: 2.11.2
|
||||
chokidar: 3.5.3
|
||||
clean-html: 1.5.0
|
||||
color: 3.2.1
|
||||
|
@ -6752,6 +6756,7 @@ __metadata:
|
|||
node-notifier: 10.0.1
|
||||
node-persist: 3.1.3
|
||||
node-rsa: 1.1.1
|
||||
pdfjs-dist: 3.11.174
|
||||
promise: 8.3.0
|
||||
query-string: 7.1.3
|
||||
re-reselect: 4.0.1
|
||||
|
@ -6767,6 +6772,7 @@ __metadata:
|
|||
string-to-stream: 3.0.1
|
||||
tar: 6.2.0
|
||||
tcp-port-used: 1.0.2
|
||||
tesseract.js: 4.1.2
|
||||
typescript: 5.2.2
|
||||
uglifycss: 0.0.29
|
||||
url-parse: 1.5.10
|
||||
|
@ -11014,6 +11020,15 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@types/tesseract.js@npm:2.0.0":
|
||||
version: 2.0.0
|
||||
resolution: "@types/tesseract.js@npm:2.0.0"
|
||||
dependencies:
|
||||
tesseract.js: "*"
|
||||
checksum: c9cbba2d781559d565be24d3aa518d6a8356215b5a9a41cfc6be21330a35bd113b942be0c24c7d9fc440db06568e196034b5facef1ec52b3ca584199d040efa1
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@types/tough-cookie@npm:*":
|
||||
version: 4.0.1
|
||||
resolution: "@types/tough-cookie@npm:4.0.1"
|
||||
|
@ -13912,6 +13927,13 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"bmp-js@npm:^0.1.0":
|
||||
version: 0.1.0
|
||||
resolution: "bmp-js@npm:0.1.0"
|
||||
checksum: 2f6cf7eeabae2aa50eb768122f59e9752caa97248028cb8b5cf0d9db7cf8fb3a60262aeb2c6889dd21357ab061b2fb318f21f20d2b24963ba36ead8e264c6654
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"bn.js@npm:^4.0.0, bn.js@npm:^4.1.0, bn.js@npm:^4.11.9":
|
||||
version: 4.12.0
|
||||
resolution: "bn.js@npm:4.12.0"
|
||||
|
@ -14950,6 +14972,18 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"canvas@npm:2.11.2, canvas@npm:^2.11.2":
|
||||
version: 2.11.2
|
||||
resolution: "canvas@npm:2.11.2"
|
||||
dependencies:
|
||||
"@mapbox/node-pre-gyp": ^1.0.0
|
||||
nan: ^2.17.0
|
||||
node-gyp: latest
|
||||
simple-get: ^3.0.3
|
||||
checksum: 61e554aef80022841dc836964534082ec21435928498032562089dfb7736215f039c7d99ee546b0cf10780232d9bf310950f8b4d489dc394e0fb6f6adfc97994
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"capital-case@npm:^1.0.4":
|
||||
version: 1.0.4
|
||||
resolution: "capital-case@npm:1.0.4"
|
||||
|
@ -17844,6 +17878,15 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"decompress-response@npm:^4.2.0":
|
||||
version: 4.2.1
|
||||
resolution: "decompress-response@npm:4.2.1"
|
||||
dependencies:
|
||||
mimic-response: ^2.0.0
|
||||
checksum: 4e783ca4dfe9417354d61349750fe05236f565a4415a6ca20983a311be2371debaedd9104c0b0e7b36e5f167aeaae04f84f1a0b3f8be4162f1d7d15598b8fdba
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"decompress-response@npm:^6.0.0":
|
||||
version: 6.0.0
|
||||
resolution: "decompress-response@npm:6.0.0"
|
||||
|
@ -18857,13 +18900,6 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"dommatrix@npm:^1.0.1, dommatrix@npm:^1.0.3":
|
||||
version: 1.0.3
|
||||
resolution: "dommatrix@npm:1.0.3"
|
||||
checksum: 8ac727c1a14cf8de30a5b49a3bd6b2622a661b391fe1ac54e855eaa14a857ed86d63492150b5f70f912acc24fa3acc31d750259c47e9b5801de237624b0a319f
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"dompurify@npm:^3.0.5":
|
||||
version: 3.0.5
|
||||
resolution: "dompurify@npm:3.0.5"
|
||||
|
@ -23965,6 +24001,13 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"idb-keyval@npm:^6.2.0":
|
||||
version: 6.2.1
|
||||
resolution: "idb-keyval@npm:6.2.1"
|
||||
checksum: 7c0836f832096086e99258167740181132a71dd2694c8b8454a4f5ec69114ba6d70983115153306f0b6de1c8d3bad04f67eed3dff8f50c96815b9985d6d78470
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"ieee754@npm:1.1.13":
|
||||
version: 1.1.13
|
||||
resolution: "ieee754@npm:1.1.13"
|
||||
|
@ -24894,6 +24937,13 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"is-electron@npm:^2.2.2":
|
||||
version: 2.2.2
|
||||
resolution: "is-electron@npm:2.2.2"
|
||||
checksum: de5aa8bd8d72c96675b8d0f93fab4cc21f62be5440f65bc05c61338ca27bd851a64200f31f1bf9facbaa01b3dbfed7997b2186741d84b93b63e0aff1db6a9494
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"is-extendable@npm:^0.1.0, is-extendable@npm:^0.1.1":
|
||||
version: 0.1.1
|
||||
resolution: "is-extendable@npm:0.1.1"
|
||||
|
@ -29878,6 +29928,13 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"mimic-response@npm:^2.0.0":
|
||||
version: 2.1.0
|
||||
resolution: "mimic-response@npm:2.1.0"
|
||||
checksum: 014fad6ab936657e5f2f48bd87af62a8e928ebe84472aaf9e14fec4fcb31257a5edff77324d8ac13ddc6685ba5135cf16e381efac324e5f174fb4ddbf902bf07
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"mimic-response@npm:^3.1.0":
|
||||
version: 3.1.0
|
||||
resolution: "mimic-response@npm:3.1.0"
|
||||
|
@ -30484,7 +30541,7 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"nan@npm:2.18.0":
|
||||
"nan@npm:2.18.0, nan@npm:^2.17.0":
|
||||
version: 2.18.0
|
||||
resolution: "nan@npm:2.18.0"
|
||||
dependencies:
|
||||
|
@ -30815,7 +30872,7 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"node-fetch@npm:^2.6.12":
|
||||
"node-fetch@npm:^2.6.12, node-fetch@npm:^2.6.9":
|
||||
version: 2.7.0
|
||||
resolution: "node-fetch@npm:2.7.0"
|
||||
dependencies:
|
||||
|
@ -31850,7 +31907,7 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"opencollective-postinstall@npm:^2.0.2":
|
||||
"opencollective-postinstall@npm:^2.0.2, opencollective-postinstall@npm:^2.0.3":
|
||||
version: 2.0.3
|
||||
resolution: "opencollective-postinstall@npm:2.0.3"
|
||||
bin:
|
||||
|
@ -32760,6 +32817,13 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"path2d-polyfill@npm:^2.0.1":
|
||||
version: 2.0.1
|
||||
resolution: "path2d-polyfill@npm:2.0.1"
|
||||
checksum: e38a4f920be3550e8334b899cc56f4fca0a976ca69404ee10d656a45d422996b7e27e294e2cf0aac2e410ce59d6977cde9f95586e62a24e6c904716695e059f8
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"pbkdf2@npm:^3.0.3":
|
||||
version: 3.1.2
|
||||
resolution: "pbkdf2@npm:3.1.2"
|
||||
|
@ -32780,33 +32844,33 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"pdfjs-dist@npm:*":
|
||||
version: 2.14.305
|
||||
resolution: "pdfjs-dist@npm:2.14.305"
|
||||
"pdfjs-dist@npm:3.11.174":
|
||||
version: 3.11.174
|
||||
resolution: "pdfjs-dist@npm:3.11.174"
|
||||
dependencies:
|
||||
dommatrix: ^1.0.1
|
||||
web-streams-polyfill: ^3.2.1
|
||||
peerDependencies:
|
||||
worker-loader: ^3.0.8
|
||||
peerDependenciesMeta:
|
||||
worker-loader:
|
||||
canvas: ^2.11.2
|
||||
path2d-polyfill: ^2.0.1
|
||||
dependenciesMeta:
|
||||
canvas:
|
||||
optional: true
|
||||
checksum: b75443f81e500856e3a7b61303d1f621f81e82b19fc6216f74d33a70d1ef392bb8b2ca4cfa39f11e7c0a877e6d6d74b474768988dcf3299d5d8a1d996d48f856
|
||||
path2d-polyfill:
|
||||
optional: true
|
||||
checksum: 62f5a64ca0b2dbc855701ebf9a65c3e48c3c9aa5d64c50eb42bb9ff50a326f3eddab7f2a134ef0398398b1ccff9d842935b9f31358c9103bdc71406632d1a7fa
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"pdfjs-dist@npm:2.16.105":
|
||||
version: 2.16.105
|
||||
resolution: "pdfjs-dist@npm:2.16.105"
|
||||
"pdfjs-dist@patch:pdfjs-dist@npm%3A3.11.174#./.yarn/patches/pdfjs-dist-npm-3.11.174-67f2fee6d6.patch::locator=root%40workspace%3A.":
|
||||
version: 3.11.174
|
||||
resolution: "pdfjs-dist@patch:pdfjs-dist@npm%3A3.11.174#./.yarn/patches/pdfjs-dist-npm-3.11.174-67f2fee6d6.patch::version=3.11.174&hash=4ecadb&locator=root%40workspace%3A."
|
||||
dependencies:
|
||||
dommatrix: ^1.0.3
|
||||
web-streams-polyfill: ^3.2.1
|
||||
peerDependencies:
|
||||
worker-loader: ^3.0.8
|
||||
peerDependenciesMeta:
|
||||
worker-loader:
|
||||
canvas: ^2.11.2
|
||||
path2d-polyfill: ^2.0.1
|
||||
dependenciesMeta:
|
||||
canvas:
|
||||
optional: true
|
||||
checksum: 16ad2fa0ff8404fefd1a3e83f92ef1a594fcc4d3ff65512f801365c8f06d300d4a38023a867994f0b964a8e146773e6dcc9988c7c1a791917eb6371d5bd72863
|
||||
path2d-polyfill:
|
||||
optional: true
|
||||
checksum: bc7597789b13b3ea59ee4ff29db40a15f0a20094ef8f6fa9ba59a80db0501a9b94d0fb3602515666057ebe9c92e59d938ac157f4d2852a125ccc1511c9d8adf6
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
|
@ -35998,7 +36062,7 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"regenerator-runtime@npm:^0.13.11":
|
||||
"regenerator-runtime@npm:^0.13.11, regenerator-runtime@npm:^0.13.3":
|
||||
version: 0.13.11
|
||||
resolution: "regenerator-runtime@npm:0.13.11"
|
||||
checksum: 27481628d22a1c4e3ff551096a683b424242a216fee44685467307f14d58020af1e19660bf2e26064de946bad7eff28950eae9f8209d55723e2d9351e632bbb4
|
||||
|
@ -37925,6 +37989,17 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"simple-get@npm:^3.0.3":
|
||||
version: 3.1.1
|
||||
resolution: "simple-get@npm:3.1.1"
|
||||
dependencies:
|
||||
decompress-response: ^4.2.0
|
||||
once: ^1.3.1
|
||||
simple-concat: ^1.0.0
|
||||
checksum: 80195e70bf171486e75c31e28e5485468195cc42f85940f8b45c4a68472160144d223eb4d07bc82ef80cb974b7c401db021a540deb2d34ac4b3b8883da2d6401
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"simple-get@npm:^4.0.0":
|
||||
version: 4.0.0
|
||||
resolution: "simple-get@npm:4.0.0"
|
||||
|
@ -40052,6 +40127,31 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"tesseract.js-core@npm:^4.0.4":
|
||||
version: 4.0.4
|
||||
resolution: "tesseract.js-core@npm:4.0.4"
|
||||
checksum: 5bd17315260d13af789a6f3288d35fd4d425234d10e48c548bfb91dd702259894091356941f30186f814832073cbe265abac12d9c5263dd62940504fcfecbd69
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"tesseract.js@npm:*, tesseract.js@npm:4.1.2":
|
||||
version: 4.1.2
|
||||
resolution: "tesseract.js@npm:4.1.2"
|
||||
dependencies:
|
||||
bmp-js: ^0.1.0
|
||||
idb-keyval: ^6.2.0
|
||||
is-electron: ^2.2.2
|
||||
is-url: ^1.2.4
|
||||
node-fetch: ^2.6.9
|
||||
opencollective-postinstall: ^2.0.3
|
||||
regenerator-runtime: ^0.13.3
|
||||
tesseract.js-core: ^4.0.4
|
||||
wasm-feature-detect: ^1.2.11
|
||||
zlibjs: ^0.3.1
|
||||
checksum: 63cf607507394542e90e9816106f754f9a39ed946d6c3d5d18b6f63d9e13112ec2094c2e47ccc835046c32ad7477f5e2e3793044305d706f15be08278acf76fd
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"test-exclude@npm:^6.0.0":
|
||||
version: 6.0.0
|
||||
resolution: "test-exclude@npm:6.0.0"
|
||||
|
@ -42425,6 +42525,13 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"wasm-feature-detect@npm:^1.2.11":
|
||||
version: 1.5.1
|
||||
resolution: "wasm-feature-detect@npm:1.5.1"
|
||||
checksum: a99be305a83530271e47212ef138644c75bc65bcf2b2c094b22fe4e13b9ca1a2c00fdd8a01d663b338e189459a5e1f27d9cbd71c8adca3c71957beea51606193
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"watchpack@npm:^2.3.1":
|
||||
version: 2.3.1
|
||||
resolution: "watchpack@npm:2.3.1"
|
||||
|
@ -42470,13 +42577,6 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"web-streams-polyfill@npm:^3.2.1":
|
||||
version: 3.2.1
|
||||
resolution: "web-streams-polyfill@npm:3.2.1"
|
||||
checksum: b119c78574b6d65935e35098c2afdcd752b84268e18746606af149e3c424e15621b6f1ff0b42b2676dc012fc4f0d313f964b41a4b5031e525faa03997457da02
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"web-worker@npm:^1.2.0":
|
||||
version: 1.2.0
|
||||
resolution: "web-worker@npm:1.2.0"
|
||||
|
@ -43776,6 +43876,13 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"zlibjs@npm:^0.3.1":
|
||||
version: 0.3.1
|
||||
resolution: "zlibjs@npm:0.3.1"
|
||||
checksum: 91f3d28bb5925ac71565a1fd4d44303b6b876ed483c9c192864393811151b399f29d917c9118a5aca5a541d9dfc7c199460f86b489ea1f77b7217e0edc887c89
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"zwitch@npm:^1.0.0":
|
||||
version: 1.0.5
|
||||
resolution: "zwitch@npm:1.0.5"
|
||||
|
|
Loading…
Reference in New Issue