mirror of https://github.com/laurent22/joplin.git synced 2025-02-07 19:30:04 +02:00

More fixes to search engine and better handling of non-ASCII searches

Laurent Cozic 2019-01-14 19:11:54 +00:00
parent a1f0bd1e6c
commit 2e12b2655b
17 changed files with 330 additions and 130 deletions
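
In essence, the commit normalizes both indexed note text and queries (lowercasing plus diacritic removal via the new `diacritics` dependency), and routes Japanese, Chinese and Korean queries away from SQLite FTS — whose default tokenizer cannot segment those scripts — to a plain wildcard search. Below is a minimal sketch of that flow, not the actual Joplin code; it assumes the `diacritics` npm package (added as a dependency in this commit) and uses narrower script-detection ranges than the real scriptType() in string-utils.js:

// Sketch only, not the actual Joplin code. Mirrors the intent of normalizeText_()
// and the search() routing added in SearchEngine.js further down.
const removeDiacritics = require('diacritics').remove;

// Narrow illustrative ranges; the real REGEX_JAPANESE/CHINESE/KOREAN in
// string-utils.js cover many more Unicode blocks.
const REGEX_JAPANESE = /[\u3040-\u30ff]/; // hiragana + katakana
const REGEX_CHINESE = /[\u4e00-\u9fff]/;  // CJK unified ideographs
const REGEX_KOREAN = /[\uac00-\ud7af]/;   // hangul syllables

function scriptType(s) {
	if (REGEX_JAPANESE.test(s)) return 'ja';
	if (REGEX_CHINESE.test(s)) return 'zh';
	if (REGEX_KOREAN.test(s)) return 'ko';
	return 'en';
}

function normalizeText(text) {
	// Notes and queries go through the same normalisation, so "noel" matches "noël".
	return removeDiacritics(text.normalize().toLowerCase());
}

function chooseSearchPath(query) {
	const normalized = normalizeText(query);
	// FTS handles alphabetic scripts; CJK queries fall back to a LIKE-style wildcard search.
	const engine = ['ja', 'zh', 'ko'].indexOf(scriptType(normalized)) >= 0 ? 'basic' : 'fts';
	return { normalized, engine };
}

console.log(chooseSearchPath('père Noël')); // { normalized: 'pere noel', engine: 'fts' }
console.log(chooseSearchPath('日本語'));     // { normalized: '日本語', engine: 'basic' }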

View File

@@ -555,6 +555,11 @@
     "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
     "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups="
   },
+  "diacritics": {
+    "version": "1.3.0",
+    "resolved": "https://registry.npmjs.org/diacritics/-/diacritics-1.3.0.tgz",
+    "integrity": "sha1-PvqHMj67hj5mls67AILUj/PW96E="
+  },
   "domexception": {
     "version": "1.0.1",
     "resolved": "https://registry.npmjs.org/domexception/-/domexception-1.0.1.tgz",

View File

@@ -31,6 +31,7 @@
     "async-mutex": "^0.1.3",
     "base-64": "^0.1.0",
     "compare-version": "^0.1.2",
+    "diacritics": "^1.3.0",
     "es6-promise-pool": "^2.5.0",
     "follow-redirects": "^1.2.4",
     "form-data": "^2.1.4",

View File

@@ -29,6 +29,7 @@ npm test tests-build/models_BaseItem.js
 npm test tests-build/models_Folder.js
 npm test tests-build/models_Note.js
 npm test tests-build/models_Tag.js
+npm test tests-build/models_ItemChange.js
 npm test tests-build/models_Setting.js
 npm test tests-build/pathUtils.js
 npm test tests-build/StringUtils.js

View File

@@ -0,0 +1,53 @@
+require('app-module-path').addPath(__dirname);
+
+const { time } = require('lib/time-utils.js');
+const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
+const SearchEngine = require('lib/services/SearchEngine');
+const ResourceService = require('lib/services/ResourceService');
+const ItemChangeUtils = require('lib/services/ItemChangeUtils');
+const Note = require('lib/models/Note');
+const Setting = require('lib/models/Setting');
+const ItemChange = require('lib/models/ItemChange');
+
+process.on('unhandledRejection', (reason, p) => {
+	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
+});
+
+let searchEngine = null;
+
+describe('models_ItemChange', function() {
+
+	beforeEach(async (done) => {
+		await setupDatabaseAndSynchronizer(1);
+		searchEngine = new SearchEngine();
+		searchEngine.setDb(db());
+		done();
+	});
+
+	it('should delete old changes that have been processed', asyncTest(async () => {
+		const n1 = await Note.save({ title: "abcd efgh" }); // 3
+		await ItemChange.waitForAllSaved();
+
+		expect(await ItemChange.lastChangeId()).toBe(1);
+
+		const resourceService = new ResourceService();
+		await searchEngine.syncTables();
+
+		// If we run this now, it should not delete any change because
+		// the resource service has not yet processed the change
+		await ItemChangeUtils.deleteProcessedChanges();
+		expect(await ItemChange.lastChangeId()).toBe(1);
+
+		await resourceService.indexNoteResources();
+
+		// Now that the resource service has processed the change,
+		// the change can be deleted.
+		await ItemChangeUtils.deleteProcessedChanges();
+		expect(await ItemChange.lastChangeId()).toBe(0);
+	}));
+
+});

View File

@@ -1,9 +1,11 @@
 require('app-module-path').addPath(__dirname);
 const { time } = require('lib/time-utils.js');
-const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
+const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, asyncTest, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
 const SearchEngine = require('lib/services/SearchEngine');
 const Note = require('lib/models/Note');
+const ItemChange = require('lib/models/ItemChange');
+const Setting = require('lib/models/Setting');
 process.on('unhandledRejection', (reason, p) => {
 	console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);

@@ -23,7 +25,7 @@ describe('services_SearchEngine', function() {
 		done();
 	});
-	it('should keep the content and FTS table in sync', async (done) => {
+	it('should keep the content and FTS table in sync', asyncTest(async () => {
 		let rows, n1, n2, n3;
 		n1 = await Note.save({ title: "a" });

@@ -56,11 +58,25 @@
 		await engine.syncTables();
 		rows = await engine.search('c');
 		expect(rows.length).toBe(1);
-		done();
-	});
+	}));
+
+	it('should, after initial indexing, save the last change ID', asyncTest(async () => {
+		const n1 = await Note.save({ title: "abcd efgh" }); // 3
+		const n2 = await Note.save({ title: "abcd aaaaa abcd abcd" }); // 1
+		expect(Setting.value('searchEngine.initialIndexingDone')).toBe(false);
+		await ItemChange.waitForAllSaved();
+		const lastChangeId = await ItemChange.lastChangeId();
+		await engine.syncTables();
+		expect(Setting.value('searchEngine.lastProcessedChangeId')).toBe(lastChangeId);
+		expect(Setting.value('searchEngine.initialIndexingDone')).toBe(true);
+	}));
+
-	it('should order search results by relevance (1)', async (done) => {
+	it('should order search results by relevance (1)', asyncTest(async () => {
 		const n1 = await Note.save({ title: "abcd efgh" }); // 3
 		const n2 = await Note.save({ title: "abcd aaaaa abcd abcd" }); // 1
 		const n3 = await Note.save({ title: "abcd aaaaa bbbb eeee abcd" }); // 2

@@ -71,11 +87,9 @@
 		expect(rows[0].id).toBe(n2.id);
 		expect(rows[1].id).toBe(n3.id);
 		expect(rows[2].id).toBe(n1.id);
-		done();
-	});
+	}));
-	it('should order search results by relevance (2)', async (done) => {
+	it('should order search results by relevance (2)', asyncTest(async () => {
 		// 1
 		const n1 = await Note.save({ title: "abcd efgh", body: "XX abcd XX efgh" });
 		// 4

@@ -95,11 +109,9 @@
 		expect(rows[2].id).toBe(n3.id);
 		expect(rows[3].id).toBe(n2.id);
 		expect(rows[4].id).toBe(n5.id);
-		done();
-	});
+	}));
-	it('should supports various query types', async (done) => {
+	it('should supports various query types', asyncTest(async () => {
 		let rows;
 		const n1 = await Note.save({ title: "abcd efgh ijkl", body: "aaaa bbbb" });

@@ -148,11 +160,51 @@
 		rows = await engine.search('сообщило');
 		expect(rows.length).toBe(1);
-		done();
-	});
+	}));
+
+	it('should support queries with or without accents', asyncTest(async () => {
+		let rows;
+		const n1 = await Note.save({ title: "père noël" });
+		await engine.syncTables();
+		expect((await engine.search('père')).length).toBe(1);
+		expect((await engine.search('pere')).length).toBe(1);
+		expect((await engine.search('noe*')).length).toBe(1);
+		expect((await engine.search('noë*')).length).toBe(1);
+	}));
+
+	it('should support queries with Chinese characters', asyncTest(async () => {
+		let rows;
+		const n1 = await Note.save({ title: "我是法国人" });
+		await engine.syncTables();
+		expect((await engine.search('我')).length).toBe(1);
+		expect((await engine.search('法国人')).length).toBe(1);
+	}));
+
+	it('should support queries with Japanese characters', asyncTest(async () => {
+		let rows;
+		const n1 = await Note.save({ title: "私は日本語を話すことができません" });
+		await engine.syncTables();
+		expect((await engine.search('日本')).length).toBe(1);
+		expect((await engine.search('できません')).length).toBe(1);
+	}));
+
+	it('should support queries with Korean characters', asyncTest(async () => {
+		let rows;
+		const n1 = await Note.save({ title: "이것은 한국말이다" });
+		await engine.syncTables();
+		expect((await engine.search('이것은')).length).toBe(1);
+		expect((await engine.search('말')).length).toBe(1);
+	}));
+
-	it('should parse normal query strings', async (done) => {
+	it('should parse normal query strings', asyncTest(async () => {
 		let rows;
 		const testCases = [

@@ -173,11 +225,9 @@
 			expect(JSON.stringify(actual.terms.title)).toBe(JSON.stringify(expected.title));
 			expect(JSON.stringify(actual.terms.body)).toBe(JSON.stringify(expected.body));
 		}
-		done();
-	});
+	}));
-	it('should parse query strings with wildcards', async (done) => {
+	it('should parse query strings with wildcards', asyncTest(async () => {
 		let rows;
 		const testCases = [

@@ -202,8 +252,6 @@
 		}
 		expect(engine.parseQuery('*').termCount).toBe(0);
-		done();
-	});
+	}));
 });

View File

@@ -796,6 +796,7 @@ class Application extends BaseApplication {
 		SearchEngine.instance().setDb(reg.db());
 		SearchEngine.instance().setLogger(reg.logger());
+		SearchEngine.runInBackground();
 		if (Setting.value('env') === 'dev') {
 			AlarmService.updateAllNotifications();

View File

@@ -279,9 +279,13 @@ class NoteListComponent extends React.Component {
 			const w = highlightedWords[i];
 			if (w.type === 'regex') {
-				mark.markRegExp(new RegExp(w.value, 'gmi'), { acrossElements: true });
+				mark.markRegExp(new RegExp('\\b' + w.value + '\\b', 'gmi'), {
+					acrossElements: true,
+				});
 			} else {
-				mark.mark([w]);
+				mark.mark([w], {
+					accuracy: 'exactly',
+				});
 			}
 		}

View File

@@ -226,6 +226,8 @@
 		function setMarkers(keywords, options = null) {
 			if (!options) options = {};
+			// TODO: It should highlight queries without accents - eg "penche*" should highlight "penchés"
 			if (!mark_) {
 				mark_ = new Mark(document.getElementById('content'), {
 					exclude: ['img'],

@@ -258,7 +260,7 @@
 				const keyword = keywords[i];
 				if (keyword.type === 'regex') {
-					mark_.markRegExp(new RegExp(keyword.value, 'gmi'), {
+					mark_.markRegExp(new RegExp('\\b' + keyword.value + '\\b', 'gmi'), {
 						each: onEachElement,
 						acrossElements: true,
 					});

View File

@@ -1841,6 +1841,11 @@
     "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
     "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups="
   },
+  "diacritics": {
+    "version": "1.3.0",
+    "resolved": "https://registry.npmjs.org/diacritics/-/diacritics-1.3.0.tgz",
+    "integrity": "sha1-PvqHMj67hj5mls67AILUj/PW96E="
+  },
   "diff-match-patch": {
     "version": "1.0.4",
     "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.4.tgz",

View File

@@ -84,6 +84,7 @@
     "base-64": "^0.1.0",
     "chokidar": "^2.0.3",
     "compare-versions": "^3.2.1",
+    "diacritics": "^1.3.0",
     "electron-context-menu": "^0.9.1",
     "electron-is-dev": "^0.3.0",
     "electron-window-state": "^4.1.1",

View File

@@ -47,6 +47,11 @@ class ItemChange extends BaseModel {
 		});
 	}
+	static async deleteOldChanges(lowestChangeId) {
+		if (!lowestChangeId) return;
+		return this.db().exec('DELETE FROM item_changes WHERE id <= ?', [lowestChangeId]);
+	}
 }
 ItemChange.addChangeMutex_ = new Mutex();

View File

@ -0,0 +1,18 @@
const Setting = require('lib/models/Setting');
const ItemChange = require('lib/models/ItemChange');
class ItemChangeUtils {
static async deleteProcessedChanges() {
const lastProcessedChangeIds = [
Setting.value('resourceService.lastProcessedChangeId'),
Setting.value('searchEngine.lastProcessedChangeId'),
];
const lowestChangeId = Math.min(...lastProcessedChangeIds);
await ItemChange.deleteOldChanges(lowestChangeId);
}
}
module.exports = ItemChangeUtils;

View File

@@ -6,6 +6,7 @@ const BaseModel = require('lib/BaseModel');
 const BaseService = require('lib/services/BaseService');
 const Setting = require('lib/models/Setting');
 const { shim } = require('lib/shim');
+const ItemChangeUtils = require('lib/services/ItemChangeUtils');
 class ResourceService extends BaseService {

@@ -66,6 +67,8 @@ class ResourceService extends BaseService {
 		await NoteResource.addOrphanedResources();
+		await ItemChangeUtils.deleteProcessedChanges();
 		this.logger().info('ResourceService::indexNoteResources: Completed');
 	}

View File

@@ -4,7 +4,9 @@ const ItemChange = require('lib/models/ItemChange.js');
 const Setting = require('lib/models/Setting.js');
 const Note = require('lib/models/Note.js');
 const BaseModel = require('lib/BaseModel.js');
-const { pregQuote } = require('lib/string-utils.js');
+const ItemChangeUtils = require('lib/services/ItemChangeUtils');
+const { pregQuote, scriptType } = require('lib/string-utils.js');
+const removeDiacritics = require('diacritics').remove;
 class SearchEngine {

@@ -49,15 +51,14 @@ class SearchEngine {
 	}
-	async initialIndexing() {
+	async rebuildIndex() {
 		let noteIds = await this.db().selectAll('SELECT id FROM notes WHERE is_conflict = 0 AND encryption_applied = 0');
 		noteIds = noteIds.map(n => n.id);
-		// TODO: get last change id HERE
+		const lastChangeId = await ItemChange.lastChangeId();
 		// First delete content of note_normalized, in case the previous initial indexing failed
-		await this.db().exec('DELETE FROM note_normalized');
+		await this.db().exec('DELETE FROM notes_normalized');
 		while (noteIds.length) {
 			const currentIds = noteIds.splice(0, 100);

@@ -73,8 +74,7 @@ class SearchEngine {
 			await this.db().transactionExecBatch(queries);
 		}
-		// TODO: SET last chnage ID here
+		Setting.setValue('searchEngine.lastProcessedChangeId', lastChangeId);
 	}
 	async syncTables() {

@@ -83,7 +83,8 @@ class SearchEngine {
 		await ItemChange.waitForAllSaved();
 		if (!Setting.value('searchEngine.initialIndexingDone')) {
-			await this.initialIndexing();
+			await this.rebuildIndex();
+			Setting.setValue('searchEngine.initialIndexingDone', true)
 			return;
 		}

@@ -131,6 +132,8 @@ class SearchEngine {
 			await Setting.saveAll();
 		}
+		await ItemChangeUtils.deleteProcessedChanges();
 		this.logger().info('SearchEngine: Updated FTS table in ' + (Date.now() - startTime) + 'ms');
 	}

@@ -306,7 +309,7 @@ class SearchEngine {
 	}
 	normalizeText_(text) {
-		return text.normalize().toLowerCase();
+		return removeDiacritics(text.normalize().toLowerCase());
 	}
 	normalizeNote_(note) {

@@ -316,14 +319,48 @@ class SearchEngine {
 		return n;
 	}
+	async basicSearch(query) {
+		let p = query.split(' ');
+		let temp = [];
+		for (let i = 0; i < p.length; i++) {
+			let t = p[i].trim();
+			if (!t) continue;
+			temp.push(t);
+		}
+		return await Note.previews(null, {
+			anywherePattern: '*' + temp.join('*') + '*',
+		});
+	}
 	async search(query) {
 		query = this.normalizeText_(query);
+		const st = scriptType(query);
+		if (!Setting.value('db.ftsEnabled') || ['ja', 'zh', 'ko'].indexOf(st) >= 0) {
+			// Non-alphabetical languages aren't support by SQLite FTS (except with extensions which are not available in all platforms)
+			return this.basicSearch(query);
+		} else {
 			const parsedQuery = this.parseQuery(query);
 			const sql = 'SELECT id, title, offsets(notes_fts) AS offsets FROM notes_fts WHERE notes_fts MATCH ?'
 			const rows = await this.db().selectAll(sql, [query]);
 			this.orderResults_(rows, parsedQuery);
 			return rows;
 		}
+	}
+	static runInBackground() {
+		if (this.isRunningInBackground_) return;
+		this.isRunningInBackground_ = true;
+		this.instance().syncTables();
+		setTimeout(() => {
+			this.instance().syncTables();
+		}, 1000 * 60 * 5);
+	}
 }

View File

@@ -1,6 +1,4 @@
-function removeDiacritics(str) {
-	var defaultDiacriticsRemovalMap = [
+const defaultDiacriticsRemovalMap = [
 	{'base':'A', 'letters':/[\u0041\u24B6\uFF21\u00C0\u00C1\u00C2\u1EA6\u1EA4\u1EAA\u1EA8\u00C3\u0100\u0102\u1EB0\u1EAE\u1EB4\u1EB2\u0226\u01E0\u00C4\u01DE\u1EA2\u00C5\u01FA\u01CD\u0200\u0202\u1EA0\u1EAC\u1EB6\u1E00\u0104\u023A\u2C6F]/g},
 	{'base':'AA','letters':/[\uA732]/g},
 	{'base':'AE','letters':/[\u00C6\u01FC\u01E2]/g},

@@ -87,6 +85,7 @@ function removeDiacritics(str) {
 	{'base':'z','letters':/[\u007A\u24E9\uFF5A\u017A\u1E91\u017C\u017E\u1E93\u1E95\u01B6\u0225\u0240\u2C6C\uA763]/g}
 ];
+function removeDiacritics(str) {
 	for(var i=0; i<defaultDiacriticsRemovalMap.length; i++) {
 		str = str.replace(defaultDiacriticsRemovalMap[i].letters, defaultDiacriticsRemovalMap[i].base);
 	}

@@ -243,4 +242,15 @@ function surroundKeywords(keywords, text, prefix, suffix) {
 	return text.replace(re, prefix + '$1' + suffix);
 }
-module.exports = { removeDiacritics, escapeFilename, wrap, splitCommandString, padLeft, toTitleCase, urlDecode, escapeHtml, pregQuote, surroundKeywords };
+const REGEX_JAPANESE = /[\u3000-\u303f]|[\u3040-\u309f]|[\u30a0-\u30ff]|[\uff00-\uff9f]|[\u4e00-\u9faf]|[\u3400-\u4dbf]/;
+const REGEX_CHINESE = /[\u4e00-\u9fff]|[\u3400-\u4dbf]|[\u{20000}-\u{2a6df}]|[\u{2a700}-\u{2b73f}]|[\u{2b740}-\u{2b81f}]|[\u{2b820}-\u{2ceaf}]|[\uf900-\ufaff]|[\u3300-\u33ff]|[\ufe30-\ufe4f]|[\uf900-\ufaff]|[\u{2f800}-\u{2fa1f}]/u;
+const REGEX_KOREAN = /[\uac00-\ud7af]|[\u1100-\u11ff]|[\u3130-\u318f]|[\ua960-\ua97f]|[\ud7b0-\ud7ff]/;
+
+function scriptType(s) {
+	if (REGEX_JAPANESE.test(s)) return 'ja';
+	if (REGEX_CHINESE.test(s)) return 'zh';
+	if (REGEX_KOREAN.test(s)) return 'ko';
+	return 'en';
+}
+
+module.exports = { removeDiacritics, escapeFilename, wrap, splitCommandString, padLeft, toTitleCase, urlDecode, escapeHtml, pregQuote, surroundKeywords, scriptType };

View File

@@ -2631,6 +2631,11 @@
     "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-2.1.0.tgz",
     "integrity": "sha1-9B8cEL5LAOh7XxPaaAdZ8sW/0+I="
   },
+  "diacritics": {
+    "version": "1.3.0",
+    "resolved": "https://registry.npmjs.org/diacritics/-/diacritics-1.3.0.tgz",
+    "integrity": "sha1-PvqHMj67hj5mls67AILUj/PW96E="
+  },
   "diff": {
     "version": "3.4.0",
     "resolved": "https://registry.npmjs.org/diff/-/diff-3.4.0.tgz",

View File

@@ -13,6 +13,7 @@
     "async-mutex": "^0.1.3",
     "base-64": "^0.1.0",
     "buffer": "^5.0.8",
+    "diacritics": "^1.3.0",
     "events": "^1.1.1",
     "form-data": "^2.1.4",
     "html-entities": "^1.2.1",