Mirror of https://github.com/laurent22/joplin.git

All: Resolves #1877: Add search filters (#3213)

Author: Naveen M V, 2020-08-08 04:43:21 +05:30 (committed by GitHub)
Parent: 3253146dae
Commit: f99f3f8a6d
27 changed files with 1715 additions and 88 deletions
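
For orientation, a minimal sketch (not part of the commit) of the search filter syntax introduced here, based on the test cases included below; parsing lives in filterParser.js and SQL generation in queryBuilder.js:

require('app-module-path').addPath(__dirname); // as in the test files below
const filterParser = require('lib/services/searchengine/filterParser.js').default;
// Each call returns a list of { name, value, negated } terms:
filterParser('title:abcd -body:body1');                     // title/body filters, "-" negates a term
filterParser('any:1 tag:tag1 tag:tag2');                    // any:1 turns the implicit AND into OR
filterParser('notebook:notebook0');                         // notebook filter (includes sub-notebooks)
filterParser('created:20200101 -created:20200220');         // created/updated date range, YYYYMMDD
filterParser('created:day-1 updated:day-0');                // smart relative dates (day/week/month/year-N)
filterParser('type:todo iscompleted:1');                    // to-do filters
filterParser('resource:image/* sourceurl:*joplinapp.org');  // wildcards with "*"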

@ -164,6 +164,8 @@ ReactNativeClient/lib/services/ResourceEditWatcher/index.js
ReactNativeClient/lib/services/ResourceEditWatcher/reducer.js
ReactNativeClient/lib/services/rest/actionApi.desktop.js
ReactNativeClient/lib/services/rest/errors.js
ReactNativeClient/lib/services/searchengine/filterParser.js
ReactNativeClient/lib/services/searchengine/queryBuilder.js
ReactNativeClient/lib/services/SettingUtils.js
ReactNativeClient/lib/services/synchronizer/gui/useSyncTargetUpgrade.js
ReactNativeClient/lib/services/synchronizer/LockHandler.js

.gitignore (vendored): 2 lines changed
@ -155,6 +155,8 @@ ReactNativeClient/lib/services/ResourceEditWatcher/index.js
ReactNativeClient/lib/services/ResourceEditWatcher/reducer.js
ReactNativeClient/lib/services/rest/actionApi.desktop.js
ReactNativeClient/lib/services/rest/errors.js
ReactNativeClient/lib/services/searchengine/filterParser.js
ReactNativeClient/lib/services/searchengine/queryBuilder.js
ReactNativeClient/lib/services/SettingUtils.js
ReactNativeClient/lib/services/synchronizer/gui/useSyncTargetUpgrade.js
ReactNativeClient/lib/services/synchronizer/LockHandler.js

@ -0,0 +1,155 @@
/* eslint-disable no-unused-vars */
require('app-module-path').addPath(__dirname);
const filterParser = require('lib/services/searchengine/filterParser.js').default;
// import filterParser from 'lib/services/searchengine/filterParser.js';
const makeTerm = (name, value, negated) => { return { name, value, negated }; };
describe('filterParser should return the correct filters for each keyword', () => {
it('title', () => {
const searchString = 'title: something';
expect(filterParser(searchString)).toContain(makeTerm('title', 'something', false));
});
it('negated title', () => {
const searchString = '-title: something';
expect(filterParser(searchString)).toContain(makeTerm('title', 'something', true));
});
it('body', () => {
const searchString = 'body:something';
expect(filterParser(searchString)).toContain(makeTerm('body', 'something', false));
});
it('negated body', () => {
const searchString = '-body:something';
expect(filterParser(searchString)).toContain(makeTerm('body', 'something', true));
});
it('title and body', () => {
const searchString = 'title:testTitle body:testBody';
expect(filterParser(searchString)).toContain(makeTerm('title', 'testTitle', false));
expect(filterParser(searchString)).toContain(makeTerm('body', 'testBody', false));
});
it('title with multiple words', () => {
const searchString = 'title:"word1 word2" body:testBody';
expect(filterParser(searchString)).toContain(makeTerm('title', 'word1', false));
expect(filterParser(searchString)).toContain(makeTerm('title', 'word2', false));
expect(filterParser(searchString)).toContain(makeTerm('body', 'testBody', false));
});
it('body with multiple words', () => {
const searchString = 'title:testTitle body:"word1 word2"';
expect(filterParser(searchString)).toContain(makeTerm('title', 'testTitle', false));
expect(filterParser(searchString)).toContain(makeTerm('body', 'word1', false));
expect(filterParser(searchString)).toContain(makeTerm('body', 'word2', false));
});
it('single word text', () => {
const searchString = 'joplin';
expect(filterParser(searchString)).toContain(makeTerm('text', '"joplin"', false));
});
it('multi word text', () => {
const searchString = 'scott joplin';
expect(filterParser(searchString)).toContain(makeTerm('text', '"scott"', false));
expect(filterParser(searchString)).toContain(makeTerm('text', '"joplin"', false));
});
it('negated word text', () => {
const searchString = 'scott -joplin';
expect(filterParser(searchString)).toContain(makeTerm('text', '"scott"', false));
expect(filterParser(searchString)).toContain(makeTerm('text', '"joplin"', true));
});
it('phrase text search', () => {
const searchString = '"scott joplin"';
expect(filterParser(searchString)).toContain(makeTerm('text', '"scott joplin"', false));
});
it('multi word body', () => {
const searchString = 'body:"foo bar"';
expect(filterParser(searchString)).toContain(makeTerm('body', 'foo', false));
expect(filterParser(searchString)).toContain(makeTerm('body', 'bar', false));
});
it('negated tag queries', () => {
const searchString = '-tag:mozart';
expect(filterParser(searchString)).toContain(makeTerm('tag', 'mozart', true));
});
it('created after', () => {
const searchString = 'created:20151218'; // YYYYMMDD
expect(filterParser(searchString)).toContain(makeTerm('created', '20151218', false));
});
it('created before', () => {
const searchString = '-created:20151218'; // YYYYMMDD
expect(filterParser(searchString)).toContain(makeTerm('created', '20151218', true));
});
it('any', () => {
const searchString = 'any:1 tag:123';
expect(filterParser(searchString)).toContain(makeTerm('any', '1', false));
expect(filterParser(searchString)).toContain(makeTerm('tag', '123', false));
});
it('wildcard tags', () => {
let searchString = 'tag:*';
expect(filterParser(searchString)).toContain(makeTerm('tag', '%', false));
searchString = '-tag:*';
expect(filterParser(searchString)).toContain(makeTerm('tag', '%', true));
searchString = 'tag:bl*sphemy';
expect(filterParser(searchString)).toContain(makeTerm('tag', 'bl%sphemy', false));
});
it('wildcard notebooks', () => {
const searchString = 'notebook:my*notebook';
expect(filterParser(searchString)).toContain(makeTerm('notebook', 'my%notebook', false));
});
it('wildcard MIME types', () => {
const searchString = 'resource:image/*';
expect(filterParser(searchString)).toContain(makeTerm('resource', 'image/%', false));
});
it('sourceurl', () => {
let searchString = 'sourceurl:https://www.google.com';
expect(filterParser(searchString)).toContain(makeTerm('sourceurl', 'https://www.google.com', false));
searchString = 'sourceurl:https://www.google.com -sourceurl:https://www.facebook.com';
expect(filterParser(searchString)).toContain(makeTerm('sourceurl', 'https://www.google.com', false));
expect(filterParser(searchString)).toContain(makeTerm('sourceurl', 'https://www.facebook.com', true));
});
it('handle invalid filters', () => {
let searchString = 'titletitle:123';
expect(() => filterParser(searchString)).toThrow(new Error('Invalid filter: titletitle'));
searchString = 'invalid:abc';
expect(() => filterParser(searchString)).toThrow(new Error('Invalid filter: invalid'));
searchString = ':abc';
expect(() => filterParser(searchString)).toThrow(new Error('Invalid filter: '));
searchString = 'type:blah';
expect(() => filterParser(searchString)).toThrow(new Error('The value of filter "type" must be "note" or "todo"'));
searchString = '-type:note';
expect(() => filterParser(searchString)).toThrow(new Error('type can\'t be negated'));
searchString = 'iscompleted:blah';
expect(() => filterParser(searchString)).toThrow(new Error('The value of filter "iscompleted" must be "1" or "0"'));
searchString = '-notebook:n1';
expect(() => filterParser(searchString)).toThrow(new Error('notebook can\'t be negated'));
searchString = '-iscompleted:1';
expect(() => filterParser(searchString)).toThrow(new Error('iscompleted can\'t be negated'));
});
});

@ -4,7 +4,7 @@ require('app-module-path').addPath(__dirname);
const { time } = require('lib/time-utils.js');
const { asyncTest, fileContentEqual, revisionService, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const ResourceService = require('lib/services/ResourceService');
const ItemChangeUtils = require('lib/services/ItemChangeUtils');
const Note = require('lib/models/Note');

@ -17,7 +17,7 @@ const fs = require('fs-extra');
const ArrayUtils = require('lib/ArrayUtils');
const ObjectUtils = require('lib/ObjectUtils');
const { shim } = require('lib/shim.js');
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);

@ -5,7 +5,7 @@ require('app-module-path').addPath(__dirname);
const { time } = require('lib/time-utils.js');
const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, asyncTest, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const Note = require('lib/models/Note');
const ItemChange = require('lib/models/ItemChange');
const Setting = require('lib/models/Setting');
@ -389,5 +389,4 @@ describe('services_SearchEngine', function() {
expect((await engine.search('"- [ ]"', { searchType: SearchEngine.SEARCH_TYPE_BASIC })).length).toBe(1);
expect((await engine.search('"[ ]"', { searchType: SearchEngine.SEARCH_TYPE_BASIC })).length).toBe(2);
}));
});

@ -0,0 +1,752 @@
/* eslint-disable no-unused-vars */
/* eslint prefer-const: 0*/
require('app-module-path').addPath(__dirname);
const { time } = require('lib/time-utils.js');
const { fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, asyncTest, db, synchronizer, fileApi, sleep, createNTestNotes, switchClient, createNTestFolders } = require('test-utils.js');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const Note = require('lib/models/Note');
const Folder = require('lib/models/Folder');
const Tag = require('lib/models/Tag');
const ItemChange = require('lib/models/ItemChange');
const Setting = require('lib/models/Setting');
const Resource = require('lib/models/Resource.js');
const { shim } = require('lib/shim');
const ResourceService = require('lib/services/ResourceService.js');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
let engine = null;
const ids = (array) => array.map(a => a.id);
// For pretty printing.
// See https://stackoverflow.com/questions/23676459/karma-jasmine-pretty-printing-object-comparison/26324116
// jasmine.pp = function(obj) {
// return JSON.stringify(obj, undefined, 2);
// };
describe('services_SearchFilter', function() {
beforeEach(async (done) => {
await setupDatabaseAndSynchronizer(1);
await switchClient(1);
engine = new SearchEngine();
engine.setDb(db());
done();
});
it('should return note matching title', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abcd', body: 'body 1' });
const n2 = await Note.save({ title: 'efgh', body: 'body 2' });
await engine.syncTables();
rows = await engine.search('title: abcd');
expect(rows.length).toBe(1);
expect(rows[0].id).toBe(n1.id);
}));
it('should return note matching negated title', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abcd', body: 'body 1' });
const n2 = await Note.save({ title: 'efgh', body: 'body 2' });
await engine.syncTables();
rows = await engine.search('-title: abcd');
expect(rows.length).toBe(1);
expect(rows[0].id).toBe(n2.id);
}));
it('should return note matching body', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abcd', body: 'body1' });
const n2 = await Note.save({ title: 'efgh', body: 'body2' });
await engine.syncTables();
rows = await engine.search('body: body1');
expect(rows.length).toBe(1);
expect(rows[0].id).toBe(n1.id);
}));
it('should return note matching negated body', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abcd', body: 'body1' });
const n2 = await Note.save({ title: 'efgh', body: 'body2' });
await engine.syncTables();
rows = await engine.search('-body: body1');
expect(rows.length).toBe(1);
expect(rows[0].id).toBe(n2.id);
}));
it('should return note matching title containing multiple words', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abcd xyz', body: 'body1' });
const n2 = await Note.save({ title: 'efgh ijk', body: 'body2' });
await engine.syncTables();
rows = await engine.search('title: "abcd xyz"');
expect(rows.length).toBe(1);
expect(rows[0].id).toBe(n1.id);
}));
it('should return note matching body containing multiple words', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abcd', body: 'ho ho ho' });
const n2 = await Note.save({ title: 'efgh', body: 'foo bar' });
await engine.syncTables();
rows = await engine.search('body: "foo bar"');
expect(rows.length).toBe(1);
expect(rows[0].id).toBe(n2.id);
}));
it('should return note matching title AND body', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abcd', body: 'ho ho ho' });
const n2 = await Note.save({ title: 'efgh', body: 'foo bar' });
await engine.syncTables();
rows = await engine.search('title: efgh body: "foo bar"');
expect(rows.length).toBe(1);
expect(rows[0].id).toBe(n2.id);
rows = await engine.search('title: abcd body: "foo bar"');
expect(rows.length).toBe(0);
}));
it('should return note matching title OR body', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abcd', body: 'ho ho ho' });
const n2 = await Note.save({ title: 'efgh', body: 'foo bar' });
await engine.syncTables();
rows = await engine.search('any:1 title: abcd body: "foo bar"');
expect(rows.length).toBe(2);
expect(rows.map(r=>r.id)).toContain(n1.id);
expect(rows.map(r=>r.id)).toContain(n2.id);
rows = await engine.search('any:1 title: wxyz body: "blah blah"');
expect(rows.length).toBe(0);
}));
it('should return notes matching text', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'foo beef', body: 'dead bar' });
const n2 = await Note.save({ title: 'bar efgh', body: 'foo dog' });
const n3 = await Note.save({ title: 'foo ho', body: 'ho ho ho' });
await engine.syncTables();
// Interpretation: match notes that contain foo in the title/body AND bar in the title/body.
// Note: this does NOT mean matching notes that contain the exact phrase "foo bar" in the title/body.
rows = await engine.search('foo bar');
expect(rows.length).toBe(2);
expect(rows.map(r=>r.id)).toContain(n1.id);
expect(rows.map(r=>r.id)).toContain(n2.id);
rows = await engine.search('foo -bar');
expect(rows.length).toBe(1);
expect(rows.map(r=>r.id)).toContain(n3.id);
rows = await engine.search('foo efgh');
expect(rows.length).toBe(1);
expect(rows[0].id).toBe(n2.id);
rows = await engine.search('zebra');
expect(rows.length).toBe(0);
}));
it('should return notes matching any negated text', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abc', body: 'def' });
const n2 = await Note.save({ title: 'def', body: 'ghi' });
const n3 = await Note.save({ title: 'ghi', body: 'jkl' });
await engine.syncTables();
rows = await engine.search('any:1 -abc -ghi');
expect(rows.length).toBe(3);
expect(rows.map(r=>r.id)).toContain(n1.id);
expect(rows.map(r=>r.id)).toContain(n2.id);
expect(rows.map(r=>r.id)).toContain(n3.id);
}));
it('should return notes matching any negated title', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abc', body: 'def' });
const n2 = await Note.save({ title: 'def', body: 'ghi' });
const n3 = await Note.save({ title: 'ghi', body: 'jkl' });
await engine.syncTables();
rows = await engine.search('any:1 -title:abc -title:ghi');
expect(rows.length).toBe(3);
expect(rows.map(r=>r.id)).toContain(n1.id);
expect(rows.map(r=>r.id)).toContain(n2.id);
expect(rows.map(r=>r.id)).toContain(n3.id);
}));
it('should return notes matching any negated body', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'abc', body: 'def' });
const n2 = await Note.save({ title: 'def', body: 'ghi' });
const n3 = await Note.save({ title: 'ghi', body: 'jkl' });
await engine.syncTables();
rows = await engine.search('any:1 -body:xyz -body:ghi');
expect(rows.length).toBe(3);
expect(rows.map(r=>r.id)).toContain(n1.id);
expect(rows.map(r=>r.id)).toContain(n2.id);
expect(rows.map(r=>r.id)).toContain(n3.id);
}));
it('should support phrase search', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'foo beef', body: 'bar dog' });
const n2 = await Note.save({ title: 'bar efgh', body: 'foo dog' });
await engine.syncTables();
rows = await engine.search('"bar dog"');
expect(rows.length).toBe(1);
expect(rows[0].id).toBe(n1.id);
}));
it('should support prefix search', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'foo beef', body: 'bar dog' });
const n2 = await Note.save({ title: 'bar efgh', body: 'foo dog' });
await engine.syncTables();
rows = await engine.search('"bar*"');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
}));
it('should support filtering by tags', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'But I would', body: 'walk 500 miles' });
const n2 = await Note.save({ title: 'And I would', body: 'walk 500 more' });
const n3 = await Note.save({ title: 'Just to be', body: 'the man who' });
const n4 = await Note.save({ title: 'walked a thousand', body: 'miles to fall' });
const n5 = await Note.save({ title: 'down at your', body: 'door' });
await Tag.setNoteTagsByTitles(n1.id, ['Da', 'da', 'lat', 'da']);
await Tag.setNoteTagsByTitles(n2.id, ['Da', 'da', 'lat', 'da']);
await engine.syncTables();
rows = await engine.search('tag:*');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('-tag:*');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n3.id);
expect(ids(rows)).toContain(n4.id);
expect(ids(rows)).toContain(n5.id);
}));
it('should support filtering by tags', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'peace talks', body: 'battle ground' });
const n2 = await Note.save({ title: 'mouse', body: 'mister' });
const n3 = await Note.save({ title: 'dresden files', body: 'harry dresden' });
await Tag.setNoteTagsByTitles(n1.id, ['tag1', 'tag2']);
await Tag.setNoteTagsByTitles(n2.id, ['tag2', 'tag3']);
await Tag.setNoteTagsByTitles(n3.id, ['tag3', 'tag4']);
await engine.syncTables();
rows = await engine.search('tag:tag2');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('tag:tag2 tag:tag3');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('any:1 tag:tag1 tag:tag2 tag:tag3');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('tag:tag2 tag:tag3 tag:tag4');
expect(rows.length).toBe(0);
rows = await engine.search('-tag:tag2');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('-tag:tag2 -tag:tag3');
expect(rows.length).toBe(0);
rows = await engine.search('-tag:tag2 -tag:tag3');
expect(rows.length).toBe(0);
rows = await engine.search('any:1 -tag:tag2 -tag:tag3');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n3.id);
}));
it('should support filtering by notebook', asyncTest(async () => {
let rows;
const folder0 = await Folder.save({ title: 'notebook0' });
const folder1 = await Folder.save({ title: 'notebook1' });
const notes0 = await createNTestNotes(5, folder0);
const notes1 = await createNTestNotes(5, folder1);
await engine.syncTables();
rows = await engine.search('notebook:notebook0');
expect(rows.length).toBe(5);
expect(ids(rows).sort()).toEqual(ids(notes0).sort());
}));
it('should support filtering by nested notebook', asyncTest(async () => {
let rows;
const folder0 = await Folder.save({ title: 'notebook0' });
const folder00 = await Folder.save({ title: 'notebook00', parent_id: folder0.id });
const folder1 = await Folder.save({ title: 'notebook1' });
const notes0 = await createNTestNotes(5, folder0);
const notes00 = await createNTestNotes(5, folder00);
const notes1 = await createNTestNotes(5, folder1);
await engine.syncTables();
rows = await engine.search('notebook:notebook0');
expect(rows.length).toBe(10);
expect(ids(rows).sort()).toEqual(ids(notes0.concat(notes00)).sort());
}));
it('should support filtering by multiple notebooks', asyncTest(async () => {
let rows;
const folder0 = await Folder.save({ title: 'notebook0' });
const folder00 = await Folder.save({ title: 'notebook00', parent_id: folder0.id });
const folder1 = await Folder.save({ title: 'notebook1' });
const folder2 = await Folder.save({ title: 'notebook2' });
const notes0 = await createNTestNotes(5, folder0);
const notes00 = await createNTestNotes(5, folder00);
const notes1 = await createNTestNotes(5, folder1);
const notes2 = await createNTestNotes(5, folder2);
await engine.syncTables();
rows = await engine.search('notebook:notebook0 notebook:notebook1');
expect(rows.length).toBe(15);
expect(ids(rows).sort()).toEqual(ids(notes0).concat(ids(notes00).concat(ids(notes1))).sort());
}));
it('should support filtering by created date', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'I made this on', body: 'May 20 2020', user_created_time: Date.parse('2020-05-20') });
const n2 = await Note.save({ title: 'I made this on', body: 'May 19 2020', user_created_time: Date.parse('2020-05-19') });
const n3 = await Note.save({ title: 'I made this on', body: 'May 18 2020', user_created_time: Date.parse('2020-05-18') });
await engine.syncTables();
rows = await engine.search('created:20200520');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('created:20200519');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('-created:20200519');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n3.id);
}));
it('should support filtering by between two dates', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'January 01 2020', body: 'January 01 2020', user_created_time: Date.parse('2020-01-01') });
const n2 = await Note.save({ title: 'February 15 2020', body: 'February 15 2020', user_created_time: Date.parse('2020-02-15') });
const n3 = await Note.save({ title: 'March 25 2019', body: 'March 25 2019', user_created_time: Date.parse('2019-03-25') });
const n4 = await Note.save({ title: 'March 01 2018', body: 'March 01 2018', user_created_time: Date.parse('2018-03-01') });
await engine.syncTables();
rows = await engine.search('created:20200101 -created:20200220');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('created:201901 -created:202002');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n3.id);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('created:2018 -created:2019');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n4.id);
}));
it('should support filtering by created with smart value: day', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'I made this', body: 'today', user_created_time: parseInt(time.goBackInTime(Date.now(), 0, 'day'), 10) });
const n2 = await Note.save({ title: 'I made this', body: 'yesterday', user_created_time: parseInt(time.goBackInTime(Date.now(), 1, 'day'), 10) });
const n3 = await Note.save({ title: 'I made this', body: 'day before yesterday', user_created_time: parseInt(time.goBackInTime(Date.now(), 2, 'day'), 10) });
await engine.syncTables();
rows = await engine.search('created:day-0');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('created:day-1');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('created:day-2');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
}));
it('should support filtering by created with smart value: week', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'I made this', body: 'this week', user_created_time: parseInt(time.goBackInTime(Date.now(), 0, 'week'), 10) });
const n2 = await Note.save({ title: 'I made this', body: 'the week before', user_created_time: parseInt(time.goBackInTime(Date.now(), 1, 'week'), 10) });
const n3 = await Note.save({ title: 'I made this', body: 'before before week', user_created_time: parseInt(time.goBackInTime(Date.now(), 2, 'week'), 10) });
await engine.syncTables();
rows = await engine.search('created:week-0');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('created:week-1');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('created:week-2');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
}));
it('should support filtering by created with smart value: month', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'I made this', body: 'this month', user_created_time: parseInt(time.goBackInTime(Date.now(), 0, 'month'), 10) });
const n2 = await Note.save({ title: 'I made this', body: 'the month before', user_created_time: parseInt(time.goBackInTime(Date.now(), 1, 'month'), 10) });
const n3 = await Note.save({ title: 'I made this', body: 'before before month', user_created_time: parseInt(time.goBackInTime(Date.now(), 2, 'month'), 10) });
await engine.syncTables();
rows = await engine.search('created:month-0');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('created:month-1');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('created:month-2');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
}));
it('should support filtering by created with smart value: year', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'I made this', body: 'this year', user_created_time: parseInt(time.goBackInTime(Date.now(), 0, 'year'), 10) });
const n2 = await Note.save({ title: 'I made this', body: 'the year before', user_created_time: parseInt(time.goBackInTime(Date.now(), 1, 'year'), 10) });
const n3 = await Note.save({ title: 'I made this', body: 'before before year', user_created_time: parseInt(time.goBackInTime(Date.now(), 2, 'year'), 10) });
await engine.syncTables();
rows = await engine.search('created:year-0');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('created:year-1');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('created:year-2');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
}));
it('should support filtering by updated date', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'I updated this on', body: 'May 20 2020', updated_time: Date.parse('2020-05-20'), user_updated_time: Date.parse('2020-05-20') }, { autoTimestamp: false });
const n2 = await Note.save({ title: 'I updated this on', body: 'May 19 2020', updated_time: Date.parse('2020-05-19'), user_updated_time: Date.parse('2020-05-19') }, { autoTimestamp: false });
await engine.syncTables();
rows = await engine.search('updated:20200520');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('updated:20200519');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
}));
it('should support filtering by updated with smart value: day', asyncTest(async () => {
let rows;
const today = parseInt(time.goBackInTime(Date.now(), 0, 'day'), 10);
const yesterday = parseInt(time.goBackInTime(Date.now(), 1, 'day'), 10);
const dayBeforeYesterday = parseInt(time.goBackInTime(Date.now(), 2, 'day'), 10);
const n1 = await Note.save({ title: 'I made this', body: 'today', updated_time: today, user_updated_time: today }, { autoTimestamp: false });
const n11 = await Note.save({ title: 'I also made this', body: 'today', updated_time: today, user_updated_time: today }, { autoTimestamp: false });
const n2 = await Note.save({ title: 'I made this', body: 'yesterday', updated_time: yesterday, user_updated_time: yesterday }, { autoTimestamp: false });
const n3 = await Note.save({ title: 'I made this', body: 'day before yesterday', updated_time: dayBeforeYesterday, user_updated_time: dayBeforeYesterday }, { autoTimestamp: false });
await engine.syncTables();
rows = await engine.search('updated:day-0');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n11.id);
rows = await engine.search('updated:day-1');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n11.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('updated:day-2');
expect(rows.length).toBe(4);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n11.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
}));
it('should support filtering by type todo', asyncTest(async () => {
let rows;
const t1 = await Note.save({ title: 'This is a ', body: 'todo', is_todo: 1 });
const t2 = await Note.save({ title: 'This is another', body: 'todo but completed', is_todo: 1, todo_completed: 1590085027710 });
const t3 = await Note.save({ title: 'This is NOT a ', body: 'todo' });
await engine.syncTables();
rows = await engine.search('type:todo');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(t1.id);
expect(ids(rows)).toContain(t2.id);
rows = await engine.search('any:1 type:todo');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(t1.id);
expect(ids(rows)).toContain(t2.id);
rows = await engine.search('iscompleted:1');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(t2.id);
rows = await engine.search('iscompleted:0');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(t1.id);
}));
it('should support filtering by type note', asyncTest(async () => {
let rows;
const t1 = await Note.save({ title: 'This is a ', body: 'todo', is_todo: 1 });
const t2 = await Note.save({ title: 'This is another', body: 'todo but completed', is_todo: 1, todo_completed: 1590085027710 });
const t3 = await Note.save({ title: 'This is NOT a ', body: 'todo' });
await engine.syncTables();
rows = await engine.search('type:note');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(t3.id);
}));
it('should support filtering by latitude, longitude, altitude', asyncTest(async () => {
let rows;
const n1 = await Note.save({ title: 'I made this', body: 'this week', latitude: 12.97, longitude: 88.88, altitude: 69.96 });
const n2 = await Note.save({ title: 'I made this', body: 'the week before', latitude: 42.11, longitude: 77.77, altitude: 42.00 });
const n3 = await Note.save({ title: 'I made this', body: 'before before week', latitude: 82.01, longitude: 66.66, altitude: 13.13 });
await engine.syncTables();
rows = await engine.search('latitude:13.5');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('-latitude:40');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('latitude:13 -latitude:80');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('altitude:13.5');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('-altitude:80.12');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('longitude:70 -longitude:80');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('latitude:20 longitude:50 altitude:40');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('any:1 latitude:20 longitude:50 altitude:40');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
}));
it('should support filtering by resource MIME type', asyncTest(async () => {
let rows;
const service = new ResourceService();
// console.log(testImagePath)
const folder1 = await Folder.save({ title: 'folder1' });
let n1 = await Note.save({ title: 'I have a picture', body: 'Im awesome', parent_id: folder1.id });
const n2 = await Note.save({ title: 'Boring note 1', body: 'I just have text', parent_id: folder1.id });
const n3 = await Note.save({ title: 'Boring note 2', body: 'me too', parent_id: folder1.id });
let n4 = await Note.save({ title: 'A picture?', body: 'pfff, I have a pdf', parent_id: folder1.id });
await engine.syncTables();
// let note1 = await Note.save({ title: 'ma note', parent_id: folder1.id });
n1 = await shim.attachFileToNote(n1, `${__dirname}/../tests/support/photo.jpg`);
// const resource1 = (await Resource.all())[0];
n4 = await shim.attachFileToNote(n4, `${__dirname}/../tests/support/welcome.pdf`);
await service.indexNoteResources();
rows = await engine.search('resource:image/jpeg');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('resource:image/*');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('resource:application/pdf');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n4.id);
rows = await engine.search('-resource:image/jpeg');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
expect(ids(rows)).toContain(n4.id);
rows = await engine.search('any:1 resource:application/pdf resource:image/jpeg');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n4.id);
}));
it('should ignore dashes in a word', asyncTest(async () => {
const n0 = await Note.save({ title: 'doesnotwork' });
const n1 = await Note.save({ title: 'does not work' });
const n2 = await Note.save({ title: 'does-not-work' });
const n3 = await Note.save({ title: 'does_not_work' });
await engine.syncTables();
let rows = await engine.search('does-not-work');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('does not work');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('"does not work"');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('title:does-not-work');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('doesnotwork');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n0.id);
}));
it('should support filtering by sourceurl', asyncTest(async () => {
const n0 = await Note.save({ title: 'n0', source_url: 'https://discourse.joplinapp.org' });
const n1 = await Note.save({ title: 'n1', source_url: 'https://google.com' });
const n2 = await Note.save({ title: 'n2', source_url: 'https://reddit.com' });
const n3 = await Note.save({ title: 'n3', source_url: 'https://joplinapp.org' });
await engine.syncTables();
let rows = await engine.search('sourceurl:https://joplinapp.org');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('sourceurl:https://google.com');
expect(rows.length).toBe(1);
expect(ids(rows)).toContain(n1.id);
rows = await engine.search('any:1 sourceurl:https://google.com sourceurl:https://reddit.com');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n1.id);
expect(ids(rows)).toContain(n2.id);
rows = await engine.search('-sourceurl:https://google.com');
expect(rows.length).toBe(3);
expect(ids(rows)).toContain(n0.id);
expect(ids(rows)).toContain(n2.id);
expect(ids(rows)).toContain(n3.id);
rows = await engine.search('sourceurl:*joplinapp.org');
expect(rows.length).toBe(2);
expect(ids(rows)).toContain(n0.id);
expect(ids(rows)).toContain(n3.id);
}));
});

Binary file not shown.

@ -0,0 +1,66 @@
/* eslint-disable no-unused-vars */
require('app-module-path').addPath(__dirname);
const { time } = require('lib/time-utils.js');
const { asyncTest, fileContentEqual, setupDatabase, setupDatabaseAndSynchronizer, db, synchronizer, fileApi, sleep, clearDatabase, switchClient, syncTargetId, objectsEqual, checkThrowAsync } = require('test-utils.js');
const timeUtils = require('../../ReactNativeClient/lib/time-utils');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
describe('timeUtils', function() {
beforeEach(async (done) => {
done();
});
it('should go back in time', asyncTest(async () => {
let startDate = new Date('3 Aug 2020');
let endDate = new Date('2 Aug 2020');
expect(time.goBackInTime(startDate, 1, 'day')).toBe(endDate.getTime().toString());
// We're always subtracting time from the beginning of the current period.
startDate = new Date('3 Aug 2020 07:30:20');
expect(time.goBackInTime(startDate, 1, 'day')).toBe(endDate.getTime().toString());
startDate = new Date('11 Aug 2020');
endDate = new Date('9 Aug 2020'); // week start;
expect(time.goBackInTime(startDate, 0, 'week')).toBe(endDate.getTime().toString());
startDate = new Date('02 Feb 2020');
endDate = new Date('01 Jan 2020');
expect(time.goBackInTime(startDate, 1, 'month')).toBe(endDate.getTime().toString());
startDate = new Date('19 September 2020');
endDate = new Date('01 Jan 1997');
expect(time.goBackInTime(startDate, 23, 'year')).toBe(endDate.getTime().toString());
}));
it('should go forward in time', asyncTest(async () => {
let startDate = new Date('2 Aug 2020');
let endDate = new Date('3 Aug 2020');
expect(time.goForwardInTime(startDate, 1, 'day')).toBe(endDate.getTime().toString());
startDate = new Date('2 Aug 2020 07:30:20');
expect(time.goForwardInTime(startDate, 1, 'day')).toBe(endDate.getTime().toString());
startDate = new Date('9 Aug 2020');
endDate = new Date('9 Aug 2020'); // week start;
expect(time.goForwardInTime(startDate, 0, 'week')).toBe(endDate.getTime().toString());
startDate = new Date('02 Jan 2020');
endDate = new Date('01 Feb 2020');
expect(time.goForwardInTime(startDate, 1, 'month')).toBe(endDate.getTime().toString());
startDate = new Date('19 September 1997');
endDate = new Date('01 Jan 2020');
expect(time.goForwardInTime(startDate, 23, 'year')).toBe(endDate.getTime().toString());
}));
});

@ -1,7 +1,7 @@
import { useMemo } from 'react';
const BaseModel = require('lib/BaseModel.js');
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
interface SearchMarkersOptions {
searchTimestamp: number,

@ -7,7 +7,7 @@ const BaseModel = require('lib/BaseModel');
const { _ } = require('lib/locale.js');
const { bridge } = require('electron').remote.require('./bridge');
const eventManager = require('lib/eventManager');
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const Note = require('lib/models/Note');
const Setting = require('lib/models/Setting');
const NoteListUtils = require('../utils/NoteListUtils');

@ -2,8 +2,8 @@ const React = require('react');
const { connect } = require('react-redux');
const { _ } = require('lib/locale.js');
const { themeStyle } = require('lib/theme');
const SearchEngine = require('lib/services/SearchEngine');
const CommandService = require('lib/services/CommandService').default;
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const BaseModel = require('lib/BaseModel');
const Tag = require('lib/models/Tag');
const Folder = require('lib/models/Folder');

@ -32,12 +32,12 @@ const SyncTargetDropbox = require('lib/SyncTargetDropbox.js');
const SyncTargetAmazonS3 = require('lib/SyncTargetAmazonS3.js');
const EncryptionService = require('lib/services/EncryptionService');
const ResourceFetcher = require('lib/services/ResourceFetcher');
const SearchEngineUtils = require('lib/services/SearchEngineUtils');
const SearchEngineUtils = require('lib/services/searchengine/SearchEngineUtils');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const RevisionService = require('lib/services/RevisionService');
const ResourceService = require('lib/services/RevisionService');
const DecryptionWorker = require('lib/services/DecryptionWorker');
const BaseService = require('lib/services/BaseService');
const SearchEngine = require('lib/services/SearchEngine');
const { loadKeychainServiceAndSettings } = require('lib/services/SettingUtils');
const KeychainServiceDriver = require('lib/services/keychain/KeychainServiceDriver.node').default;
const KvStore = require('lib/services/KvStore');

@ -16,7 +16,7 @@ const VersionInfo = require('react-native-version-info').default;
const { ReportService } = require('lib/services/report.js');
const { time } = require('lib/time-utils');
const { shim } = require('lib/shim');
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const RNFS = require('react-native-fs');
const checkPermissions = require('lib/checkPermissions.js').default;

@ -40,7 +40,7 @@ const ImagePicker = require('react-native-image-picker');
const { SelectDateTimeDialog } = require('lib/components/select-date-time-dialog.js');
const ShareExtension = require('lib/ShareExtension.js').default;
const CameraView = require('lib/components/CameraView');
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const urlUtils = require('lib/urlUtils');
class NoteScreenComponent extends BaseScreenComponent {

@ -9,8 +9,8 @@ const Note = require('lib/models/Note.js');
const { NoteItem } = require('lib/components/note-item.js');
const { BaseScreenComponent } = require('lib/components/base-screen.js');
const { themeStyle } = require('lib/components/global-style.js');
const SearchEngineUtils = require('lib/services/SearchEngineUtils');
const DialogBox = require('react-native-dialogbox').default;
const SearchEngineUtils = require('lib/services/searchengine/SearchEngineUtils');
Icon.loadFont();

@ -326,7 +326,7 @@ class JoplinDatabase extends Database {
// must be set in the synchronizer too.
// Note: v16 and v17 don't do anything. They were used to debug an issue.
const existingDatabaseVersions = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32];
const existingDatabaseVersions = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33];
let currentVersionIndex = existingDatabaseVersions.indexOf(fromVersion);
@ -757,13 +757,96 @@ class JoplinDatabase extends Database {
GROUP BY tags.id`);
}
if (targetVersion == 33) {
queries.push('DROP TRIGGER notes_fts_before_update');
queries.push('DROP TRIGGER notes_fts_before_delete');
queries.push('DROP TRIGGER notes_after_update');
queries.push('DROP TRIGGER notes_after_insert');
queries.push('DROP INDEX notes_normalized_id');
queries.push('DROP TABLE notes_normalized');
queries.push('DROP TABLE notes_fts');
const notesNormalized = `
CREATE TABLE notes_normalized (
id TEXT NOT NULL,
title TEXT NOT NULL DEFAULT "",
body TEXT NOT NULL DEFAULT "",
user_created_time INT NOT NULL DEFAULT 0,
user_updated_time INT NOT NULL DEFAULT 0,
is_todo INT NOT NULL DEFAULT 0,
todo_completed INT NOT NULL DEFAULT 0,
parent_id TEXT NOT NULL DEFAULT "",
latitude NUMERIC NOT NULL DEFAULT 0,
longitude NUMERIC NOT NULL DEFAULT 0,
altitude NUMERIC NOT NULL DEFAULT 0,
source_url TEXT NOT NULL DEFAULT ""
);
`;
queries.push(this.sqlStringToLines(notesNormalized)[0]);
queries.push('CREATE INDEX notes_normalized_id ON notes_normalized (id)');
queries.push('CREATE INDEX notes_normalized_user_created_time ON notes_normalized (user_created_time)');
queries.push('CREATE INDEX notes_normalized_user_updated_time ON notes_normalized (user_updated_time)');
queries.push('CREATE INDEX notes_normalized_is_todo ON notes_normalized (is_todo)');
queries.push('CREATE INDEX notes_normalized_todo_completed ON notes_normalized (todo_completed)');
queries.push('CREATE INDEX notes_normalized_parent_id ON notes_normalized (parent_id)');
queries.push('CREATE INDEX notes_normalized_latitude ON notes_normalized (latitude)');
queries.push('CREATE INDEX notes_normalized_longitude ON notes_normalized (longitude)');
queries.push('CREATE INDEX notes_normalized_altitude ON notes_normalized (altitude)');
queries.push('CREATE INDEX notes_normalized_source_url ON notes_normalized (source_url)');
const tableFields = 'id, title, body, user_created_time, user_updated_time, is_todo, todo_completed, parent_id, latitude, longitude, altitude, source_url';
const newVirtualTableSql = `
CREATE VIRTUAL TABLE notes_fts USING fts4(
content="notes_normalized",
notindexed="id",
notindexed="user_created_time",
notindexed="user_updated_time",
notindexed="is_todo",
notindexed="todo_completed",
notindexed="parent_id",
notindexed="latitude",
notindexed="longitude",
notindexed="altitude",
notindexed="source_url",
${tableFields}
);`
;
queries.push(this.sqlStringToLines(newVirtualTableSql)[0]);
queries.push(`
CREATE TRIGGER notes_fts_before_update BEFORE UPDATE ON notes_normalized BEGIN
DELETE FROM notes_fts WHERE docid=old.rowid;
END;`);
queries.push(`
CREATE TRIGGER notes_fts_before_delete BEFORE DELETE ON notes_normalized BEGIN
DELETE FROM notes_fts WHERE docid=old.rowid;
END;`);
queries.push(`
CREATE TRIGGER notes_after_update AFTER UPDATE ON notes_normalized BEGIN
INSERT INTO notes_fts(docid, ${tableFields}) SELECT rowid, ${tableFields} FROM notes_normalized WHERE new.rowid = notes_normalized.rowid;
END;`);
queries.push(`
CREATE TRIGGER notes_after_insert AFTER INSERT ON notes_normalized BEGIN
INSERT INTO notes_fts(docid, ${tableFields}) SELECT rowid, ${tableFields} FROM notes_normalized WHERE new.rowid = notes_normalized.rowid;
END;`);
queries.push(this.addMigrationFile(33));
}
queries.push({ sql: 'UPDATE version SET version = ?', params: [targetVersion] });
try {
await this.transactionExecBatch(queries);
} catch (error) {
if (targetVersion === 15 || targetVersion === 18) {
this.logger().warn('Could not upgrade to database v15 or v18 - FTS feature will not be used', error);
if (targetVersion === 15 || targetVersion === 18 || targetVersion === 33) {
this.logger().warn('Could not upgrade to database v15, v18 or v33 - FTS feature will not be used', error);
} else {
throw error;
}
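
For illustration only (not part of the commit): after migration 33 has run, rows written to notes_normalized are copied into notes_fts by the notes_after_insert/notes_after_update triggers above, so the index can be queried with a column-restricted FTS MATCH. A minimal sketch, assuming db is the JoplinDatabase instance and an async context:

// Sketch: find notes whose title contains "grocery" via the rebuilt FTS4 index.
const rows = await db.selectAll(
	'SELECT notes_fts.id, notes_fts.title FROM notes_fts WHERE notes_fts MATCH ?',
	['title:grocery']
);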

@ -0,0 +1,9 @@
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const script = {};
script.exec = async function() {
await SearchEngine.instance().rebuildIndex();
};
module.exports = script;

@ -3,6 +3,7 @@ const BaseModel = require('lib/BaseModel.js');
const migrationScripts = {
20: require('lib/migrations/20.js'),
27: require('lib/migrations/27.js'),
33: require('lib/migrations/33.js'),
};
class Migration extends BaseModel {

@ -4,7 +4,7 @@ const Note = require('lib/models/Note');
const Resource = require('lib/models/Resource');
const BaseModel = require('lib/BaseModel');
const BaseService = require('lib/services/BaseService');
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const Setting = require('lib/models/Setting');
const { shim } = require('lib/shim');
const ItemChangeUtils = require('lib/services/ItemChangeUtils');

@ -19,7 +19,7 @@ const ArrayUtils = require('lib/ArrayUtils.js');
const { netUtils } = require('lib/net-utils');
const { fileExtension, safeFileExtension, safeFilename, filename } = require('lib/path-utils');
const ApiResponse = require('lib/services/rest/ApiResponse');
const SearchEngineUtils = require('lib/services/SearchEngineUtils');
const SearchEngineUtils = require('lib/services/searchengine/SearchEngineUtils');
const { FoldersScreenUtils } = require('lib/folders-screen-utils.js');
const uri2path = require('file-uri-to-path');
const { MarkupToHtml } = require('lib/joplin-renderer');

@ -7,8 +7,11 @@ const ItemChangeUtils = require('lib/services/ItemChangeUtils');
const { pregQuote, scriptType } = require('lib/string-utils.js');
const removeDiacritics = require('diacritics').remove;
const { sprintf } = require('sprintf-js');
const filterParser = require('./filterParser').default;
const queryBuilder = require('./queryBuilder').default;
class SearchEngine {
constructor() {
this.dispatch = () => {};
this.logger_ = new Logger();
@ -62,13 +65,20 @@ class SearchEngine {
while (noteIds.length) {
const currentIds = noteIds.splice(0, 100);
const notes = await Note.modelSelectAll(`SELECT id, title, body FROM notes WHERE id IN ("${currentIds.join('","')}") AND is_conflict = 0 AND encryption_applied = 0`);
const notes = await Note.modelSelectAll(`
SELECT ${SearchEngine.relevantFields}
FROM notes
WHERE id IN ("${currentIds.join('","')}") AND is_conflict = 0 AND encryption_applied = 0`);
const queries = [];
for (let i = 0; i < notes.length; i++) {
const note = notes[i];
const n = this.normalizeNote_(note);
queries.push({ sql: 'INSERT INTO notes_normalized(id, title, body) VALUES (?, ?, ?)', params: [n.id, n.title, n.body] });
queries.push({ sql: `
INSERT INTO notes_normalized(${SearchEngine.relevantFields})
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
params: [n.id, n.title, n.body, n.user_created_time, n.user_updated_time, n.is_todo, n.todo_completed, n.parent_id, n.latitude, n.longitude, n.altitude, n.source_url] }
);
}
await this.db().transactionExecBatch(queries);
@ -138,7 +148,11 @@ class SearchEngine {
if (!changes.length) break;
const noteIds = changes.map(a => a.item_id);
const notes = await Note.modelSelectAll(`SELECT id, title, body FROM notes WHERE id IN ("${noteIds.join('","')}") AND is_conflict = 0 AND encryption_applied = 0`);
const notes = await Note.modelSelectAll(`
SELECT ${SearchEngine.relevantFields}
FROM notes WHERE id IN ("${noteIds.join('","')}") AND is_conflict = 0 AND encryption_applied = 0`
);
const queries = [];
for (let i = 0; i < changes.length; i++) {
@ -149,7 +163,10 @@ class SearchEngine {
const note = this.noteById_(notes, change.item_id);
if (note) {
const n = this.normalizeNote_(note);
queries.push({ sql: 'INSERT INTO notes_normalized(id, title, body) VALUES (?, ?, ?)', params: [change.item_id, n.title, n.body] });
queries.push({ sql: `
INSERT INTO notes_normalized(${SearchEngine.relevantFields})
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
params: [change.item_id, n.title, n.body, n.user_created_time, n.user_updated_time, n.is_todo, n.todo_completed, n.parent_id, n.latitude, n.longitude, n.altitude, n.source_url] });
report.inserted++;
}
} else if (change.type === ItemChange.TYPE_DELETE) {
@ -295,44 +312,20 @@ class SearchEngine {
}
parseQuery(query) {
const terms = { _: [] };
const trimQuotes = (str) => str.startsWith('"') ? str.substr(1, str.length - 2) : str;
let inQuote = false;
let currentCol = '_';
let currentTerm = '';
for (let i = 0; i < query.length; i++) {
const c = query[i];
if (c === '"') {
if (inQuote) {
terms[currentCol].push(currentTerm);
currentTerm = '';
inQuote = false;
} else {
inQuote = true;
}
continue;
}
if (c === ' ' && !inQuote) {
if (!currentTerm) continue;
terms[currentCol].push(currentTerm);
currentCol = '_';
currentTerm = '';
continue;
}
if (c === ':' && !inQuote) {
currentCol = currentTerm;
if (!terms[currentCol]) terms[currentCol] = [];
currentTerm = '';
continue;
}
currentTerm += c;
let allTerms = [];
try {
allTerms = filterParser(query);
} catch (error) {
console.warn(error);
}
if (currentTerm) terms[currentCol].push(currentTerm);
const textTerms = allTerms.filter(x => x.name === 'text').map(x => trimQuotes(x.value));
const titleTerms = allTerms.filter(x => x.name === 'title').map(x => trimQuotes(x.value));
const bodyTerms = allTerms.filter(x => x.name === 'body').map(x => trimQuotes(x.value));
const terms = { _: textTerms, 'title': titleTerms, 'body': bodyTerms };
// Filter terms:
// - Convert wildcards to regex
@ -373,7 +366,8 @@ class SearchEngine {
return {
termCount: termCount,
keys: keys,
terms: terms,
terms: terms, // text terms
allTerms: allTerms,
};
}
@ -432,54 +426,38 @@ class SearchEngine {
return SearchEngine.SEARCH_TYPE_FTS;
}
async search(query, options = null) {
async search(searchString, options = null) {
options = Object.assign({}, {
searchType: SearchEngine.SEARCH_TYPE_AUTO,
}, options);
query = this.normalizeText_(query);
searchString = this.normalizeText_(searchString);
const searchType = this.determineSearchType_(query, options.searchType);
const parsedQuery = this.parseQuery(query);
const searchType = this.determineSearchType_(searchString, options.searchType);
if (searchType === SearchEngine.SEARCH_TYPE_BASIC) {
// Non-alphabetical languages aren't supported by SQLite FTS (except with extensions, which are not available on all platforms)
const rows = await this.basicSearch(query);
const rows = await this.basicSearch(searchString);
const parsedQuery = this.parseQuery(searchString);
this.processResults_(rows, parsedQuery, true);
return rows;
} else { // SEARCH_TYPE_FTS
} else {
// SEARCH_TYPE_FTS
// FTS will ignore all special characters, like "-" in the index. So if
// we search for "this-phrase" it won't find it because it will only
// see "this phrase" in the index. Because of this, we remove the dashes
// when searching.
// https://github.com/laurent22/joplin/issues/1075#issuecomment-459258856
query = query.replace(/-/g, ' ');
// Note that when the search engine index is somehow corrupted, it might contain
// references to notes that don't exist. Not clear how it can happen, but anyway
// handle it here by checking if `user_updated_time` IS NOT NULL. Was causing this
// issue: https://discourse.joplinapp.org/t/how-to-recover-corrupted-database/9367
const sql = `
SELECT
notes_fts.id,
notes_fts.title AS normalized_title,
offsets(notes_fts) AS offsets,
notes.title,
notes.user_updated_time,
notes.is_todo,
notes.todo_completed,
notes.parent_id
FROM notes_fts
LEFT JOIN notes ON notes_fts.id = notes.id
WHERE notes_fts MATCH ?
AND notes.user_updated_time IS NOT NULL
`;
const parsedQuery = this.parseQuery(searchString);
try {
const rows = await this.db().selectAll(sql, [query]);
const { query, params } = queryBuilder(parsedQuery.allTerms);
const rows = await this.db().selectAll(query, params);
this.processResults_(rows, parsedQuery);
return rows;
} catch (error) {
this.logger().warn(`Cannot execute MATCH query: ${query}: ${error.message}`);
this.logger().warn(`Cannot execute MATCH query: ${searchString}: ${error.message}`);
return [];
}
}
@ -504,6 +482,8 @@ class SearchEngine {
}
}
SearchEngine.relevantFields = 'id, title, body, user_created_time, user_updated_time, is_todo, todo_completed, parent_id, latitude, longitude, altitude, source_url';
SearchEngine.instance_ = null;
SearchEngine.SEARCH_TYPE_AUTO = 'auto';

@ -1,4 +1,4 @@
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const Note = require('lib/models/Note');
class SearchEngineUtils {
@ -42,7 +42,13 @@ class SearchEngineUtils {
if (idWasAutoAdded) delete sortedNotes[idx].id;
}
return sortedNotes;
if (noteIds.length !== notes.length) {
// remove null objects
return sortedNotes.filter(n => n);
} else {
return sortedNotes;
}
}
}

@ -0,0 +1,133 @@
interface Term {
name: string
value: string
negated: boolean
}
const makeTerm = (name: string, value: string): Term => {
if (name.startsWith('-')) { return { name: name.slice(1), value: value, negated: true }; }
return { name: name, value: value, negated: false };
};
const quote = (s : string) => {
const quoted = (s: string) => s.startsWith('"') && s.endsWith('"');
if (!quoted(s)) {
return `"${s}"`;
}
return s;
};
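// e.g. quote('hello') returns '"hello"', while quote('"hello"') is returned unchanged (illustrative)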
const getTerms = (query: string): Term[] => {
const terms: Term[] = [];
let inQuote = false;
let inTerm = false;
let currentCol = '_';
let currentTerm = '';
for (let i = 0; i < query.length; i++) {
const c = query[i];
if (c === '"') {
currentTerm += c; // keep the quotes
if (inQuote) {
terms.push(makeTerm(currentCol, currentTerm));
currentTerm = '';
inQuote = false;
} else {
inQuote = true;
}
continue;
}
if (c === ' ' && !inQuote) {
inTerm = false;
if (!currentTerm) continue;
terms.push(makeTerm(currentCol, currentTerm));
currentCol = '_';
currentTerm = '';
continue;
}
if (c === ':' && !inQuote && !inTerm) {
currentCol = currentTerm;
currentTerm = '';
inTerm = true; // ignore any further ':' until the next space, e.g. 'sourceurl:https://www.google.com'
continue;
}
currentTerm += c;
}
if (currentTerm) terms.push(makeTerm(currentCol, currentTerm));
return terms;
};
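// Example of what getTerms produces (illustrative, not part of the original code):
//   getTerms('title:"hello world" -tag:foo bar')
//   => [ { name: 'title', value: '"hello world"', negated: false },
//        { name: 'tag', value: 'foo', negated: true },
//        { name: '_', value: 'bar', negated: false } ]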
const parseQuery = (query: string): Term[] => {
const validFilters = new Set(['any', 'title', 'body', 'tag',
'notebook', 'created', 'updated', 'type',
'iscompleted', 'latitude', 'longitude',
'altitude', 'resource', 'sourceurl']);
const terms = getTerms(query);
const result: Term[] = [];
for (let i = 0; i < terms.length; i++) {
const { name, value, negated } = terms[i];
if (name !== '_') {
if (!validFilters.has(name)) {
throw new Error(`Invalid filter: ${name}`);
}
if (name === 'tag' || name === 'notebook' || name === 'resource' || name === 'sourceurl') {
result.push({ name, value: value.replace(/[*]/g, '%'), negated }); // for wildcard search
} else if (name === 'title' || name === 'body') {
// Trim quotes since we don't support phrase queries here,
// e.g. split title:"hello world" into title:hello title:world
const values = trimQuotes(value).split(/[\s-_]+/);
values.forEach(value => {
result.push({ name, value, negated });
});
} else {
result.push({ name, value, negated });
}
} else {
// Every word is quoted if it isn't already.
// Quoting lets the FTS MATCH query take care of dashes and other word separators.
if (value.startsWith('-')) {
result.push({ name: 'text', value: quote(value.slice(1)), negated: true });
} else {
result.push({ name: 'text', value: quote(value), negated: false });
}
}
}
// validation
let incorrect = result.filter(term => term.name === 'type' || term.name === 'iscompleted' || term.name === 'notebook')
.find(x => x.negated);
if (incorrect) throw new Error(`${incorrect.name} can't be negated`);
incorrect = result.filter(term => term.name === 'type')
.find(x => (x.value !== 'note' && x.value !== 'todo'));
if (incorrect) throw new Error('The value of filter "type" must be "note" or "todo"');
incorrect = result.filter(term => term.name === 'iscompleted')
.find(x => (x.value !== '1' && x.value !== '0'));
if (incorrect) throw new Error('The value of filter "iscompleted" must be "1" or "0"');
return result;
};
const trimQuotes = (str: string): string => str.startsWith('"') ? str.substr(1, str.length - 2) : str;
export default function filterParser(searchString: string) {
searchString = searchString.trim();
const result = parseQuery(searchString);
return result;
}
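// Illustrative end-to-end example (assumed behaviour, not part of the original source):
//   filterParser('title:"hello world" -tag:foo* bar')
//   => [ { name: 'title', value: 'hello', negated: false },
//        { name: 'title', value: 'world', negated: false },
//        { name: 'tag', value: 'foo%', negated: true },
//        { name: 'text', value: '"bar"', negated: false } ]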

View File

@ -0,0 +1,428 @@
const { time } = require('lib/time-utils.js');
interface Term {
name: string
value: string
negated: boolean
}
enum Relation {
OR = 'OR',
AND = 'AND',
}
enum Operation {
UNION = 'UNION',
INTERSECT = 'INTERSECT'
}
enum Requirement {
EXCLUSION = 'EXCLUSION',
INCLUSION = 'INCLUSION'
}
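// notebookFilter (below) builds a recursive CTE (notebooks_in_scope) so that filtering
// on a notebook also matches notes in all of its sub-notebooks.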
const notebookFilter = (terms: Term[], conditions: string[], params: string[], withs: string[]) => {
const notebooks = terms.filter(x => x.name === 'notebook' && !x.negated).map(x => x.value);
if (notebooks.length === 0) return;
const likes = [];
for (let i = 0; i < notebooks.length; i++) {
likes.push('folders.title LIKE ?');
}
const relevantFolders = likes.join(' OR ');
const withInNotebook = `
notebooks_in_scope(id)
AS (
SELECT folders.id
FROM folders
WHERE id
IN (
SELECT id
FROM folders
WHERE ${relevantFolders}
)
UNION ALL
SELECT folders.id
FROM folders
JOIN notebooks_in_scope
ON folders.parent_id=notebooks_in_scope.id
)`;
const where = `
AND ROWID IN (
SELECT notes_normalized.ROWID
FROM notebooks_in_scope
JOIN notes_normalized
ON notebooks_in_scope.id=notes_normalized.parent_id
)`;
withs.push(withInNotebook);
params.push(...notebooks);
conditions.push(where);
};
const getOperator = (requirement: Requirement, relation: Relation): Operation => {
if (relation === 'AND' && requirement === 'INCLUSION') { return Operation.INTERSECT; } else { return Operation.UNION; }
};
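// Summary of how relation/requirement map onto set operations in filterByTableName
// (assumed reading of the code below):
//   AND + INCLUSION -> INTERSECT : a note must contain every required tag/resource
//   OR  + INCLUSION -> UNION     : a note may contain any of the required values
//   AND + EXCLUSION -> UNION     : notes containing any excluded value are removed via NOT IN
//   OR  + EXCLUSION -> UNION     : notes missing at least one excluded value are kept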
const filterByTableName = (
terms: Term[],
conditions: string[],
params: string[],
relation: Relation,
noteIDs: string,
requirement: Requirement,
withs: string[],
tableName: string
) => {
const operator: Operation = getOperator(requirement, relation);
const values = terms.map(x => x.value);
let withCondition = null;
if (relation === Relation.OR && requirement === Requirement.EXCLUSION) {
// with_${requirement}_${tableName} is added to the names to make them unique
withs.push(`
all_notes_with_${requirement}_${tableName}
AS (
SELECT DISTINCT note_${tableName}.note_id AS id FROM note_${tableName}
)`);
const notesWithoutExcludedField = `
SELECT * FROM (
SELECT *
FROM all_notes_with_${requirement}_${tableName}
EXCEPT ${noteIDs}
)`;
const requiredNotes = [];
for (let i = 0; i < values.length; i++) {
requiredNotes.push(notesWithoutExcludedField);
}
const requiredNotesQuery = requiredNotes.join(' UNION ');
// We need notes that are missing at least one of the excluded values (tag/resource)
withCondition = `
notes_with_${requirement}_${tableName}
AS (
${requiredNotesQuery}
)`;
} else {
const requiredNotes = [];
for (let i = 0; i < values.length; i++) {
requiredNotes.push(noteIDs);
}
const requiredNotesQuery = requiredNotes.join(` ${operator} `);
// Notes with any/all values depending upon relation and requirement
withCondition = `
notes_with_${requirement}_${tableName}
AS (
SELECT note_${tableName}.note_id as id
FROM note_${tableName}
WHERE
${operator === 'INTERSECT' ? 1 : 0} ${operator}
${requiredNotesQuery}
)`;
}
// Get the ROWIDs that satisfy the condition so we can filter the result
const whereCondition = `
${relation} ROWID ${(relation === 'AND' && requirement === 'EXCLUSION') ? 'NOT' : ''}
IN (
SELECT notes_normalized.ROWID
FROM notes_with_${requirement}_${tableName}
JOIN notes_normalized
ON notes_with_${requirement}_${tableName}.id=notes_normalized.id
)`;
withs.push(withCondition);
params.push(...values);
conditions.push(whereCondition);
};
const resourceFilter = (terms: Term[], conditions: string[], params: string[], relation: Relation, withs: string[]) => {
const tableName = 'resources';
const resourceIDs = `
SELECT resources.id
FROM resources
WHERE resources.mime LIKE ?`;
const noteIDsWithResource = `
SELECT note_resources.note_id AS id
FROM note_resources
WHERE note_resources.is_associated=1
AND note_resources.resource_id IN (${resourceIDs})`;
const requiredResources = terms.filter(x => x.name === 'resource' && !x.negated);
const excludedResources = terms.filter(x => x.name === 'resource' && x.negated);
if (requiredResources.length > 0) {
filterByTableName(requiredResources, conditions, params, relation, noteIDsWithResource, Requirement.INCLUSION, withs, tableName);
}
if (excludedResources.length > 0) {
filterByTableName(excludedResources, conditions, params, relation, noteIDsWithResource, Requirement.EXCLUSION, withs, tableName);
}
};
const tagFilter = (terms: Term[], conditions: string[], params: string[], relation: Relation, withs: string[]) => {
const tableName = 'tags';
const tagIDs = `
SELECT tags.id
FROM tags
WHERE tags.title
LIKE ?`;
const noteIDsWithTag = `
SELECT note_tags.note_id AS id
FROM note_tags
WHERE note_tags.tag_id IN (${tagIDs})`;
const requiredTags = terms.filter(x => x.name === 'tag' && !x.negated);
const excludedTags = terms.filter(x => x.name === 'tag' && x.negated);
if (requiredTags.length > 0) {
filterByTableName(requiredTags, conditions, params, relation, noteIDsWithTag, Requirement.INCLUSION, withs, tableName);
}
if (excludedTags.length > 0) {
filterByTableName(excludedTags, conditions, params, relation, noteIDsWithTag, Requirement.EXCLUSION, withs, tableName);
}
};
const genericFilter = (terms: Term[], conditions: string[], params: string[], relation: Relation, fieldName: string) => {
if (fieldName === 'iscompleted' || fieldName === 'type') {
// Faster query for fields that can only take two distinct values
biConditionalFilter(terms, conditions, relation, fieldName);
return;
}
const getCondition = (term: Term) => {
if (fieldName === 'sourceurl') { return `notes_normalized.source_url ${term.negated ? 'NOT' : ''} LIKE ?`; }
const column = (fieldName === 'date') ? `user_${term.name}_time` : term.name;
return `notes_normalized.${column} ${term.negated ? '<' : '>='} ?`;
};
terms.forEach(term => {
conditions.push(`
${relation} ROWID IN (
SELECT ROWID
FROM notes_normalized
WHERE ${getCondition(term)}
)`);
params.push(term.value);
});
};
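// 'type' and 'iscompleted' can only take two values ('note'/'todo' and '1'/'0'),
// so they are compared directly against is_todo / todo_completed rather than bound as parameters.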
const biConditionalFilter = (terms: Term[], conditions: string[], relation: Relation, filterName: string) => {
const getCondition = (filterName: string, value: string, relation: Relation) => {
const tableName = (relation === 'AND') ? 'notes_fts' : 'notes_normalized';
if (filterName === 'type') {
return `${tableName}.is_todo IS ${value === 'todo' ? 1 : 0}`;
} else if (filterName === 'iscompleted') {
return `${tableName}.is_todo IS 1 AND ${tableName}.todo_completed IS ${value === '1' ? 'NOT 0' : '0'}`;
} else {
throw new Error('Invalid filter name.');
}
};
const values = terms.map(x => x.value);
// AND and OR are handled differently because FTS restricts how OR can be used.
values.forEach(value => {
if (relation === 'AND') {
conditions.push(`
AND ${getCondition(filterName, value, relation)}`);
}
if (relation === 'OR') {
conditions.push(`
OR ROWID IN (
SELECT ROWID
FROM notes_normalized
WHERE ${getCondition(filterName, value, relation)}
)`);
}
});
};
const typeFilter = (terms: Term[], conditions: string[], params: string[], relation: Relation) => {
const typeTerms = terms.filter(x => x.name === 'type');
genericFilter(typeTerms, conditions, params, relation, 'type');
};
const completedFilter = (terms: Term[], conditions: string[], params: string[], relation: Relation) => {
const completedTerms = terms.filter(x => x.name === 'iscompleted');
genericFilter(completedTerms, conditions, params, relation, 'iscompleted');
};
const locationFilter = (terms: Term[], conditions: string[], params: string[], relation: Relation) => {
const locationTerms = terms.filter(x => x.name === 'latitude' || x.name === 'longitude' || x.name === 'altitude');
genericFilter(locationTerms, conditions, params, relation, 'location');
};
const dateFilter = (terms: Term[], conditions: string[], params: string[], relation: Relation) => {
const getUnixMs = (date:string): string => {
const yyyymmdd = /^[0-9]{8}$/;
const yyyymm = /^[0-9]{6}$/;
const yyyy = /^[0-9]{4}$/;
const smartValue = /^(day|week|month|year)-([0-9]+)$/i;
if (yyyymmdd.test(date)) {
return time.formatLocalToMs(date, 'YYYYMMDD').toString();
} else if (yyyymm.test(date)) {
return time.formatLocalToMs(date, 'YYYYMM').toString();
} else if (yyyy.test(date)) {
return time.formatLocalToMs(date, 'YYYY').toString();
} else if (smartValue.test(date)) {
const match = smartValue.exec(date);
const timeUnit = match[1]; // eg. day, week, month, year
const num = Number(match[2]); // eg. 1, 12, 15
return time.goBackInTime(Date.now(), num, timeUnit);
} else {
throw new Error('Invalid date format!');
}
};
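// Examples (illustrative): '20200101' -> ms timestamp of 2020-01-01 00:00 local time,
// 'day-7' -> ms timestamp of the start of the day seven days ago.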
const dateTerms = terms.filter(x => x.name === 'created' || x.name === 'updated');
const unixDateTerms = dateTerms.map(term => { return { ...term, value: getUnixMs(term.value) }; });
genericFilter(unixDateTerms, conditions, params, relation, 'date');
};
const sourceUrlFilter = (terms: Term[], conditions: string[], params: string[], relation: Relation) => {
const urlTerms = terms.filter(x => x.name === 'sourceurl');
genericFilter(urlTerms, conditions, params, relation, 'sourceurl');
};
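// textFilter (below): included title/body/text terms are combined into a single FTS MATCH
// expression, while negated terms are excluded afterwards via NOT IN / EXCEPT sub-queries.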
const textFilter = (terms: Term[], conditions: string[], params: string[], relation: Relation) => {
const addExcludeTextConditions = (excludedTerms: Term[], conditions:string[], params: string[], relation: Relation) => {
const type = excludedTerms[0].name === 'text' ? '' : `.${excludedTerms[0].name}`;
if (relation === 'AND') {
conditions.push(`
AND ROWID NOT IN (
SELECT ROWID
FROM notes_fts
WHERE notes_fts${type} MATCH ?
)`);
params.push(excludedTerms.map(x => x.value).join(' OR '));
}
if (relation === 'OR') {
excludedTerms.forEach(term => {
conditions.push(`
OR ROWID IN (
SELECT *
FROM (
SELECT ROWID
FROM notes_fts
EXCEPT
SELECT ROWID
FROM notes_fts
WHERE notes_fts${type} MATCH ?
)
)`);
params.push(term.value);
});
}
};
const allTerms = terms.filter(x => x.name === 'title' || x.name === 'body' || x.name === 'text');
const includedTerms = allTerms.filter(x => !x.negated);
if (includedTerms.length > 0) {
conditions.push(`${relation} notes_fts MATCH ?`);
const termsToMatch = includedTerms.map(term => {
if (term.name === 'text') return term.value;
else return `${term.name}:${term.value}`;
});
const matchQuery = (relation === 'OR') ? termsToMatch.join(' OR ') : termsToMatch.join(' ');
params.push(matchQuery);
}
const excludedTextTerms = allTerms.filter(x => x.name === 'text' && x.negated);
const excludedTitleTerms = allTerms.filter(x => x.name === 'title' && x.negated);
const excludedBodyTerms = allTerms.filter(x => x.name === 'body' && x.negated);
if ((excludedTextTerms.length > 0)) {
addExcludeTextConditions(excludedTextTerms, conditions, params, relation);
}
if (excludedTitleTerms.length > 0) {
addExcludeTextConditions(excludedTitleTerms, conditions, params, relation);
}
if (excludedBodyTerms.length > 0) {
addExcludeTextConditions(excludedBodyTerms, conditions, params, relation);
}
};
const getDefaultRelation = (terms: Term[]): Relation => {
const anyTerm = terms.find(term => term.name === 'any');
if (anyTerm) { return (anyTerm.value === '1') ? Relation.OR : Relation.AND; }
return Relation.AND;
};
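// getDefaultRelation: 'any:1' switches the whole query from AND (all filters must match)
// to OR (a note may match any filter).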
const getConnective = (terms: Term[], relation: Relation): string => {
const notebookTerm = terms.find(x => x.name === 'notebook');
return (!notebookTerm && (relation === 'OR')) ? 'ROWID=-1' : '1'; // ROWID=-1 acts as 0 (something always false)
};
export default function queryBuilder(terms: Term[]) {
const queryParts: string[] = [];
const params: string[] = [];
const withs: string[] = [];
// console.log("testing beep beep boop boop")
// console.log(terms);
const relation: Relation = getDefaultRelation(terms);
queryParts.push(`
SELECT
notes_fts.id,
notes_fts.title,
offsets(notes_fts) AS offsets,
notes_fts.user_created_time,
notes_fts.user_updated_time,
notes_fts.is_todo,
notes_fts.todo_completed,
notes_fts.parent_id
FROM notes_fts
WHERE ${getConnective(terms, relation)}`);
notebookFilter(terms, queryParts, params, withs);
tagFilter(terms, queryParts, params, relation, withs);
resourceFilter(terms, queryParts, params, relation, withs);
textFilter(terms, queryParts, params, relation);
typeFilter(terms, queryParts, params, relation);
completedFilter(terms, queryParts, params, relation);
dateFilter(terms, queryParts, params, relation);
locationFilter(terms, queryParts, params, relation);
sourceUrlFilter(terms, queryParts, params, relation);
let query;
if (withs.length > 0) {
query = ['WITH RECURSIVE', withs.join(','), queryParts.join(' ')].join(' ');
} else {
query = queryParts.join(' ');
}
return { query, params };
}
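// Minimal usage sketch (illustrative only, assuming the db wrapper used by SearchEngine):
//   const terms = filterParser('tag:recipes iscompleted:0');
//   const { query, params } = queryBuilder(terms);
//   const rows = await db.selectAll(query, params);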

View File

@ -111,6 +111,17 @@ class Time {
sleep(seconds) {
return this.msleep(seconds * 1000);
}
goBackInTime(startDate, n, period) {
// period is a string (e.g. "day", "week", "month", "year"), n is an integer
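// Example (illustrative): goBackInTime(Date.now(), 2, 'week') returns the start of the week
// two weeks ago, as a Unix timestamp in milliseconds (string).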
return moment(startDate).startOf(period).subtract(n, period).format('x');
}
goForwardInTime(startDate, n, period) {
return moment(startDate).startOf(period).add(n, period).format('x');
}
}
const time = new Time();

View File

@ -61,7 +61,7 @@ const DropdownAlert = require('react-native-dropdownalert').default;
const ShareExtension = require('lib/ShareExtension.js').default;
const handleShared = require('lib/shareHandler').default;
const ResourceFetcher = require('lib/services/ResourceFetcher');
const SearchEngine = require('lib/services/SearchEngine');
const SearchEngine = require('lib/services/searchengine/SearchEngine');
const WelcomeUtils = require('lib/WelcomeUtils');
const { themeStyle } = require('lib/components/global-style.js');
const { uuid } = require('lib/uuid.js');