Mirror of https://github.com/bpatrik/pigallery2.git (synced 2025-04-09 07:14:07 +02:00)

Commit 8d13af48b0 (parent 864209de4a): upgrading to bootstrap 5
@@ -14,4 +14,3 @@ last 2 Edge major versions
last 2 Safari major versions
last 2 iOS major versions
Firefox ESR
not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.
43 .eslintrc.json Normal file
@@ -0,0 +1,43 @@
{
  "root": true,
  "parser": "@typescript-eslint/parser",
  "plugins": [
    "@typescript-eslint"
  ],
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/recommended",
    "plugin:@angular-eslint/recommended"
  ],
  "overrides": [
    {
      "files": [
        "*.component.ts"
      ],
      "parser": "@typescript-eslint/parser",
      "parserOptions": {
        "project": "./src/frontend/tsconfig.app.json",
        "ecmaVersion": 2020,
        "sourceType": "module"
      },
      "plugins": [
        "@angular-eslint/template"
      ],
      "processor": "@angular-eslint/template/extract-inline-html"
    },
    {
      "files": [
        "*.component.html"
      ],
      "parser": "@angular-eslint/template-parser",
      "parserOptions": {
        "project": "./src/frontend/tsconfig.app.json",
        "ecmaVersion": 2020,
        "sourceType": "module"
      },
      "plugins": [
        "@angular-eslint/template"
      ]
    }
  ]
}
56 angular.json
@@ -1,5 +1,8 @@
{
"$schema": "./node_modules/@angular/cli/lib/config/schema.json",
"cli": {
"defaultCollection": "@angular-eslint/schematics"
},
"version": 1,
"newProjectRoot": "projects",
"projects": {
@@ -7,7 +10,11 @@
"root": "",
"sourceRoot": "src/frontend",
"projectType": "application",
"schematics": {},
"schematics": {
"@schematics/angular:application": {
"strict": true
}
},
"prefix": "app",
"i18n": {
"sourceLocale": {
@@ -15,7 +22,7 @@
"baseHref": ""
},
"locales": {
"cn": {
"zh": {
"baseHref": "",
"translation": "src/frontend/translate/messages.cn.xlf"
},
@@ -185,49 +192,16 @@
}
},
"lint": {
"builder": "@angular-devkit/build-angular:tslint",
"builder": "@angular-eslint/builder:lint",
"options": {
"tsConfig": [
"src/tsconfig.app.json",
"src/tsconfig.spec.json"
],
"exclude": []
}
}
}
},
"pigallery2-e2e": {
"root": "",
"sourceRoot": "",
"projectType": "application",
"architect": {
"e2e": {
"builder": "@angular-devkit/build-angular:protractor",
"options": {
"protractorConfig": "./protractor.conf.js",
"devServerTarget": "pigallery2:serve"
}
},
"lint": {
"builder": "@angular-devkit/build-angular:tslint",
"options": {
"tsConfig": [
"test/e2e/tsconfig.e2e.json"
],
"exclude": []
"lintFilePatterns": [
"src/**/*.ts",
"src/**/*.html"
]
}
}
}
}
},
"defaultProject": "pigallery2",
"schematics": {
"@schematics/angular:component": {
"prefix": "app",
"styleext": "css"
},
"@schematics/angular:directive": {
"prefix": "app"
}
}
"defaultProject": "pigallery2"
}
@ -1,14 +1,10 @@
|
||||
/* tslint:disable:no-inferrable-types */
|
||||
/* eslint-disable @typescript-eslint/no-inferrable-types */
|
||||
import * as path from 'path';
|
||||
import {ConfigClass, ConfigClassBuilder} from 'typeconfig/node';
|
||||
import {ConfigProperty, SubConfigClass} from 'typeconfig/common';
|
||||
import {JobTrigger, JobTriggerType} from '../src/common/entities/job/JobScheduleDTO';
|
||||
import {ServerVideoConfig} from '../src/common/config/private/PrivateConfig';
|
||||
|
||||
|
||||
import { ConfigClass, ConfigClassBuilder } from 'typeconfig/node';
|
||||
import { ConfigProperty, SubConfigClass } from 'typeconfig/common';
|
||||
|
||||
@SubConfigClass()
|
||||
export class BenchmarksConfig {
|
||||
export class BenchmarksConfig {
|
||||
@ConfigProperty()
|
||||
bmScanDirectory: boolean = true;
|
||||
@ConfigProperty()
|
||||
@ -39,25 +35,28 @@ export class BenchmarksConfig {
|
||||
rewriteENVConfig: true,
|
||||
enumsAsString: true,
|
||||
saveIfNotExist: true,
|
||||
exitOnConfig: true
|
||||
exitOnConfig: true,
|
||||
},
|
||||
defaults: {
|
||||
enabled: true
|
||||
}
|
||||
}
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
})
|
||||
export class PrivateConfigClass {
|
||||
@ConfigProperty({description: 'Images are loaded from this folder (read permission required)'})
|
||||
@ConfigProperty({
|
||||
description:
|
||||
'Images are loaded from this folder (read permission required)',
|
||||
})
|
||||
path: string = '/app/data/images';
|
||||
@ConfigProperty({description: 'Describe your system setup'})
|
||||
@ConfigProperty({ description: 'Describe your system setup' })
|
||||
system: string = '';
|
||||
@ConfigProperty({description: 'Number of times to run the benchmark'})
|
||||
@ConfigProperty({ description: 'Number of times to run the benchmark' })
|
||||
RUNS: number = 50;
|
||||
@ConfigProperty({description: 'Enables / disables benchmarks'})
|
||||
@ConfigProperty({ description: 'Enables / disables benchmarks' })
|
||||
Benchmarks: BenchmarksConfig = new BenchmarksConfig();
|
||||
|
||||
|
||||
}
|
||||
|
||||
export const BMConfig = ConfigClassBuilder.attachInterface(new PrivateConfigClass());
|
||||
export const BMConfig = ConfigClassBuilder.attachInterface(
|
||||
new PrivateConfigClass()
|
||||
);
|
||||
BMConfig.loadSync();
|
||||
|
303 gulpfile.ts
@ -1,23 +1,24 @@
|
||||
import * as gulp from 'gulp';
|
||||
import * as fs from 'fs';
|
||||
import {promises as fsp} from 'fs';
|
||||
import { promises as fsp } from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as util from 'util';
|
||||
import * as zip from 'gulp-zip';
|
||||
import * as ts from 'gulp-typescript';
|
||||
import * as xml2js from 'xml2js';
|
||||
import * as child_process from 'child_process';
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
import * as jeditor from 'gulp-json-editor';
|
||||
import {XLIFF} from 'xlf-google-translate';
|
||||
import {PrivateConfigClass} from './src/common/config/private/Config';
|
||||
import {ConfigClassBuilder} from 'typeconfig/src/decorators/builders/ConfigClassBuilder';
|
||||
import { XLIFF } from 'xlf-google-translate';
|
||||
import { PrivateConfigClass } from './src/common/config/private/Config';
|
||||
import { ConfigClassBuilder } from 'typeconfig/src/decorators/builders/ConfigClassBuilder';
|
||||
|
||||
const execPr = util.promisify(child_process.exec);
|
||||
|
||||
const translationFolder = 'translate';
|
||||
const tsBackendProject = ts.createProject('tsconfig.json');
|
||||
declare var process: NodeJS.Process;
|
||||
declare const process: NodeJS.Process;
|
||||
|
||||
const getSwitch = (name: string, def: string = null): string => {
|
||||
name = '--' + name;
|
||||
@ -36,26 +37,35 @@ const getSwitch = (name: string, def: string = null): string => {
|
||||
};
|
||||
|
||||
gulp.task('build-backend', (): any =>
|
||||
gulp.src([
|
||||
'src/common/**/*.ts',
|
||||
'src/backend/**/*.ts',
|
||||
'benchmark/**/*.ts'], {base: '.'})
|
||||
gulp
|
||||
.src(['src/common/**/*.ts', 'src/backend/**/*.ts', 'benchmark/**/*.ts'], {
|
||||
base: '.',
|
||||
})
|
||||
.pipe(tsBackendProject())
|
||||
.js
|
||||
.pipe(gulp.dest('./release')));
|
||||
.js.pipe(gulp.dest('./release'))
|
||||
);
|
||||
|
||||
|
||||
const createDynamicTranslationFile = async (language: string): Promise<void> => {
|
||||
const createDynamicTranslationFile = async (
|
||||
language: string
|
||||
): Promise<void> => {
|
||||
// load
|
||||
const folder = './src/frontend/' + translationFolder;
|
||||
const data: string = await fsp.readFile(path.join(folder, `messages.${language}.xlf`), 'utf-8');
|
||||
const data: string = await fsp.readFile(
|
||||
path.join(folder, `messages.${language}.xlf`),
|
||||
'utf-8'
|
||||
);
|
||||
const translationXml: XLIFF.Root = await xml2js.parseStringPromise(data);
|
||||
|
||||
// clean translations, keep only .ts translations
|
||||
const hasTsTranslation = (cg: XLIFF.ContextGroup): boolean =>
|
||||
cg.context.findIndex((c: any): boolean => c.$['context-type'] === 'sourcefile' && c._.endsWith('.ts')) !== -1;
|
||||
cg.context.findIndex(
|
||||
(c: any): boolean =>
|
||||
c.$['context-type'] === 'sourcefile' && c._.endsWith('.ts')
|
||||
) !== -1;
|
||||
const translations = translationXml.xliff.file[0].body[0]['trans-unit'];
|
||||
const filtered = translations.filter((tr): boolean => tr['context-group'].findIndex(hasTsTranslation) !== -1);
|
||||
const filtered = translations.filter(
|
||||
(tr): boolean => tr['context-group'].findIndex(hasTsTranslation) !== -1
|
||||
);
|
||||
filtered.forEach((tr): boolean => delete tr['context-group']);
|
||||
translationXml.xliff.file[0].body[0]['trans-unit'] = filtered;
|
||||
|
||||
@ -63,15 +73,19 @@ const createDynamicTranslationFile = async (language: string): Promise<void> =>
|
||||
const builder = new xml2js.Builder();
|
||||
const xml = builder.buildObject(translationXml);
|
||||
await fsp.writeFile(path.join(folder, `ts-only-msg.${language}.xlf`), xml);
|
||||
|
||||
};
|
||||
|
||||
const removeDynamicTranslationFile = async (language: string): Promise<void> => {
|
||||
const translationFile = path.join('./src/frontend/', translationFolder, `ts-only-msg.${language}.xlf`);
|
||||
const removeDynamicTranslationFile = async (
|
||||
language: string
|
||||
): Promise<void> => {
|
||||
const translationFile = path.join(
|
||||
'./src/frontend/',
|
||||
translationFolder,
|
||||
`ts-only-msg.${language}.xlf`
|
||||
);
|
||||
fsp.unlink(translationFile);
|
||||
};
|
||||
|
||||
|
||||
const setDynTransFileAtAppModule = async (language: string): Promise<void> => {
|
||||
const file = './src/frontend/app/app.module.ts';
|
||||
let data: string = await fsp.readFile(file, 'utf-8');
|
||||
@ -90,11 +104,14 @@ const resetAppModule = async (language: string): Promise<void> => {
|
||||
await fsp.writeFile(file, data);
|
||||
};
|
||||
|
||||
|
||||
const createFrontendTask = (type: string, language: string, script: string): void => {
|
||||
const createFrontendTask = (
|
||||
type: string,
|
||||
language: string,
|
||||
script: string
|
||||
): void => {
|
||||
gulp.task(type, async (cb): Promise<void> => {
|
||||
try {
|
||||
const {stdout, stderr} = await execPr(script);
|
||||
const { stdout, stderr } = await execPr(script);
|
||||
console.log(stdout);
|
||||
console.error(stderr);
|
||||
} catch (e) {
|
||||
@ -104,14 +121,13 @@ const createFrontendTask = (type: string, language: string, script: string): voi
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
const getLanguages = (): any[] | string[] => {
|
||||
if (!fs.existsSync('./src/frontend/' + translationFolder)) {
|
||||
return [];
|
||||
}
|
||||
const dirCont = fs.readdirSync('./src/frontend/' + translationFolder);
|
||||
const files: string[] = dirCont.filter((elm): any => {
|
||||
return elm.match(/.*\.[a-zA-Z]+\.(xlf)/ig);
|
||||
return elm.match(/.*\.[a-zA-Z]+\.(xlf)/gi);
|
||||
});
|
||||
|
||||
// get languages to filter
|
||||
@ -132,76 +148,111 @@ const getLanguages = (): any[] | string[] => {
|
||||
return languages;
|
||||
};
|
||||
|
||||
gulp.task('build-frontend', ((): any => {
|
||||
const tasks = [];
|
||||
createFrontendTask('build-frontend-release default', 'all',
|
||||
'ng build --prod --no-progress --output-path=./release/dist');
|
||||
tasks.push('build-frontend-release default');
|
||||
return gulp.series(...tasks);
|
||||
})());
|
||||
gulp.task(
|
||||
'build-frontend',
|
||||
((): any => {
|
||||
const tasks = [];
|
||||
createFrontendTask(
|
||||
'build-frontend-release default',
|
||||
'all',
|
||||
'ng build --prod --no-progress --output-path=./release/dist'
|
||||
);
|
||||
tasks.push('build-frontend-release default');
|
||||
return gulp.series(...tasks);
|
||||
})()
|
||||
);
|
||||
|
||||
gulp.task('copy-static', (): any => gulp.src([
|
||||
'src/backend/model/diagnostics/blank.jpg',
|
||||
'README.md',
|
||||
// 'package-lock.json', should not add, it keeps optional packages optional even with --force-opt-packages.
|
||||
'LICENSE'], {base: '.'})
|
||||
.pipe(gulp.dest('./release')));
|
||||
gulp.task('copy-static', (): any =>
|
||||
gulp
|
||||
.src(
|
||||
[
|
||||
'src/backend/model/diagnostics/blank.jpg',
|
||||
'README.md',
|
||||
// 'package-lock.json', should not add, it keeps optional packages optional even with --force-opt-packages.
|
||||
'LICENSE',
|
||||
],
|
||||
{ base: '.' }
|
||||
)
|
||||
.pipe(gulp.dest('./release'))
|
||||
);
|
||||
|
||||
gulp.task('copy-package', (): any => gulp.src([
|
||||
'package.json'], {base: '.'})
|
||||
.pipe(jeditor((json: {
|
||||
devDependencies: { [key: string]: string },
|
||||
scripts: { [key: string]: string },
|
||||
dependencies: { [key: string]: string },
|
||||
optionalDependencies: { [key: string]: string },
|
||||
buildTime: string,
|
||||
buildCommitHash: string
|
||||
}): {
|
||||
devDependencies: { [p: string]: string };
|
||||
scripts: { [p: string]: string };
|
||||
dependencies: { [p: string]: string };
|
||||
optionalDependencies: { [p: string]: string };
|
||||
buildTime: string; buildCommitHash: string
|
||||
} => {
|
||||
delete json.devDependencies;
|
||||
json.scripts = {start: 'node ./src/backend/index.js'};
|
||||
gulp.task('copy-package', (): any =>
|
||||
gulp
|
||||
.src(['package.json'], { base: '.' })
|
||||
.pipe(
|
||||
jeditor(
|
||||
(json: {
|
||||
devDependencies: { [key: string]: string };
|
||||
scripts: { [key: string]: string };
|
||||
dependencies: { [key: string]: string };
|
||||
optionalDependencies: { [key: string]: string };
|
||||
buildTime: string;
|
||||
buildCommitHash: string;
|
||||
}): {
|
||||
devDependencies: { [p: string]: string };
|
||||
scripts: { [p: string]: string };
|
||||
dependencies: { [p: string]: string };
|
||||
optionalDependencies: { [p: string]: string };
|
||||
buildTime: string;
|
||||
buildCommitHash: string;
|
||||
} => {
|
||||
delete json.devDependencies;
|
||||
json.scripts = { start: 'node ./src/backend/index.js' };
|
||||
|
||||
if (getSwitch('skip-opt-packages')) {
|
||||
const skipPackages = getSwitch('skip-opt-packages').replace(new RegExp(' ', 'g'), ',').split(',');
|
||||
for (const pkg of skipPackages) {
|
||||
for (const key of Object.keys(json.optionalDependencies)) {
|
||||
if (key.indexOf(pkg) !== -1) {
|
||||
delete json.optionalDependencies[key];
|
||||
if (getSwitch('skip-opt-packages')) {
|
||||
const skipPackages = getSwitch('skip-opt-packages')
|
||||
.replace(new RegExp(' ', 'g'), ',')
|
||||
.split(',');
|
||||
for (const pkg of skipPackages) {
|
||||
for (const key of Object.keys(json.optionalDependencies)) {
|
||||
if (key.indexOf(pkg) !== -1) {
|
||||
delete json.optionalDependencies[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (getSwitch('force-opt-packages')) {
|
||||
for (const key of Object.keys(json.optionalDependencies)) {
|
||||
json.dependencies[key] = json.optionalDependencies[key];
|
||||
}
|
||||
delete json.optionalDependencies;
|
||||
}
|
||||
json.buildTime = new Date().toISOString();
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
json.buildCommitHash = require('child_process')
|
||||
.execSync('git rev-parse HEAD')
|
||||
.toString()
|
||||
.trim();
|
||||
// eslint-disable-next-line no-empty
|
||||
} catch (e) {}
|
||||
|
||||
return json;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!!getSwitch('force-opt-packages')) {
|
||||
for (const key of Object.keys(json.optionalDependencies)) {
|
||||
json.dependencies[key] = json.optionalDependencies[key];
|
||||
}
|
||||
delete json.optionalDependencies;
|
||||
}
|
||||
json.buildTime = (new Date()).toISOString();
|
||||
|
||||
try {
|
||||
json.buildCommitHash = require('child_process').execSync('git rev-parse HEAD').toString().trim();
|
||||
} catch (e) {
|
||||
}
|
||||
|
||||
return json;
|
||||
}))
|
||||
.pipe(gulp.dest('./release')));
|
||||
|
||||
)
|
||||
)
|
||||
.pipe(gulp.dest('./release'))
|
||||
);
|
||||
|
||||
gulp.task('zip-release', (): any =>
|
||||
gulp.src(['release/**/*'], {base: './release'})
|
||||
gulp
|
||||
.src(['release/**/*'], { base: './release' })
|
||||
.pipe(zip('pigallery2.zip'))
|
||||
.pipe(gulp.dest('.')));
|
||||
|
||||
gulp.task('create-release', gulp.series('build-frontend', 'build-backend', 'copy-static', 'copy-package', 'zip-release'));
|
||||
.pipe(gulp.dest('.'))
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
'create-release',
|
||||
gulp.series(
|
||||
'build-frontend',
|
||||
'build-backend',
|
||||
'copy-static',
|
||||
'copy-package',
|
||||
'zip-release'
|
||||
)
|
||||
);
|
||||
|
||||
const simpleBuild = (isProd: boolean): any => {
|
||||
const tasks = [];
|
||||
@ -209,10 +260,14 @@ const simpleBuild = (isProd: boolean): any => {
|
||||
if (isProd) {
|
||||
cmd += ' --prod --no-extract-licenses ';
|
||||
}
|
||||
if (!process.env.CI) {
|
||||
if (!process.env['CI']) {
|
||||
createFrontendTask('build-frontend default', 'all', cmd);
|
||||
} else {
|
||||
createFrontendTask('build-frontend default', 'all', cmd + '--localize=false');
|
||||
createFrontendTask(
|
||||
'build-frontend default',
|
||||
'all',
|
||||
cmd + '--localize=false'
|
||||
);
|
||||
}
|
||||
tasks.push('build-frontend default');
|
||||
return gulp.series(...tasks);
|
||||
@ -222,8 +277,10 @@ gulp.task('extract-locale', async (cb): Promise<any> => {
|
||||
console.log('creating source translation file: locale.source.xlf');
|
||||
try {
|
||||
{
|
||||
const {stdout, stderr} = await execPr('ng extract-i18n --out-file=locale.source.xlf --format=xlf',
|
||||
{maxBuffer: 1024 * 1024});
|
||||
const { stdout, stderr } = await execPr(
|
||||
'ng extract-i18n --out-file=locale.source.xlf --format=xlf',
|
||||
{ maxBuffer: 1024 * 1024 }
|
||||
);
|
||||
console.log(stdout);
|
||||
console.error(stderr);
|
||||
}
|
||||
@ -234,14 +291,22 @@ gulp.task('extract-locale', async (cb): Promise<any> => {
|
||||
}
|
||||
});
|
||||
|
||||
const translate = async (list: any[], cb: (err?: any) => void): Promise<void> => {
|
||||
const translate = async (
|
||||
list: any[],
|
||||
cb: (err?: any) => void
|
||||
): Promise<void> => {
|
||||
try {
|
||||
const localsStr = '"[\\"' + list.join('\\",\\"') + '\\"]"';
|
||||
const {stdout, stderr} = await execPr('xlf-google-translate ' +
|
||||
'--source-lang="en" ' +
|
||||
'--source-file="./locale.source.xlf" ' +
|
||||
'--destination-filename="messages" ' +
|
||||
'--destination-folder="./src/frontend/"' + translationFolder + ' --destination-languages=' + localsStr);
|
||||
const { stdout, stderr } = await execPr(
|
||||
'xlf-google-translate ' +
|
||||
'--source-lang="en" ' +
|
||||
'--source-file="./locale.source.xlf" ' +
|
||||
'--destination-filename="messages" ' +
|
||||
'--destination-folder="./src/frontend/"' +
|
||||
translationFolder +
|
||||
' --destination-languages=' +
|
||||
localsStr
|
||||
);
|
||||
console.log(stdout);
|
||||
console.error(stderr);
|
||||
cb();
|
||||
@ -253,15 +318,19 @@ const translate = async (list: any[], cb: (err?: any) => void): Promise<void> =>
|
||||
const merge = async (list: any[], cb: (err?: any) => void): Promise<void> => {
|
||||
try {
|
||||
const localsStr = '"[\\"' + list.join('\\",\\"') + '\\"]"';
|
||||
const command = 'xlf-google-translate ' +
|
||||
const command =
|
||||
'xlf-google-translate ' +
|
||||
'--method="extend-only" ' +
|
||||
'--source-lang="en" ' +
|
||||
'--source-file="./locale.source.xlf" ' +
|
||||
'--destination-filename="messages" ' +
|
||||
'--destination-folder="./src/frontend/' + translationFolder + '" ' +
|
||||
'--destination-languages=' + localsStr;
|
||||
'--destination-folder="./src/frontend/' +
|
||||
translationFolder +
|
||||
'" ' +
|
||||
'--destination-languages=' +
|
||||
localsStr;
|
||||
console.log(command);
|
||||
const {stdout, stderr} = await execPr(command);
|
||||
const { stdout, stderr } = await execPr(command);
|
||||
console.log(stdout);
|
||||
console.error(stderr);
|
||||
cb();
|
||||
@ -278,10 +347,15 @@ gulp.task('merge-translation-only', (cb): void => {
|
||||
merge(getLanguages(), cb).catch(console.error);
|
||||
});
|
||||
|
||||
gulp.task('update-translation', gulp.series('extract-locale', 'update-translation-only'));
|
||||
|
||||
gulp.task('merge-new-translation', gulp.series('extract-locale', 'merge-translation-only'));
|
||||
gulp.task(
|
||||
'update-translation',
|
||||
gulp.series('extract-locale', 'update-translation-only')
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
'merge-new-translation',
|
||||
gulp.series('extract-locale', 'merge-translation-only')
|
||||
);
|
||||
|
||||
gulp.task('add-translation-only', (cb): any => {
|
||||
const languages = getLanguages();
|
||||
@ -292,11 +366,16 @@ gulp.task('add-translation-only', (cb): any => {
|
||||
}
|
||||
}
|
||||
if (lng == null) {
|
||||
console.error('Error: set language with \'--\' e.g: npm run add-translation -- --en');
|
||||
console.error(
|
||||
"Error: set language with '--' e.g: npm run add-translation -- --en"
|
||||
);
|
||||
return cb();
|
||||
}
|
||||
if (languages.indexOf(lng) !== -1) {
|
||||
console.error('Error: language already exists, can\'t add. These language(s) already exist(s): ' + languages);
|
||||
console.error(
|
||||
"Error: language already exists, can't add. These language(s) already exist(s): " +
|
||||
languages
|
||||
);
|
||||
return cb();
|
||||
}
|
||||
translate([lng], cb);
|
||||
@ -306,17 +385,23 @@ gulp.task('generate-man', async (cb): Promise<void> => {
|
||||
const defCFG = ConfigClassBuilder.attachInterface(new PrivateConfigClass());
|
||||
defCFG.Server.sessionSecret = [];
|
||||
let txt = '# Pigallery 2 man page\n';
|
||||
txt += 'pigallery2 uses [typeconfig](https://github.com/bpatrik/typeconfig) for configuration\n\n';
|
||||
txt +=
|
||||
'pigallery2 uses [typeconfig](https://github.com/bpatrik/typeconfig) for configuration\n\n';
|
||||
txt += '`npm start -- --help` prints the following:\n\n';
|
||||
txt += '```\n' + ConfigClassBuilder.attachPrivateInterface(defCFG).__printMan() + '```';
|
||||
txt +=
|
||||
'```\n' +
|
||||
ConfigClassBuilder.attachPrivateInterface(defCFG).__printMan() +
|
||||
'```';
|
||||
txt += '\n\n ### `config.json` sample:\n';
|
||||
txt += '```json\n' + JSON.stringify(defCFG, null, 4) + '```';
|
||||
await fsp.writeFile('MANPAGE.md', txt);
|
||||
cb();
|
||||
});
|
||||
|
||||
gulp.task('add-translation', gulp.series('extract-locale', 'add-translation-only'));
|
||||
|
||||
gulp.task(
|
||||
'add-translation',
|
||||
gulp.series('extract-locale', 'add-translation-only')
|
||||
);
|
||||
|
||||
gulp.task('build-dev', simpleBuild(false));
|
||||
gulp.task('build-prod', simpleBuild(true));
|
||||
|
3495 package-lock.json (generated): file diff suppressed because it is too large
15 package.json
@@ -14,14 +14,14 @@
"build-backend": "tsc",
"pretest": "tsc",
"test": "ng test && nyc mocha --recursive test",
"lint": "tslint -p tsconfig.json -c tslint.json src/**/**.ts test/**/**.ts",
"coverage": "nyc report --reporter=text-lcov | coveralls",
"start": "node ./src/backend/index",
"run-dev": "ng build --configuration=dev",
"build-stats": "ng build --stats-json",
"analyze": "webpack-bundle-analyzer dist/en/stats.json",
"merge-new-translation": "gulp merge-new-translation",
"generate-man": "gulp generate-man"
"generate-man": "gulp generate-man",
"lint": "ng lint"
},
"repository": {
"type": "git",
@@ -56,6 +56,11 @@
"devDependencies": {
"@angular-devkit/build-angular": "13.3.1",
"@angular-devkit/build-optimizer": "0.1302.1",
"@angular-eslint/builder": "13.1.0",
"@angular-eslint/eslint-plugin": "13.1.0",
"@angular-eslint/eslint-plugin-template": "13.1.0",
"@angular-eslint/schematics": "13.1.0",
"@angular-eslint/template-parser": "13.1.0",
"@angular/animations": "13.3.1",
"@angular/cli": "13.3.1",
"@angular/common": "13.3.1",
@@ -92,7 +97,9 @@
"@types/node-geocoder": "3.24.4",
"@types/sharp": "0.30.0",
"@types/xml2js": "0.4.9",
"bootstrap": "4.6.1",
"@typescript-eslint/eslint-plugin": "5.11.0",
"@typescript-eslint/parser": "5.11.0",
"bootstrap": "5.1.3",
"chai": "4.3.6",
"chai-http": "4.3.0",
"codelyzer": "6.0.2",
@@ -100,6 +107,7 @@
"coveralls": "3.1.1",
"deep-equal-in-any-order": "1.1.15",
"ejs-loader": "0.5.0",
"eslint": "7.32.0",
"gulp": "4.0.2",
"gulp-json-editor": "2.5.6",
"gulp-typescript": "5.0.1",
@@ -127,7 +135,6 @@
"rxjs": "7.5.5",
"ts-helpers": "1.1.2",
"ts-node": "10.7.0",
"tslint": "6.1.3",
"webpack-bundle-analyzer": "4.5.0",
"xlf-google-translate": "1.0.0-beta.22",
"xml2js": "0.4.23",
@ -1,12 +1,14 @@
|
||||
import {Config} from '../common/config/private/Config';
|
||||
import {LogLevel} from '../common/config/private/PrivateConfig';
|
||||
import { Config } from '../common/config/private/Config';
|
||||
import { LogLevel } from '../common/config/private/PrivateConfig';
|
||||
|
||||
export type logFN = (...args: (string | number)[]) => void;
|
||||
|
||||
const forcedDebug = process.env.NODE_ENV === 'debug';
|
||||
const forcedDebug = process.env['NODE_ENV'] === 'debug';
|
||||
|
||||
if (forcedDebug === true) {
|
||||
console.log('NODE_ENV environmental variable is set to debug, forcing all logs to print');
|
||||
console.log(
|
||||
'NODE_ENV environmental variable is set to debug, forcing all logs to print'
|
||||
);
|
||||
}
|
||||
|
||||
export class Logger {
|
||||
@ -22,10 +24,8 @@ export class Logger {
|
||||
return;
|
||||
}
|
||||
Logger.log(`[\x1b[34mDEBUG\x1b[0m]`, ...args);
|
||||
|
||||
}
|
||||
|
||||
|
||||
public static verbose(...args: (string | number)[]): void {
|
||||
if (!forcedDebug && Config.Server.Log.level < LogLevel.verbose) {
|
||||
return;
|
||||
@ -38,7 +38,6 @@ export class Logger {
|
||||
return;
|
||||
}
|
||||
Logger.log(`[\x1b[32mINFO_\x1b[0m]`, ...args);
|
||||
|
||||
}
|
||||
|
||||
public static warn(...args: (string | number)[]): void {
|
||||
@ -49,14 +48,18 @@ export class Logger {
|
||||
}
|
||||
|
||||
public static error(...args: (string | number)[]): void {
|
||||
|
||||
Logger.log(`[\x1b[31mERROR\x1b[0m]`, ...args);
|
||||
}
|
||||
|
||||
private static log(tag: string, ...args: (string | number)[]): void {
|
||||
const date = (new Date()).toLocaleString();
|
||||
const date = new Date().toLocaleString();
|
||||
let LOG_TAG = '';
|
||||
if (args.length > 0 && typeof args[0] === 'string' && args[0].startsWith('[') && args[0].endsWith(']')) {
|
||||
if (
|
||||
args.length > 0 &&
|
||||
typeof args[0] === 'string' &&
|
||||
args[0].startsWith('[') &&
|
||||
args[0].endsWith(']')
|
||||
) {
|
||||
LOG_TAG = args[0];
|
||||
args.shift();
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import {Config} from '../common/config/private/Config';
|
||||
import { Config } from '../common/config/private/Config';
|
||||
|
||||
class ProjectPathClass {
|
||||
public Root: string;
|
||||
@ -40,7 +40,6 @@ class ProjectPathClass {
|
||||
if (!fs.existsSync(this.TempFolder)) {
|
||||
fs.mkdirSync(this.TempFolder);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,5 +1,4 @@
|
||||
export class LocationLookupException extends Error {
|
||||
|
||||
constructor(message: string, public location: string) {
|
||||
super(message);
|
||||
}
|
||||
|
@ -1,9 +1,8 @@
|
||||
import * as cluster from 'cluster';
|
||||
import {Server} from './server';
|
||||
import {Worker} from './model/threading/Worker';
|
||||
import { Server } from './server';
|
||||
import { Worker } from './model/threading/Worker';
|
||||
|
||||
if ((cluster as any).isMaster) {
|
||||
// tslint:disable-next-line:no-unused-expression
|
||||
new Server();
|
||||
} else {
|
||||
Worker.process();
|
||||
|
@ -1,63 +1,91 @@
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import {ErrorCodes, ErrorDTO} from '../../common/entities/Error';
|
||||
import {ObjectManagers} from '../model/ObjectManagers';
|
||||
import {Utils} from '../../common/Utils';
|
||||
import {Config} from '../../common/config/private/Config';
|
||||
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { ErrorCodes, ErrorDTO } from '../../common/entities/Error';
|
||||
import { ObjectManagers } from '../model/ObjectManagers';
|
||||
import { Utils } from '../../common/Utils';
|
||||
import { Config } from '../../common/config/private/Config';
|
||||
|
||||
export class AlbumMWs {
|
||||
|
||||
|
||||
public static async listAlbums(req: Request, res: Response, next: NextFunction): Promise<void> {
|
||||
public static async listAlbums(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> {
|
||||
if (Config.Client.Album.enabled === false) {
|
||||
return next();
|
||||
}
|
||||
try {
|
||||
req.resultPipe = await ObjectManagers.getInstance()
|
||||
.AlbumManager.getAlbums();
|
||||
req.resultPipe =
|
||||
await ObjectManagers.getInstance().AlbumManager.getAlbums();
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.ALBUM_ERROR, 'Error during listing albums', err));
|
||||
return next(
|
||||
new ErrorDTO(ErrorCodes.ALBUM_ERROR, 'Error during listing albums', err)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static async deleteAlbum(req: Request, res: Response, next: NextFunction): Promise<void> {
|
||||
public static async deleteAlbum(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> {
|
||||
if (Config.Client.Album.enabled === false) {
|
||||
return next();
|
||||
}
|
||||
if (!req.params.id || !Utils.isUInt32(parseInt(req.params.id, 10))) {
|
||||
if (!req.params['id'] || !Utils.isUInt32(parseInt(req.params['id'], 10))) {
|
||||
return next();
|
||||
}
|
||||
try {
|
||||
await ObjectManagers.getInstance().AlbumManager.deleteAlbum(parseInt(req.params.id, 10));
|
||||
await ObjectManagers.getInstance().AlbumManager.deleteAlbum(
|
||||
parseInt(req.params['id'], 10)
|
||||
);
|
||||
req.resultPipe = 'ok';
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.ALBUM_ERROR, 'Error during deleting albums', err));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.ALBUM_ERROR,
|
||||
'Error during deleting albums',
|
||||
err
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public static async createSavedSearch(req: Request, res: Response, next: NextFunction): Promise<void> {
|
||||
public static async createSavedSearch(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> {
|
||||
if (Config.Client.Album.enabled === false) {
|
||||
return next();
|
||||
}
|
||||
if ((typeof req.body === 'undefined') || (typeof req.body.name !== 'string') || (typeof req.body.searchQuery !== 'object')) {
|
||||
return next(new ErrorDTO(ErrorCodes.INPUT_ERROR, 'updateSharing filed is missing'));
|
||||
if (
|
||||
typeof req.body === 'undefined' ||
|
||||
typeof req.body.name !== 'string' ||
|
||||
typeof req.body.searchQuery !== 'object'
|
||||
) {
|
||||
return next(
|
||||
new ErrorDTO(ErrorCodes.INPUT_ERROR, 'updateSharing filed is missing')
|
||||
);
|
||||
}
|
||||
try {
|
||||
await ObjectManagers.getInstance().AlbumManager.addSavedSearch(req.body.name, req.body.searchQuery);
|
||||
await ObjectManagers.getInstance().AlbumManager.addSavedSearch(
|
||||
req.body.name,
|
||||
req.body.searchQuery
|
||||
);
|
||||
req.resultPipe = 'ok';
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.ALBUM_ERROR, 'Error during creating saved search albums', err));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.ALBUM_ERROR,
|
||||
'Error during creating saved search albums',
|
||||
err
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
@ -1,32 +1,43 @@
|
||||
import * as path from 'path';
|
||||
import {promises as fsp} from 'fs';
|
||||
import { promises as fsp } from 'fs';
|
||||
import * as archiver from 'archiver';
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import {ErrorCodes, ErrorDTO} from '../../common/entities/Error';
|
||||
import {DirectoryDTOUtils, ParentDirectoryDTO} from '../../common/entities/DirectoryDTO';
|
||||
import {ObjectManagers} from '../model/ObjectManagers';
|
||||
import {ContentWrapper} from '../../common/entities/ConentWrapper';
|
||||
import {PhotoDTO} from '../../common/entities/PhotoDTO';
|
||||
import {ProjectPath} from '../ProjectPath';
|
||||
import {Config} from '../../common/config/private/Config';
|
||||
import {UserDTOUtils} from '../../common/entities/UserDTO';
|
||||
import {MediaDTO, MediaDTOUtils} from '../../common/entities/MediaDTO';
|
||||
import {VideoDTO} from '../../common/entities/VideoDTO';
|
||||
import {Utils} from '../../common/Utils';
|
||||
import {QueryParams} from '../../common/QueryParams';
|
||||
import {VideoProcessing} from '../model/fileprocessing/VideoProcessing';
|
||||
import {SearchQueryDTO, SearchQueryTypes} from '../../common/entities/SearchQueryDTO';
|
||||
import {LocationLookupException} from '../exceptions/LocationLookupException';
|
||||
import {SupportedFormats} from '../../common/SupportedFormats';
|
||||
import {ServerTime} from './ServerTimingMWs';
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { ErrorCodes, ErrorDTO } from '../../common/entities/Error';
|
||||
import {
|
||||
DirectoryDTOUtils,
|
||||
ParentDirectoryDTO,
|
||||
} from '../../common/entities/DirectoryDTO';
|
||||
import { ObjectManagers } from '../model/ObjectManagers';
|
||||
import { ContentWrapper } from '../../common/entities/ConentWrapper';
|
||||
import { PhotoDTO } from '../../common/entities/PhotoDTO';
|
||||
import { ProjectPath } from '../ProjectPath';
|
||||
import { Config } from '../../common/config/private/Config';
|
||||
import { UserDTOUtils } from '../../common/entities/UserDTO';
|
||||
import { MediaDTO, MediaDTOUtils } from '../../common/entities/MediaDTO';
|
||||
import { VideoDTO } from '../../common/entities/VideoDTO';
|
||||
import { Utils } from '../../common/Utils';
|
||||
import { QueryParams } from '../../common/QueryParams';
|
||||
import { VideoProcessing } from '../model/fileprocessing/VideoProcessing';
|
||||
import {
|
||||
SearchQueryDTO,
|
||||
SearchQueryTypes,
|
||||
} from '../../common/entities/SearchQueryDTO';
|
||||
import { LocationLookupException } from '../exceptions/LocationLookupException';
|
||||
import { SupportedFormats } from '../../common/SupportedFormats';
|
||||
import { ServerTime } from './ServerTimingMWs';
|
||||
|
||||
export class GalleryMWs {
|
||||
|
||||
|
||||
@ServerTime('1.db', 'List Directory')
|
||||
public static async listDirectory(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
const directoryName = req.params.directory || '/';
|
||||
const absoluteDirectoryName = path.join(ProjectPath.ImageFolder, directoryName);
|
||||
public static async listDirectory(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
const directoryName = req.params['directory'] || '/';
|
||||
const absoluteDirectoryName = path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
directoryName
|
||||
);
|
||||
try {
|
||||
if ((await fsp.stat(absoluteDirectoryName)).isDirectory() === false) {
|
||||
return next();
|
||||
@ -36,36 +47,59 @@ export class GalleryMWs {
|
||||
}
|
||||
|
||||
try {
|
||||
const directory = await ObjectManagers.getInstance()
|
||||
.GalleryManager.listDirectory(directoryName,
|
||||
parseInt(req.query[QueryParams.gallery.knownLastModified] as string, 10),
|
||||
parseInt(req.query[QueryParams.gallery.knownLastScanned] as string, 10));
|
||||
const directory =
|
||||
await ObjectManagers.getInstance().GalleryManager.listDirectory(
|
||||
directoryName,
|
||||
parseInt(
|
||||
req.query[QueryParams.gallery.knownLastModified] as string,
|
||||
10
|
||||
),
|
||||
parseInt(
|
||||
req.query[QueryParams.gallery.knownLastScanned] as string,
|
||||
10
|
||||
)
|
||||
);
|
||||
|
||||
if (directory == null) {
|
||||
req.resultPipe = new ContentWrapper(null, null, true);
|
||||
return next();
|
||||
}
|
||||
if (req.session.user.permissions &&
|
||||
req.session.user.permissions.length > 0 &&
|
||||
req.session.user.permissions[0] !== '/*') {
|
||||
if (
|
||||
req.session['user'].permissions &&
|
||||
req.session['user'].permissions.length > 0 &&
|
||||
req.session['user'].permissions[0] !== '/*'
|
||||
) {
|
||||
directory.directories = directory.directories.filter((d): boolean =>
|
||||
UserDTOUtils.isDirectoryAvailable(d, req.session.user.permissions));
|
||||
UserDTOUtils.isDirectoryAvailable(d, req.session['user'].permissions)
|
||||
);
|
||||
}
|
||||
req.resultPipe = new ContentWrapper(directory, null);
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during listing the directory', err));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.GENERAL_ERROR,
|
||||
'Error during listing the directory',
|
||||
err
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ServerTime('1.zip', 'Zip Directory')
|
||||
public static async zipDirectory(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async zipDirectory(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (Config.Client.Other.enableDownloadZip === false) {
|
||||
return next();
|
||||
}
|
||||
const directoryName = req.params.directory || '/';
|
||||
const absoluteDirectoryName = path.join(ProjectPath.ImageFolder, directoryName);
|
||||
const directoryName = req.params['directory'] || '/';
|
||||
const absoluteDirectoryName = path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
directoryName
|
||||
);
|
||||
try {
|
||||
if ((await fsp.stat(absoluteDirectoryName)).isDirectory() === false) {
|
||||
return next();
|
||||
@ -95,24 +129,29 @@ export class GalleryMWs {
|
||||
// append photos in absoluteDirectoryName
|
||||
// using case-insensitive glob of extensions
|
||||
for (const ext of SupportedFormats.WithDots.Photos) {
|
||||
archive.glob(`*${ext}`, {cwd: absoluteDirectoryName, nocase: true});
|
||||
archive.glob(`*${ext}`, { cwd: absoluteDirectoryName, nocase: true });
|
||||
}
|
||||
// append videos in absoluteDirectoryName
|
||||
// using case-insensitive glob of extensions
|
||||
for (const ext of SupportedFormats.WithDots.Videos) {
|
||||
archive.glob(`*${ext}`, {cwd: absoluteDirectoryName, nocase: true});
|
||||
archive.glob(`*${ext}`, { cwd: absoluteDirectoryName, nocase: true });
|
||||
}
|
||||
|
||||
await archive.finalize();
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error creating zip', err));
|
||||
return next(
|
||||
new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error creating zip', err)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ServerTime('3.cleanUp', 'Clean up')
|
||||
public static cleanUpGalleryResults(req: Request, res: Response, next: NextFunction): any {
|
||||
public static cleanUpGalleryResults(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): any {
|
||||
if (!req.resultPipe) {
|
||||
return next();
|
||||
}
|
||||
@ -148,32 +187,43 @@ export class GalleryMWs {
|
||||
cleanUpMedia(cw.directory.media);
|
||||
}
|
||||
if (cw.searchResult) {
|
||||
cw.searchResult.directories.forEach(d => DirectoryDTOUtils.packDirectory(d));
|
||||
cw.searchResult.directories.forEach((d) =>
|
||||
DirectoryDTOUtils.packDirectory(d)
|
||||
);
|
||||
cleanUpMedia(cw.searchResult.media);
|
||||
}
|
||||
|
||||
|
||||
if (Config.Client.Media.Video.enabled === false) {
|
||||
if (cw.directory) {
|
||||
const removeVideos = (dir: ParentDirectoryDTO): void => {
|
||||
dir.media = dir.media.filter((m): boolean => !MediaDTOUtils.isVideo(m));
|
||||
dir.media = dir.media.filter(
|
||||
(m): boolean => !MediaDTOUtils.isVideo(m)
|
||||
);
|
||||
};
|
||||
removeVideos(cw.directory);
|
||||
}
|
||||
if (cw.searchResult) {
|
||||
cw.searchResult.media = cw.searchResult.media.filter((m): boolean => !MediaDTOUtils.isVideo(m));
|
||||
cw.searchResult.media = cw.searchResult.media.filter(
|
||||
(m): boolean => !MediaDTOUtils.isVideo(m)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return next();
|
||||
}
|
||||
|
||||
|
||||
public static async loadFile(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
if (!(req.params.mediaPath)) {
|
||||
public static async loadFile(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (!req.params['mediaPath']) {
|
||||
return next();
|
||||
}
|
||||
const fullMediaPath = path.join(ProjectPath.ImageFolder, req.params.mediaPath);
|
||||
const fullMediaPath = path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
req.params['mediaPath']
|
||||
);
|
||||
|
||||
// check if file exist
|
||||
try {
|
||||
@ -181,63 +231,95 @@ export class GalleryMWs {
|
||||
return next();
|
||||
}
|
||||
} catch (e) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'no such file:' + req.params.mediaPath, 'can\'t find file: ' + fullMediaPath));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.GENERAL_ERROR,
|
||||
'no such file:' + req.params['mediaPath'],
|
||||
"can't find file: " + fullMediaPath
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
req.resultPipe = fullMediaPath;
|
||||
return next();
|
||||
}
|
||||
|
||||
public static async loadBestFitVideo(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
if (!(req.resultPipe)) {
|
||||
public static async loadBestFitVideo(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (!req.resultPipe) {
|
||||
return next();
|
||||
}
|
||||
const fullMediaPath: string = req.resultPipe;
|
||||
|
||||
const convertedVideo = VideoProcessing.generateConvertedFilePath(fullMediaPath);
|
||||
const convertedVideo =
|
||||
VideoProcessing.generateConvertedFilePath(fullMediaPath);
|
||||
|
||||
// check if transcoded video exist
|
||||
try {
|
||||
await fsp.access(convertedVideo);
|
||||
req.resultPipe = convertedVideo;
|
||||
} catch (e) {
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-empty
|
||||
} catch (e) {}
|
||||
|
||||
return next();
|
||||
}
|
||||
|
||||
|
||||
@ServerTime('1.db', 'Search')
|
||||
public static async search(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
if (Config.Client.Search.enabled === false || !(req.params.searchQueryDTO)) {
|
||||
public static async search(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (
|
||||
Config.Client.Search.enabled === false ||
|
||||
!req.params['searchQueryDTO']
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
|
||||
const query: SearchQueryDTO = JSON.parse(req.params.searchQueryDTO as any);
|
||||
const query: SearchQueryDTO = JSON.parse(
|
||||
req.params['searchQueryDTO'] as string
|
||||
);
|
||||
|
||||
try {
|
||||
const result = await ObjectManagers.getInstance().SearchManager.search(query);
|
||||
const result = await ObjectManagers.getInstance().SearchManager.search(
|
||||
query
|
||||
);
|
||||
|
||||
result.directories.forEach((dir): MediaDTO[] => dir.media = dir.media || []);
|
||||
result.directories.forEach(
|
||||
(dir): MediaDTO[] => (dir.media = dir.media || [])
|
||||
);
|
||||
req.resultPipe = new ContentWrapper(null, result);
|
||||
return next();
|
||||
} catch (err) {
|
||||
if (err instanceof LocationLookupException) {
|
||||
return next(new ErrorDTO(ErrorCodes.LocationLookUp_ERROR, 'Cannot find location: ' + err.location, err));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.LocationLookUp_ERROR,
|
||||
'Cannot find location: ' + err.location,
|
||||
err
|
||||
)
|
||||
);
|
||||
}
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during searching', err));
|
||||
return next(
|
||||
new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during searching', err)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ServerTime('1.db', 'Autocomplete')
|
||||
public static async autocomplete(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async autocomplete(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (Config.Client.Search.AutoComplete.enabled === false) {
|
||||
return next();
|
||||
}
|
||||
if (!(req.params.text)) {
|
||||
if (!req.params['text']) {
|
||||
return next();
|
||||
}
|
||||
|
||||
@ -246,35 +328,55 @@ export class GalleryMWs {
|
||||
type = parseInt(req.query[QueryParams.gallery.search.type] as string, 10);
|
||||
}
|
||||
try {
|
||||
req.resultPipe = await ObjectManagers.getInstance().SearchManager.autocomplete(req.params.text, type);
|
||||
req.resultPipe =
|
||||
await ObjectManagers.getInstance().SearchManager.autocomplete(
|
||||
req.params['text'],
|
||||
type
|
||||
);
|
||||
return next();
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during searching', err));
|
||||
return next(
|
||||
new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during searching', err)
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
public static async getRandomImage(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
if (Config.Client.RandomPhoto.enabled === false || !(req.params.searchQueryDTO)) {
|
||||
public static async getRandomImage(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (
|
||||
Config.Client.RandomPhoto.enabled === false ||
|
||||
!req.params['searchQueryDTO']
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
|
||||
try {
|
||||
const query: SearchQueryDTO = JSON.parse(req.params.searchQueryDTO as any);
|
||||
const query: SearchQueryDTO = JSON.parse(
|
||||
req.params['searchQueryDTO'] as string
|
||||
);
|
||||
|
||||
const photo = await ObjectManagers.getInstance()
|
||||
.SearchManager.getRandomPhoto(query);
|
||||
const photo =
|
||||
await ObjectManagers.getInstance().SearchManager.getRandomPhoto(query);
|
||||
if (!photo) {
|
||||
return next(new ErrorDTO(ErrorCodes.INPUT_ERROR, 'No photo found'));
|
||||
}
|
||||
|
||||
req.params.mediaPath = path.join(photo.directory.path, photo.directory.name, photo.name);
|
||||
req.params['mediaPath'] = path.join(
|
||||
photo.directory.path,
|
||||
photo.directory.name,
|
||||
photo.name
|
||||
);
|
||||
return next();
|
||||
} catch (e) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Can\'t get random photo: ' + e.toString()));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.GENERAL_ERROR,
|
||||
"Can't get random photo: " + e.toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -1,17 +1,12 @@
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import {UserRoles} from '../../common/entities/UserDTO';
|
||||
import {NotificationManager} from '../model/NotifocationManager';
|
||||
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { UserRoles } from '../../common/entities/UserDTO';
|
||||
import { NotificationManager } from '../model/NotifocationManager';
|
||||
|
||||
export class NotificationMWs {
|
||||
|
||||
|
||||
public static list(req: Request, res: Response, next: NextFunction): any {
|
||||
|
||||
if (req.session.user.role >= UserRoles.Admin) {
|
||||
if (req.session['user'].role >= UserRoles.Admin) {
|
||||
req.resultPipe = NotificationManager.notifications;
|
||||
} else if (NotificationManager.notifications.length > 0) {
|
||||
|
||||
req.resultPipe = NotificationManager.HasNotification;
|
||||
} else {
|
||||
req.resultPipe = [];
|
||||
|
@ -1,59 +1,91 @@
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import {ErrorCodes, ErrorDTO} from '../../common/entities/Error';
|
||||
import {ObjectManagers} from '../model/ObjectManagers';
|
||||
import {PersonDTO, PersonWithSampleRegion} from '../../common/entities/PersonDTO';
|
||||
import {Utils} from '../../common/Utils';
|
||||
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { ErrorCodes, ErrorDTO } from '../../common/entities/Error';
|
||||
import { ObjectManagers } from '../model/ObjectManagers';
|
||||
import {
|
||||
PersonDTO,
|
||||
PersonWithSampleRegion,
|
||||
} from '../../common/entities/PersonDTO';
|
||||
import { Utils } from '../../common/Utils';
|
||||
|
||||
export class PersonMWs {
|
||||
|
||||
|
||||
public static async updatePerson(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
if (!req.params.name) {
|
||||
public static async updatePerson(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (!req.params['name']) {
|
||||
return next();
|
||||
}
|
||||
|
||||
try {
|
||||
req.resultPipe = await ObjectManagers.getInstance()
|
||||
.PersonManager.updatePerson(req.params.name as string,
|
||||
req.body as PersonDTO);
|
||||
req.resultPipe =
|
||||
await ObjectManagers.getInstance().PersonManager.updatePerson(
|
||||
req.params['name'] as string,
|
||||
req.body as PersonDTO
|
||||
);
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.PERSON_ERROR, 'Error during updating a person', err));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.PERSON_ERROR,
|
||||
'Error during updating a person',
|
||||
err
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public static async getPerson(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
if (!req.params.name) {
|
||||
public static async getPerson(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (!req.params['name']) {
|
||||
return next();
|
||||
}
|
||||
|
||||
try {
|
||||
req.resultPipe = await ObjectManagers.getInstance()
|
||||
.PersonManager.get(req.params.name as string);
|
||||
req.resultPipe = await ObjectManagers.getInstance().PersonManager.get(
|
||||
req.params['name'] as string
|
||||
);
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.PERSON_ERROR, 'Error during updating a person', err));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.PERSON_ERROR,
|
||||
'Error during updating a person',
|
||||
err
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static async listPersons(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async listPersons(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
try {
|
||||
req.resultPipe = await ObjectManagers.getInstance()
|
||||
.PersonManager.getAll();
|
||||
req.resultPipe =
|
||||
await ObjectManagers.getInstance().PersonManager.getAll();
|
||||
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.PERSON_ERROR, 'Error during listing persons', err));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.PERSON_ERROR,
|
||||
'Error during listing persons',
|
||||
err
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static async cleanUpPersonResults(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async cleanUpPersonResults(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (!req.resultPipe) {
|
||||
return next();
|
||||
}
|
||||
@ -64,13 +96,16 @@ export class PersonMWs {
|
||||
}
|
||||
req.resultPipe = persons;
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.PERSON_ERROR, 'Error during removing sample photo from all persons', err));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.PERSON_ERROR,
|
||||
'Error during removing sample photo from all persons',
|
||||
err
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
@ -1,17 +1,20 @@
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import {ErrorCodes, ErrorDTO} from '../../common/entities/Error';
|
||||
import {Message} from '../../common/entities/Message';
|
||||
import {Config, PrivateConfigClass} from '../../common/config/private/Config';
import {UserDTO, UserRoles} from '../../common/entities/UserDTO';
import {NotificationManager} from '../model/NotifocationManager';
import {Logger} from '../Logger';
import {SharingDTO} from '../../common/entities/SharingDTO';
import {Utils} from '../../common/Utils';
import {LoggerRouter} from '../routes/LoggerRouter';
import { NextFunction, Request, Response } from 'express';
import { ErrorCodes, ErrorDTO } from '../../common/entities/Error';
import { Message } from '../../common/entities/Message';
import { Config, PrivateConfigClass } from '../../common/config/private/Config';
import { UserDTO, UserRoles } from '../../common/entities/UserDTO';
import { NotificationManager } from '../model/NotifocationManager';
import { Logger } from '../Logger';
import { SharingDTO } from '../../common/entities/SharingDTO';
import { Utils } from '../../common/Utils';
import { LoggerRouter } from '../routes/LoggerRouter';

export class RenderingMWs {

public static renderResult(req: Request, res: Response, next: NextFunction): any {
public static renderResult(
req: Request,
res: Response,
next: NextFunction
): any {
if (typeof req.resultPipe === 'undefined') {
return next();
}
@ -19,19 +22,22 @@ export class RenderingMWs {
return RenderingMWs.renderMessage(res, req.resultPipe);
}


public static renderSessionUser(req: Request, res: Response, next: NextFunction): any {
if (!(req.session.user)) {
public static renderSessionUser(
req: Request,
res: Response,
next: NextFunction
): any {
if (!req.session['user']) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'User not exists'));
}

const user = {
id: req.session.user.id,
name: req.session.user.name,
csrfToken: req.session.user.csrfToken || req.csrfToken(),
role: req.session.user.role,
usedSharingKey: req.session.user.usedSharingKey,
permissions: req.session.user.permissions
id: req.session['user'].id,
name: req.session['user'].name,
csrfToken: req.session['user'].csrfToken || req.csrfToken(),
role: req.session['user'].role,
usedSharingKey: req.session['user'].usedSharingKey,
permissions: req.session['user'].permissions,
} as UserDTO;

if (!user.csrfToken && req.csrfToken) {
@ -41,17 +47,24 @@ export class RenderingMWs {
RenderingMWs.renderMessage(res, user);
}

public static renderSharing(req: Request, res: Response, next: NextFunction): any {
public static renderSharing(
req: Request,
res: Response,
next: NextFunction
): any {
if (!req.resultPipe) {
return next();
}

const {password, creator, ...sharing} = req.resultPipe;
const { password, creator, ...sharing } = req.resultPipe;
RenderingMWs.renderMessage(res, sharing);
}


public static renderSharingList(req: Request, res: Response, next: NextFunction): any {
public static renderSharingList(
req: Request,
res: Response,
next: NextFunction
): any {
if (!req.resultPipe) {
return next();
}
@ -64,44 +77,70 @@ export class RenderingMWs {
return RenderingMWs.renderMessage(res, shares);
}

public static renderFile(req: Request, res: Response, next: NextFunction): any {
public static renderFile(
req: Request,
res: Response,
next: NextFunction
): any {
if (!req.resultPipe) {
return next();
}
return res.sendFile(req.resultPipe, {maxAge: 31536000, dotfiles: 'allow'});
return res.sendFile(req.resultPipe, {
maxAge: 31536000,
dotfiles: 'allow',
});
}

public static renderOK(req: Request, res: Response, next: NextFunction): void {
public static renderOK(
req: Request,
res: Response,
next: NextFunction
): void {
const message = new Message<string>(null, 'ok');
res.json(message);
}


public static async renderConfig(req: Request, res: Response, next: NextFunction): Promise<void> {
public static async renderConfig(
req: Request,
res: Response,
next: NextFunction
): Promise<void> {
const originalConf = await Config.original();
// These are sensitive information, do not send to the client side
originalConf.Server.sessionSecret = null;
originalConf.Server.Database.enforcedUsers = null;
const message = new Message<PrivateConfigClass>(null, originalConf.toJSON({
attachState: true,
attachVolatile: true
}) as any);
const message = new Message<PrivateConfigClass>(
null,
originalConf.toJSON({
attachState: true,
attachVolatile: true,
}) as any
);
res.json(message);
}


public static renderError(err: any, req: Request, res: Response, next: NextFunction): any {

public static renderError(
err: any,
req: Request,
res: Response,
next: NextFunction
): any {
if (err instanceof ErrorDTO) {
if (err.details) {
Logger.warn('Handled error:');
LoggerRouter.log(Logger.warn, req, res);
console.log(err);
delete (err.details); // do not send back error object to the client side
delete err.details; // do not send back error object to the client side

// hide error details for non developers
if (!(req.session && req.session.user && req.session.user.role >= UserRoles.Developer)) {
delete (err.detailsStr);
if (
!(
req.session &&
req.session['user'] &&
req.session['user'].role >= UserRoles.Developer
)
) {
delete err.detailsStr;
}
}
const message = new Message<any>(err, null);
@ -111,11 +150,8 @@ export class RenderingMWs {
return next(err);
}


protected static renderMessage<T>(res: Response, content: T): void {
const message = new Message<T>(null, content);
res.json(message);
}


}
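All of the RenderingMWs methods above share one convention: an earlier middleware leaves its output on `req.resultPipe`, and the renderer wraps whatever it finds there into a `Message` envelope (error first, result second) before calling `res.json`. A minimal, self-contained sketch of that flow; the route path, the inline middleware and the exact envelope field names are illustrative assumptions, not part of this commit:

```typescript
// Minimal sketch of the resultPipe convention used by RenderingMWs above.
import * as express from 'express';
import { NextFunction, Request, Response } from 'express';

const app = express();

app.get(
  '/api/example',
  // 1) an upstream middleware computes something and parks it on req.resultPipe
  (req: Request, res: Response, next: NextFunction) => {
    (req as any).resultPipe = { hello: 'world' };
    next();
  },
  // 2) the renderer wraps it into an { error, result } envelope, mirroring
  //    what RenderingMWs.renderMessage does with the Message class
  (req: Request, res: Response) => {
    res.json({ error: null, result: (req as any).resultPipe });
  }
);

app.listen(3000);
```

Keeping the envelope in one place is what lets the reformatted methods above stay thin: they only decide what goes into `resultPipe`, never how it is serialized.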
@ -1,13 +1,11 @@
import {NextFunction, Request, Response} from 'express';
import {Config} from '../../common/config/private/Config';

import { NextFunction, Request, Response } from 'express';
import { Config } from '../../common/config/private/Config';

export class ServerTimeEntry {
public name: string;
startHR: any;
public endTime: number = null;


constructor(name: string) {
this.name = name;
}
@ -18,13 +16,16 @@ export class ServerTimeEntry {

public end(): void {
const duration = process.hrtime(this.startHR);
this.endTime = (duration[0] * 1E3) + (duration[1] * 1e-6);
this.endTime = duration[0] * 1e3 + duration[1] * 1e-6;
}
}


export const ServerTime = (id: string, name: string) => {
return (target: any, propertyName: string, descriptor: TypedPropertyDescriptor<any>): any => {
return (
target: any,
propertyName: string,
descriptor: TypedPropertyDescriptor<any>
): any => {
if (Config.Server.Log.logServerTiming === false) {
return;
}
@ -38,27 +39,34 @@ export const ServerTime = (id: string, name: string) => {
next(err);
});
};
descriptor.value = new Function('action', 'return function ' + m.name + '(...args){ action(...args) };')(customAction);

descriptor.value = new Function(
'action',
'return function ' + m.name + '(...args){ action(...args) };'
)(customAction);
};
};


const forcedDebug = process.env.NODE_ENV === 'debug';
const forcedDebug = process.env['NODE_ENV'] === 'debug';

export class ServerTimingMWs {


/**
* Add server timing
*/
public static async addServerTiming(req: Request, res: Response, next: NextFunction): Promise<any> {
if ((Config.Server.Log.logServerTiming === false && !forcedDebug) || !req.timing) {
public static async addServerTiming(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
if (
(Config.Server.Log.logServerTiming === false && !forcedDebug) ||
!req.timing
) {
return next();
}
const l = Object.entries(req.timing).filter(e => e[1].endTime).map(e => `${e[0]};dur=${e[1].endTime};desc="${e[1].name}"`);
const l = Object.entries(req.timing)
.filter((e) => e[1].endTime)
.map((e) => `${e[0]};dur=${e[1].endTime};desc="${e[1].name}"`);
res.header('Server-Timing', l.join(', '));
next();
}

}
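`addServerTiming` turns every finished `ServerTimeEntry` into one `name;dur=...;desc="..."` item of the standard `Server-Timing` response header. A small sketch of that serialization with plain objects; the `1.db` entry id is made up for illustration, only `2.th` / `Thumbnail decoration` actually appears later in this diff:

```typescript
// Sketch: assembling a Server-Timing header value the way addServerTiming does.
// The timing map shape ({ [id]: { name, endTime } }) follows ServerTimeEntry above.
interface TimingEntry { name: string; endTime: number | null; }

const timing: { [id: string]: TimingEntry } = {
  '1.db': { name: 'Directory lookup', endTime: 12.3 },
  '2.th': { name: 'Thumbnail decoration', endTime: 4.7 },
  '3.skip': { name: 'Never finished', endTime: null }, // filtered out below
};

const header = Object.entries(timing)
  .filter(([, e]) => e.endTime)
  .map(([id, e]) => `${id};dur=${e.endTime};desc="${e.name}"`)
  .join(', ');

// => 1.db;dur=12.3;desc="Directory lookup", 2.th;dur=4.7;desc="Thumbnail decoration"
console.log(header);
```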
@ -1,139 +1,215 @@
import {NextFunction, Request, Response} from 'express';
import {CreateSharingDTO, SharingDTO} from '../../common/entities/SharingDTO';
import {ObjectManagers} from '../model/ObjectManagers';
import {ErrorCodes, ErrorDTO} from '../../common/entities/Error';
import {Config} from '../../common/config/private/Config';
import {QueryParams} from '../../common/QueryParams';
import { NextFunction, Request, Response } from 'express';
import { CreateSharingDTO, SharingDTO } from '../../common/entities/SharingDTO';
import { ObjectManagers } from '../model/ObjectManagers';
import { ErrorCodes, ErrorDTO } from '../../common/entities/Error';
import { Config } from '../../common/config/private/Config';
import { QueryParams } from '../../common/QueryParams';
import * as path from 'path';
import {UserRoles} from '../../common/entities/UserDTO';

import { UserRoles } from '../../common/entities/UserDTO';

export class SharingMWs {


public static async getSharing(req: Request, res: Response, next: NextFunction): Promise<any> {
public static async getSharing(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
if (Config.Client.Sharing.enabled === false) {
return next();
}
const sharingKey = req.params[QueryParams.gallery.sharingKey_params];

try {
req.resultPipe = await ObjectManagers.getInstance().SharingManager.findOne({sharingKey});
req.resultPipe =
await ObjectManagers.getInstance().SharingManager.findOne({
sharingKey,
});
return next();

} catch (err) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during retrieving sharing link', err));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Error during retrieving sharing link',
err
)
);
}

}

public static async createSharing(req: Request, res: Response, next: NextFunction): Promise<any> {
public static async createSharing(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
if (Config.Client.Sharing.enabled === false) {
return next();
}
if ((typeof req.body === 'undefined') || (typeof req.body.createSharing === 'undefined')) {
return next(new ErrorDTO(ErrorCodes.INPUT_ERROR, 'createSharing filed is missing'));
if (
typeof req.body === 'undefined' ||
typeof req.body.createSharing === 'undefined'
) {
return next(
new ErrorDTO(ErrorCodes.INPUT_ERROR, 'createSharing filed is missing')
);
}
const createSharing: CreateSharingDTO = req.body.createSharing;
let sharingKey = SharingMWs.generateKey();

// create one not yet used
// eslint-disable-next-line no-constant-condition
while (true) {
try {
await ObjectManagers.getInstance().SharingManager.findOne({sharingKey});
await ObjectManagers.getInstance().SharingManager.findOne({
sharingKey,
});
sharingKey = this.generateKey();
} catch (err) {
break;
}
}


const directoryName = path.normalize(req.params.directory || '/');
const directoryName = path.normalize(req.params['directory'] || '/');
const sharing: SharingDTO = {
id: null,
sharingKey,
path: directoryName,
password: createSharing.password,
creator: req.session.user,
expires: createSharing.valid >= 0 ? // if === -1 its forever
Date.now() + createSharing.valid :
(new Date(9999, 0, 1)).getTime(), // never expire
creator: req.session['user'],
expires:
createSharing.valid >= 0 // if === -1 its forever
? Date.now() + createSharing.valid
: new Date(9999, 0, 1).getTime(), // never expire
includeSubfolders: createSharing.includeSubfolders,
timeStamp: Date.now()
timeStamp: Date.now(),
};

try {

req.resultPipe = await ObjectManagers.getInstance().SharingManager.createSharing(sharing);
req.resultPipe =
await ObjectManagers.getInstance().SharingManager.createSharing(
sharing
);
return next();

} catch (err) {
console.warn(err);
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during creating sharing link', err));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Error during creating sharing link',
err
)
);
}
}

public static async updateSharing(req: Request, res: Response, next: NextFunction): Promise<any> {
public static async updateSharing(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
if (Config.Client.Sharing.enabled === false) {
return next();
}
if ((typeof req.body === 'undefined') || (typeof req.body.updateSharing === 'undefined')) {
return next(new ErrorDTO(ErrorCodes.INPUT_ERROR, 'updateSharing filed is missing'));
if (
typeof req.body === 'undefined' ||
typeof req.body.updateSharing === 'undefined'
) {
return next(
new ErrorDTO(ErrorCodes.INPUT_ERROR, 'updateSharing filed is missing')
);
}
const updateSharing: CreateSharingDTO = req.body.updateSharing;
const directoryName = path.normalize(req.params.directory || '/');
const directoryName = path.normalize(req.params['directory'] || '/');
const sharing: SharingDTO = {
id: updateSharing.id,
path: directoryName,
sharingKey: '',
password: (updateSharing.password && updateSharing.password !== '') ? updateSharing.password : null,
creator: req.session.user,
expires: updateSharing.valid >= 0 // if === -1 its forever
? Date.now() + updateSharing.valid :
(new Date(9999, 0, 1)).getTime(), // never expire
password:
updateSharing.password && updateSharing.password !== ''
? updateSharing.password
: null,
creator: req.session['user'],
expires:
updateSharing.valid >= 0 // if === -1 its forever
? Date.now() + updateSharing.valid
: new Date(9999, 0, 1).getTime(), // never expire
includeSubfolders: updateSharing.includeSubfolders,
timeStamp: Date.now()
timeStamp: Date.now(),
};

try {
const forceUpdate = req.session.user.role >= UserRoles.Admin;
req.resultPipe = await ObjectManagers.getInstance().SharingManager.updateSharing(sharing, forceUpdate);
const forceUpdate = req.session['user'].role >= UserRoles.Admin;
req.resultPipe =
await ObjectManagers.getInstance().SharingManager.updateSharing(
sharing,
forceUpdate
);
return next();
} catch (err) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during updating sharing link', err));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Error during updating sharing link',
err
)
);
}

}


public static async deleteSharing(req: Request, res: Response, next: NextFunction): Promise<any> {
public static async deleteSharing(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
if (Config.Client.Sharing.enabled === false) {
return next();
}
if ((typeof req.params === 'undefined') || (typeof req.params.sharingKey === 'undefined')) {
return next(new ErrorDTO(ErrorCodes.INPUT_ERROR, 'sharingKey is missing'));
if (
typeof req.params === 'undefined' ||
typeof req.params['sharingKey'] === 'undefined'
) {
return next(
new ErrorDTO(ErrorCodes.INPUT_ERROR, 'sharingKey is missing')
);
}
const sharingKey: string = req.params.sharingKey;
const sharingKey: string = req.params['sharingKey'];

try {
req.resultPipe = await ObjectManagers.getInstance().SharingManager.deleteSharing(sharingKey);
req.resultPipe =
await ObjectManagers.getInstance().SharingManager.deleteSharing(
sharingKey
);
req.resultPipe = 'ok';
return next();
} catch (err) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during deleting sharing', err));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Error during deleting sharing',
err
)
);
}

}

public static async listSharing(req: Request, res: Response, next: NextFunction): Promise<any> {
public static async listSharing(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
if (Config.Client.Sharing.enabled === false) {
return next();
}
try {
req.resultPipe = await ObjectManagers.getInstance().SharingManager.listAll();
req.resultPipe =
await ObjectManagers.getInstance().SharingManager.listAll();
return next();
} catch (err) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error during listing shares', err));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Error during listing shares',
err
)
);
}
}

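`createSharing` and `updateSharing` compute `expires` the same way: a non-negative `valid` is treated as a time-to-live in milliseconds added to `Date.now()`, and `-1` maps to 1 January 9999, i.e. effectively never. A short sketch of that rule, assuming `valid` is a millisecond duration as the code above suggests:

```typescript
// Sketch of the expiry rule shared by createSharing and updateSharing above.
function sharingExpires(valid: number): number {
  return valid >= 0 // if === -1 it's forever
    ? Date.now() + valid
    : new Date(9999, 0, 1).getTime(); // "never expire": 1 Jan 9999
}

const oneWeek = 7 * 24 * 60 * 60 * 1000;
sharingExpires(oneWeek); // now + 604800000 ms
sharingExpires(-1);      // a timestamp in the year 9999
```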
@ -1,22 +1,31 @@
import {NextFunction, Request, Response} from 'express';
import {ObjectManagers} from '../model/ObjectManagers';
import {ErrorCodes, ErrorDTO} from '../../common/entities/Error';
import {CustomHeaders} from '../../common/CustomHeaders';

import { NextFunction, Request, Response } from 'express';
import { ObjectManagers } from '../model/ObjectManagers';
import { ErrorCodes, ErrorDTO } from '../../common/entities/Error';
import { CustomHeaders } from '../../common/CustomHeaders';

export class VersionMWs {


/**
* This version data is mainly used on the client side to invalidate the cache
*/
public static async injectGalleryVersion(req: Request, res: Response, next: NextFunction): Promise<any> {
public static async injectGalleryVersion(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
res.header(CustomHeaders.dataVersion, await ObjectManagers.getInstance().VersionManager.getDataVersion());
res.header(
CustomHeaders.dataVersion,
await ObjectManagers.getInstance().VersionManager.getDataVersion()
);
next();
} catch (err) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Can not get data version', err.toString()));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Can not get data version',
err.toString()
)
);
}
}

}
@ -1,23 +1,31 @@
import {NextFunction, Request, Response} from 'express';
import {ErrorCodes, ErrorDTO} from '../../../common/entities/Error';
import {ObjectManagers} from '../../model/ObjectManagers';
import {Config} from '../../../common/config/private/Config';
import {ISQLGalleryManager} from '../../model/database/sql/IGalleryManager';
import {DatabaseType, ServerConfig} from '../../../common/config/private/PrivateConfig';
import {ISQLPersonManager} from '../../model/database/sql/IPersonManager';
import {StatisticDTO} from '../../../common/entities/settings/StatisticDTO';

import { NextFunction, Request, Response } from 'express';
import { ErrorCodes, ErrorDTO } from '../../../common/entities/Error';
import { ObjectManagers } from '../../model/ObjectManagers';
import { Config } from '../../../common/config/private/Config';
import { ISQLGalleryManager } from '../../model/database/sql/IGalleryManager';
import { DatabaseType } from '../../../common/config/private/PrivateConfig';
import { ISQLPersonManager } from '../../model/database/sql/IPersonManager';
import { StatisticDTO } from '../../../common/entities/settings/StatisticDTO';

export class AdminMWs {

public static async loadStatistic(req: Request, res: Response, next: NextFunction): Promise<void> {
public static async loadStatistic(
req: Request,
res: Response,
next: NextFunction
): Promise<void> {
if (Config.Server.Database.type === DatabaseType.memory) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Statistic is only available for indexed content'));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Statistic is only available for indexed content'
)
);
}


const galleryManager = ObjectManagers.getInstance().GalleryManager as ISQLGalleryManager;
const personManager = ObjectManagers.getInstance().PersonManager as ISQLPersonManager;
const galleryManager = ObjectManagers.getInstance()
.GalleryManager as ISQLGalleryManager;
const personManager = ObjectManagers.getInstance()
.PersonManager as ISQLPersonManager;
try {
req.resultPipe = {
directories: await galleryManager.countDirectories(),
@ -29,83 +37,181 @@ export class AdminMWs {
return next();
} catch (err) {
if (err instanceof Error) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error while getting statistic: ' + err.toString(), err));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Error while getting statistic: ' + err.toString(),
err
)
);
}
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error while getting statistic', err));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Error while getting statistic',
err
)
);
}
}

public static async getDuplicates(req: Request, res: Response, next: NextFunction): Promise<void> {
public static async getDuplicates(
req: Request,
res: Response,
next: NextFunction
): Promise<void> {
if (Config.Server.Database.type === DatabaseType.memory) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Statistic is only available for indexed content'));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Statistic is only available for indexed content'
)
);
}


const galleryManager = ObjectManagers.getInstance().GalleryManager as ISQLGalleryManager;
const galleryManager = ObjectManagers.getInstance()
.GalleryManager as ISQLGalleryManager;
try {
req.resultPipe = await galleryManager.getPossibleDuplicates();
return next();
} catch (err) {
if (err instanceof Error) {
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error while getting duplicates: ' + err.toString(), err));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Error while getting duplicates: ' + err.toString(),
err
)
);
}
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, 'Error while getting duplicates', err));
return next(
new ErrorDTO(
ErrorCodes.GENERAL_ERROR,
'Error while getting duplicates',
err
)
);
}
}


public static async startJob(req: Request, res: Response, next: NextFunction): Promise<void> {
public static async startJob(
req: Request,
res: Response,
next: NextFunction
): Promise<void> {
try {
const id = req.params.id;
const id = req.params['id'];
const JobConfig: any = req.body.config;
const soloRun: boolean = req.body.soloRun;
const allowParallelRun: boolean = req.body.allowParallelRun;
await ObjectManagers.getInstance().JobManager.run(id, JobConfig, soloRun, allowParallelRun);
await ObjectManagers.getInstance().JobManager.run(
id,
JobConfig,
soloRun,
allowParallelRun
);
req.resultPipe = 'ok';
return next();
} catch (err) {
if (err instanceof Error) {
return next(new ErrorDTO(ErrorCodes.JOB_ERROR, 'Job error: ' + err.toString(), err));
return next(
new ErrorDTO(
ErrorCodes.JOB_ERROR,
'Job error: ' + err.toString(),
err
)
);
}
return next(new ErrorDTO(ErrorCodes.JOB_ERROR, 'Job error: ' + JSON.stringify(err, null, ' '), err));
return next(
new ErrorDTO(
ErrorCodes.JOB_ERROR,
'Job error: ' + JSON.stringify(err, null, ' '),
err
)
);
}
}

public static stopJob(req: Request, res: Response, next: NextFunction): void {
try {
const id = req.params.id;
const id = req.params['id'];
ObjectManagers.getInstance().JobManager.stop(id);
req.resultPipe = 'ok';
return next();
} catch (err) {
if (err instanceof Error) {
return next(new ErrorDTO(ErrorCodes.JOB_ERROR, 'Job error: ' + err.toString(), err));
return next(
new ErrorDTO(
ErrorCodes.JOB_ERROR,
'Job error: ' + err.toString(),
err
)
);
}
return next(new ErrorDTO(ErrorCodes.JOB_ERROR, 'Job error: ' + JSON.stringify(err, null, ' '), err));
return next(
new ErrorDTO(
ErrorCodes.JOB_ERROR,
'Job error: ' + JSON.stringify(err, null, ' '),
err
)
);
}
}

public static getAvailableJobs(req: Request, res: Response, next: NextFunction): void {
public static getAvailableJobs(
req: Request,
res: Response,
next: NextFunction
): void {
try {
req.resultPipe = ObjectManagers.getInstance().JobManager.getAvailableJobs();
req.resultPipe =
ObjectManagers.getInstance().JobManager.getAvailableJobs();
return next();
} catch (err) {
if (err instanceof Error) {
return next(new ErrorDTO(ErrorCodes.JOB_ERROR, 'Job error: ' + err.toString(), err));
return next(
new ErrorDTO(
ErrorCodes.JOB_ERROR,
'Job error: ' + err.toString(),
err
)
);
}
return next(new ErrorDTO(ErrorCodes.JOB_ERROR, 'Job error: ' + JSON.stringify(err, null, ' '), err));
return next(
new ErrorDTO(
ErrorCodes.JOB_ERROR,
'Job error: ' + JSON.stringify(err, null, ' '),
err
)
);
}
}

public static getJobProgresses(req: Request, res: Response, next: NextFunction): void {
public static getJobProgresses(
req: Request,
res: Response,
next: NextFunction
): void {
try {
req.resultPipe = ObjectManagers.getInstance().JobManager.getProgresses();
return next();
} catch (err) {
if (err instanceof Error) {
return next(new ErrorDTO(ErrorCodes.JOB_ERROR, 'Job error: ' + err.toString(), err));
return next(
new ErrorDTO(
ErrorCodes.JOB_ERROR,
'Job error: ' + err.toString(),
err
)
);
}
return next(new ErrorDTO(ErrorCodes.JOB_ERROR, 'Job error: ' + JSON.stringify(err, null, ' '), err));
return next(
new ErrorDTO(
ErrorCodes.JOB_ERROR,
'Job error: ' + JSON.stringify(err, null, ' '),
err
)
);
}
}
}
@ -1,14 +1,12 @@
import {LoginCredential} from '../../../common/entities/LoginCredential';
import {UserDTO} from '../../../common/entities/UserDTO';

import { LoginCredential } from '../../../common/entities/LoginCredential';
import { UserDTO } from '../../../common/entities/UserDTO';

declare global {
namespace Express {
interface Request {

resultPipe?: any;
body?: {
loginCredential?: LoginCredential
loginCredential?: LoginCredential;
};
locale?: string;
}
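This `declare global` block is also why the rest of the diff switches from `req.session.user` to `req.session['user']`: only `Request` is augmented here, and under the stricter compiler settings that presumably come with this upgrade, dot access on the loosely typed session object no longer type-checks while index access still does. A hedged alternative, not part of this commit, would be to augment the session type itself (assuming cookie-session, which `req.sessionOptions` later in this diff suggests):

```typescript
// Sketch only (an assumption, not something this commit adds): typing the session
// so that req.session.user would type-check without bracket access.
// The import path follows the style of the declaration file above.
import { UserDTO } from '../../../common/entities/UserDTO';

declare global {
  // @types/cookie-session exposes this namespace for augmentation
  namespace CookieSessionInterfaces {
    interface CookieSessionObject {
      user?: UserDTO;
    }
  }
}
```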
@ -1,11 +1,14 @@
import {NextFunction, Request, Response} from 'express';
import { NextFunction, Request, Response } from 'express';
import * as fs from 'fs';
import {PhotoProcessing} from '../../model/fileprocessing/PhotoProcessing';
import {Config} from '../../../common/config/private/Config';
import { PhotoProcessing } from '../../model/fileprocessing/PhotoProcessing';
import { Config } from '../../../common/config/private/Config';

export class PhotoConverterMWs {

public static async convertPhoto(req: Request, res: Response, next: NextFunction): Promise<any> {
public static async convertPhoto(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
if (!req.resultPipe) {
return next();
}
@ -15,7 +18,10 @@ export class PhotoConverterMWs {
}
const fullMediaPath = req.resultPipe;

const convertedVideo = PhotoProcessing.generateConvertedPath(fullMediaPath, Config.Server.Media.Photo.Converting.resolution);
const convertedVideo = PhotoProcessing.generateConvertedPath(
fullMediaPath,
Config.Server.Media.Photo.Converting.resolution
);

// check if converted photo exist
if (fs.existsSync(convertedVideo) === true) {
@ -1,23 +1,30 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import {ErrorCodes, ErrorDTO} from '../../../common/entities/Error';
|
||||
import {ContentWrapper} from '../../../common/entities/ConentWrapper';
|
||||
import {ParentDirectoryDTO, SubDirectoryDTO} from '../../../common/entities/DirectoryDTO';
|
||||
import {ProjectPath} from '../../ProjectPath';
|
||||
import {Config} from '../../../common/config/private/Config';
|
||||
import {ThumbnailSourceType} from '../../model/threading/PhotoWorker';
|
||||
import {MediaDTO} from '../../../common/entities/MediaDTO';
|
||||
import {PhotoProcessing} from '../../model/fileprocessing/PhotoProcessing';
|
||||
import {PersonWithSampleRegion} from '../../../common/entities/PersonDTO';
|
||||
import {ServerTime} from '../ServerTimingMWs';
|
||||
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { ErrorCodes, ErrorDTO } from '../../../common/entities/Error';
|
||||
import { ContentWrapper } from '../../../common/entities/ConentWrapper';
|
||||
import {
|
||||
ParentDirectoryDTO,
|
||||
SubDirectoryDTO,
|
||||
} from '../../../common/entities/DirectoryDTO';
|
||||
import { ProjectPath } from '../../ProjectPath';
|
||||
import { Config } from '../../../common/config/private/Config';
|
||||
import { ThumbnailSourceType } from '../../model/threading/PhotoWorker';
|
||||
import { MediaDTO } from '../../../common/entities/MediaDTO';
|
||||
import { PhotoProcessing } from '../../model/fileprocessing/PhotoProcessing';
|
||||
import { PersonWithSampleRegion } from '../../../common/entities/PersonDTO';
|
||||
import { ServerTime } from '../ServerTimingMWs';
|
||||
|
||||
export class ThumbnailGeneratorMWs {
|
||||
private static ThumbnailMap: { [key: number]: number } = Config.Client.Media.Thumbnail.generateThumbnailMap();
|
||||
private static ThumbnailMap: { [key: number]: number } =
|
||||
Config.Client.Media.Thumbnail.generateThumbnailMap();
|
||||
|
||||
@ServerTime('2.th', 'Thumbnail decoration')
|
||||
public static async addThumbnailInformation(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async addThumbnailInformation(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (!req.resultPipe) {
|
||||
return next();
|
||||
}
|
||||
@ -33,20 +40,26 @@ export class ThumbnailGeneratorMWs {
|
||||
if (cw.searchResult && cw.searchResult.media) {
|
||||
ThumbnailGeneratorMWs.addThInfoToPhotos(cw.searchResult.media);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return next(new ErrorDTO(ErrorCodes.SERVER_ERROR, 'error during postprocessing result (adding thumbnail info)', error.toString()));
|
||||
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.SERVER_ERROR,
|
||||
'error during postprocessing result (adding thumbnail info)',
|
||||
error.toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
return next();
|
||||
|
||||
}
|
||||
|
||||
|
||||
// tslint:disable-next-line:typedef
|
||||
public static addThumbnailInfoForPersons(req: Request, res: Response, next: NextFunction): void {
|
||||
// eslint-disable-next-line @typescript-eslint/typedef, @typescript-eslint/explicit-function-return-type, @typescript-eslint/explicit-module-boundary-types
|
||||
public static addThumbnailInfoForPersons(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): void {
|
||||
if (!req.resultPipe) {
|
||||
return next();
|
||||
}
|
||||
@ -61,28 +74,40 @@ export class ThumbnailGeneratorMWs {
|
||||
continue;
|
||||
}
|
||||
// load parameters
|
||||
const mediaPath = path.join(ProjectPath.ImageFolder,
|
||||
const mediaPath = path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
item.sampleRegion.media.directory.path,
|
||||
item.sampleRegion.media.directory.name, item.sampleRegion.media.name);
|
||||
item.sampleRegion.media.directory.name,
|
||||
item.sampleRegion.media.name
|
||||
);
|
||||
|
||||
// generate thumbnail path
|
||||
const thPath = PhotoProcessing.generatePersonThumbnailPath(mediaPath, item.sampleRegion, size);
|
||||
const thPath = PhotoProcessing.generatePersonThumbnailPath(
|
||||
mediaPath,
|
||||
item.sampleRegion,
|
||||
size
|
||||
);
|
||||
|
||||
item.missingThumbnail = !fs.existsSync(thPath);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
return next(new ErrorDTO(ErrorCodes.SERVER_ERROR, 'error during postprocessing result (adding thumbnail info for persons)',
|
||||
error.toString()));
|
||||
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.SERVER_ERROR,
|
||||
'error during postprocessing result (adding thumbnail info for persons)',
|
||||
error.toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
return next();
|
||||
|
||||
}
|
||||
|
||||
|
||||
public static async generatePersonThumbnail(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async generatePersonThumbnail(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (!req.resultPipe) {
|
||||
return next();
|
||||
}
|
||||
@ -92,43 +117,67 @@ export class ThumbnailGeneratorMWs {
|
||||
return next();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return next(new ErrorDTO(ErrorCodes.THUMBNAIL_GENERATION_ERROR,
|
||||
'Error during generating face thumbnail: ' + person.name, error.toString()));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.THUMBNAIL_GENERATION_ERROR,
|
||||
'Error during generating face thumbnail: ' + person.name,
|
||||
error.toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
public static generateThumbnailFactory(sourceType: ThumbnailSourceType):
|
||||
(req: Request, res: Response, next: NextFunction) => Promise<any> {
|
||||
return async (req: Request, res: Response, next: NextFunction): Promise<any> => {
|
||||
public static generateThumbnailFactory(
|
||||
sourceType: ThumbnailSourceType
|
||||
): (req: Request, res: Response, next: NextFunction) => Promise<any> {
|
||||
return async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> => {
|
||||
if (!req.resultPipe) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// load parameters
|
||||
const mediaPath = req.resultPipe;
|
||||
let size: number = parseInt(req.params.size, 10) || Config.Client.Media.Thumbnail.thumbnailSizes[0];
|
||||
let size: number =
|
||||
parseInt(req.params.size, 10) ||
|
||||
Config.Client.Media.Thumbnail.thumbnailSizes[0];
|
||||
|
||||
// validate size
|
||||
if (Config.Client.Media.Thumbnail.thumbnailSizes.indexOf(size) === -1) {
|
||||
size = Config.Client.Media.Thumbnail.thumbnailSizes[0];
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
req.resultPipe = await PhotoProcessing.generateThumbnail(mediaPath, size, sourceType, false);
|
||||
req.resultPipe = await PhotoProcessing.generateThumbnail(
|
||||
mediaPath,
|
||||
size,
|
||||
sourceType,
|
||||
false
|
||||
);
|
||||
return next();
|
||||
} catch (error) {
|
||||
return next(new ErrorDTO(ErrorCodes.THUMBNAIL_GENERATION_ERROR,
|
||||
'Error during generating thumbnail: ' + mediaPath, error.toString()));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.THUMBNAIL_GENERATION_ERROR,
|
||||
'Error during generating thumbnail: ' + mediaPath,
|
||||
error.toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static generateIconFactory(sourceType: ThumbnailSourceType):
|
||||
(req: Request, res: Response, next: NextFunction) => Promise<any> {
|
||||
return async (req: Request, res: Response, next: NextFunction): Promise<any> => {
|
||||
public static generateIconFactory(
|
||||
sourceType: ThumbnailSourceType
|
||||
): (req: Request, res: Response, next: NextFunction) => Promise<any> {
|
||||
return async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> => {
|
||||
if (!req.resultPipe) {
|
||||
return next();
|
||||
}
|
||||
@ -138,18 +187,30 @@ export class ThumbnailGeneratorMWs {
|
||||
const size: number = Config.Client.Media.Thumbnail.iconSize;
|
||||
|
||||
try {
|
||||
req.resultPipe = await PhotoProcessing.generateThumbnail(mediaPath, size, sourceType, true);
|
||||
req.resultPipe = await PhotoProcessing.generateThumbnail(
|
||||
mediaPath,
|
||||
size,
|
||||
sourceType,
|
||||
true
|
||||
);
|
||||
return next();
|
||||
} catch (error) {
|
||||
return next(new ErrorDTO(ErrorCodes.THUMBNAIL_GENERATION_ERROR,
|
||||
'Error during generating thumbnail: ' + mediaPath, error.toString()));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.THUMBNAIL_GENERATION_ERROR,
|
||||
'Error during generating thumbnail: ' + mediaPath,
|
||||
error.toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
private static addThInfoTODir(directory: ParentDirectoryDTO | SubDirectoryDTO): void {
|
||||
ThumbnailGeneratorMWs.ThumbnailMap = Config.Client.Media.Thumbnail.generateThumbnailMap();
|
||||
private static addThInfoTODir(
|
||||
directory: ParentDirectoryDTO | SubDirectoryDTO
|
||||
): void {
|
||||
ThumbnailGeneratorMWs.ThumbnailMap =
|
||||
Config.Client.Media.Thumbnail.generateThumbnailMap();
|
||||
if (typeof directory.media !== 'undefined') {
|
||||
ThumbnailGeneratorMWs.addThInfoToPhotos(directory.media);
|
||||
}
|
||||
@ -165,18 +226,26 @@ export class ThumbnailGeneratorMWs {
|
||||
}
|
||||
|
||||
private static addThInfoToAPhoto(photo: MediaDTO): void {
|
||||
const fullMediaPath = path.join(ProjectPath.ImageFolder, photo.directory.path, photo.directory.name, photo.name);
|
||||
const fullMediaPath = path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
photo.directory.path,
|
||||
photo.directory.name,
|
||||
photo.name
|
||||
);
|
||||
for (const size of Object.keys(ThumbnailGeneratorMWs.ThumbnailMap)) {
|
||||
const thPath = PhotoProcessing.generateConvertedPath(fullMediaPath, size as any);
|
||||
const thPath = PhotoProcessing.generateConvertedPath(
|
||||
fullMediaPath,
|
||||
size as any
|
||||
);
|
||||
if (fs.existsSync(thPath) !== true) {
|
||||
if (typeof photo.missingThumbnails === 'undefined') {
|
||||
photo.missingThumbnails = 0;
|
||||
}
|
||||
// this is a bitwise operation
|
||||
photo.missingThumbnails += ThumbnailGeneratorMWs.ThumbnailMap[size as any];
|
||||
photo.missingThumbnails +=
|
||||
ThumbnailGeneratorMWs.ThumbnailMap[size as any];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
@ -1,82 +1,109 @@
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import {ErrorCodes, ErrorDTO} from '../../../common/entities/Error';
|
||||
import {UserDTO, UserDTOUtils, UserRoles} from '../../../common/entities/UserDTO';
|
||||
import {ObjectManagers} from '../../model/ObjectManagers';
|
||||
import {Config} from '../../../common/config/private/Config';
|
||||
import {PasswordHelper} from '../../model/PasswordHelper';
|
||||
import {Utils} from '../../../common/Utils';
|
||||
import {QueryParams} from '../../../common/QueryParams';
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { ErrorCodes, ErrorDTO } from '../../../common/entities/Error';
|
||||
import {
|
||||
UserDTO,
|
||||
UserDTOUtils,
|
||||
UserRoles,
|
||||
} from '../../../common/entities/UserDTO';
|
||||
import { ObjectManagers } from '../../model/ObjectManagers';
|
||||
import { Config } from '../../../common/config/private/Config';
|
||||
import { PasswordHelper } from '../../model/PasswordHelper';
|
||||
import { Utils } from '../../../common/Utils';
|
||||
import { QueryParams } from '../../../common/QueryParams';
|
||||
import * as path from 'path';
|
||||
|
||||
export class AuthenticationMWs {
|
||||
|
||||
public static async tryAuthenticate(req: Request, res: Response, next: NextFunction): Promise<void> {
|
||||
public static async tryAuthenticate(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> {
|
||||
if (Config.Client.authenticationRequired === false) {
|
||||
req.session.user = {
|
||||
req.session['user'] = {
|
||||
name: UserRoles[Config.Client.unAuthenticatedUserRole],
|
||||
role: Config.Client.unAuthenticatedUserRole
|
||||
role: Config.Client.unAuthenticatedUserRole,
|
||||
} as UserDTO;
|
||||
return next();
|
||||
}
|
||||
try {
|
||||
const user = await AuthenticationMWs.getSharingUser(req);
|
||||
if (!!user) {
|
||||
req.session.user = user;
|
||||
req.session['user'] = user;
|
||||
return next();
|
||||
}
|
||||
} catch (err) {
|
||||
}
|
||||
} catch (err) {}
|
||||
|
||||
return next();
|
||||
|
||||
}
|
||||
|
||||
public static async authenticate(req: Request, res: Response, next: NextFunction): Promise<void> {
|
||||
public static async authenticate(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> {
|
||||
if (Config.Client.authenticationRequired === false) {
|
||||
req.session.user = {
|
||||
req.session['user'] = {
|
||||
name: UserRoles[Config.Client.unAuthenticatedUserRole],
|
||||
role: Config.Client.unAuthenticatedUserRole
|
||||
role: Config.Client.unAuthenticatedUserRole,
|
||||
} as UserDTO;
|
||||
return next();
|
||||
}
|
||||
|
||||
// if already authenticated, do not try to use sharing authentication
|
||||
if (typeof req.session.user !== 'undefined') {
|
||||
if (typeof req.session['user'] !== 'undefined') {
|
||||
return next();
|
||||
}
|
||||
|
||||
try {
|
||||
const user = await AuthenticationMWs.getSharingUser(req);
|
||||
if (!!user) {
|
||||
req.session.user = user;
|
||||
req.session['user'] = user;
|
||||
return next();
|
||||
}
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.CREDENTIAL_NOT_FOUND, null, err));
|
||||
}
|
||||
if (typeof req.session.user === 'undefined') {
|
||||
if (typeof req.session['user'] === 'undefined') {
|
||||
res.status(401);
|
||||
return next(new ErrorDTO(ErrorCodes.NOT_AUTHENTICATED, 'Not authenticated'));
|
||||
return next(
|
||||
new ErrorDTO(ErrorCodes.NOT_AUTHENTICATED, 'Not authenticated')
|
||||
);
|
||||
}
|
||||
return next();
|
||||
}
|
||||
|
||||
|
||||
public static normalizePathParam(paramName: string): (req: Request, res: Response, next: NextFunction) => void {
|
||||
return function normalizePathParam(req: Request, res: Response, next: NextFunction): void {
|
||||
req.params[paramName] = path.normalize(req.params[paramName] || path.sep).replace(/^(\.\.[\/\\])+/, '');
|
||||
public static normalizePathParam(
|
||||
paramName: string
|
||||
): (req: Request, res: Response, next: NextFunction) => void {
|
||||
return function normalizePathParam(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): void {
|
||||
req.params[paramName] = path
|
||||
.normalize(req.params[paramName] || path.sep)
|
||||
.replace(/^(\.\.[\/\\])+/, '');
|
||||
return next();
|
||||
};
|
||||
}
|
||||
|
||||
public static authorisePath(paramName: string, isDirectory: boolean): (req: Request, res: Response, next: NextFunction) => void {
|
||||
return function authorisePath(req: Request, res: Response, next: NextFunction): Response | void {
|
||||
public static authorisePath(
|
||||
paramName: string,
|
||||
isDirectory: boolean
|
||||
): (req: Request, res: Response, next: NextFunction) => void {
|
||||
return function authorisePath(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Response | void {
|
||||
let p: string = req.params[paramName];
|
||||
if (!isDirectory) {
|
||||
p = path.dirname(p);
|
||||
}
|
||||
|
||||
if (!UserDTOUtils.isDirectoryPathAvailable(p, req.session.user.permissions)) {
|
||||
if (
|
||||
!UserDTOUtils.isDirectoryPathAvailable(p, req.session['user'].permissions)
|
||||
) {
|
||||
return res.sendStatus(403);
|
||||
}
|
||||
|
||||
@ -84,38 +111,57 @@ export class AuthenticationMWs {
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
public static authorise(role: UserRoles): (req: Request, res: Response, next: NextFunction) => void {
|
||||
return function authorise(req: Request, res: Response, next: NextFunction): void {
|
||||
if (req.session.user.role < role) {
|
||||
public static authorise(
|
||||
role: UserRoles
|
||||
): (req: Request, res: Response, next: NextFunction) => void {
|
||||
return function authorise(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): void {
|
||||
if (req.session['user'].role < role) {
|
||||
return next(new ErrorDTO(ErrorCodes.NOT_AUTHORISED));
|
||||
}
|
||||
return next();
|
||||
};
|
||||
}
|
||||
|
||||
public static async shareLogin(req: Request, res: Response, next: NextFunction): Promise<void> {
|
||||
|
||||
public static async shareLogin(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> {
|
||||
if (Config.Client.Sharing.enabled === false) {
|
||||
return next();
|
||||
}
|
||||
// not enough parameter
|
||||
if ((!req.query[QueryParams.gallery.sharingKey_query] && !req.params[QueryParams.gallery.sharingKey_params])) {
|
||||
return next(new ErrorDTO(ErrorCodes.INPUT_ERROR, 'no sharing key provided'));
|
||||
if (
|
||||
!req.query[QueryParams.gallery.sharingKey_query] &&
|
||||
!req.params[QueryParams.gallery.sharingKey_params]
|
||||
) {
|
||||
return next(
|
||||
new ErrorDTO(ErrorCodes.INPUT_ERROR, 'no sharing key provided')
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
const password = (req.body ? req.body.password : null) || null;
|
||||
const sharingKey: string = req.query[QueryParams.gallery.sharingKey_query] as string ||
|
||||
req.params[QueryParams.gallery.sharingKey_params] as string;
|
||||
const sharing = await ObjectManagers.getInstance().SharingManager.findOne({
|
||||
sharingKey
|
||||
});
|
||||
const sharingKey: string =
|
||||
(req.query[QueryParams.gallery.sharingKey_query] as string) ||
|
||||
(req.params[QueryParams.gallery.sharingKey_params] as string);
|
||||
const sharing = await ObjectManagers.getInstance().SharingManager.findOne(
|
||||
{
|
||||
sharingKey,
|
||||
}
|
||||
);
|
||||
|
||||
if (!sharing || sharing.expires < Date.now() ||
|
||||
(Config.Client.Sharing.passwordProtected === true
|
||||
&& (sharing.password)
|
||||
&& !PasswordHelper.comparePassword(password, sharing.password))) {
|
||||
if (
|
||||
!sharing ||
|
||||
sharing.expires < Date.now() ||
|
||||
(Config.Client.Sharing.passwordProtected === true &&
|
||||
sharing.password &&
|
||||
!PasswordHelper.comparePassword(password, sharing.password))
|
||||
) {
|
||||
res.status(401);
|
||||
return next(new ErrorDTO(ErrorCodes.CREDENTIAL_NOT_FOUND));
|
||||
}
|
||||
@ -125,78 +171,106 @@ export class AuthenticationMWs {
|
||||
sharingPath += '*';
|
||||
}
|
||||
|
||||
req.session.user = {
|
||||
req.session['user'] = {
|
||||
name: 'Guest',
|
||||
role: UserRoles.LimitedGuest,
|
||||
permissions: [sharingPath],
|
||||
usedSharingKey: sharing.sharingKey
|
||||
usedSharingKey: sharing.sharingKey,
|
||||
} as UserDTO;
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, null, err));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static inverseAuthenticate(req: Request, res: Response, next: NextFunction): void {
|
||||
if (typeof req.session.user !== 'undefined') {
|
||||
public static inverseAuthenticate(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): void {
|
||||
if (typeof req.session['user'] !== 'undefined') {
|
||||
return next(new ErrorDTO(ErrorCodes.ALREADY_AUTHENTICATED));
|
||||
}
|
||||
return next();
|
||||
}
|
||||
|
||||
public static async login(req: Request, res: Response, next: NextFunction): Promise<void | Response> {
|
||||
|
||||
public static async login(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void | Response> {
|
||||
if (Config.Client.authenticationRequired === false) {
|
||||
return res.sendStatus(404);
|
||||
}
|
||||
|
||||
// not enough parameter
|
||||
if ((typeof req.body === 'undefined') ||
|
||||
(typeof req.body.loginCredential === 'undefined') ||
|
||||
(typeof req.body.loginCredential.username === 'undefined') ||
|
||||
(typeof req.body.loginCredential.password === 'undefined')) {
|
||||
return next(new ErrorDTO(ErrorCodes.INPUT_ERROR, 'not all parameters are included for loginCredential'));
|
||||
if (
|
||||
typeof req.body === 'undefined' ||
|
||||
typeof req.body.loginCredential === 'undefined' ||
|
||||
typeof req.body.loginCredential.username === 'undefined' ||
|
||||
typeof req.body.loginCredential.password === 'undefined'
|
||||
) {
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.INPUT_ERROR,
|
||||
'not all parameters are included for loginCredential'
|
||||
)
|
||||
);
|
||||
}
|
||||
try {
|
||||
// lets find the user
|
||||
const user = Utils.clone(await ObjectManagers.getInstance().UserManager.findOne({
|
||||
name: req.body.loginCredential.username,
|
||||
password: req.body.loginCredential.password
|
||||
}));
|
||||
delete (user.password);
|
||||
req.session.user = user;
|
||||
const user = Utils.clone(
|
||||
await ObjectManagers.getInstance().UserManager.findOne({
|
||||
name: req.body.loginCredential.username,
|
||||
password: req.body.loginCredential.password,
|
||||
})
|
||||
);
|
||||
delete user.password;
|
||||
req.session['user'] = user;
|
||||
if (req.body.loginCredential.rememberMe) {
|
||||
req.sessionOptions.expires = new Date(Date.now() + Config.Server.sessionTimeout);
|
||||
req.sessionOptions.expires = new Date(
|
||||
Date.now() + Config.Server.sessionTimeout
|
||||
);
|
||||
}
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.CREDENTIAL_NOT_FOUND, 'credentials not found during login', err));
|
||||
return next(
|
||||
new ErrorDTO(
|
||||
ErrorCodes.CREDENTIAL_NOT_FOUND,
|
||||
'credentials not found during login',
|
||||
err
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
public static logout(req: Request, res: Response, next: NextFunction): void {
|
||||
delete req.session.user;
|
||||
delete req.session['user'];
|
||||
return next();
|
||||
}
|
||||
|
||||
private static async getSharingUser(req: Request): Promise<UserDTO> {
|
||||
if (Config.Client.Sharing.enabled === true &&
|
||||
(!!req.query[QueryParams.gallery.sharingKey_query] || !!req.params[QueryParams.gallery.sharingKey_params])) {
|
||||
const sharingKey: string = req.query[QueryParams.gallery.sharingKey_query] as string ||
|
||||
req.params[QueryParams.gallery.sharingKey_params] as string;
|
||||
const sharing = await ObjectManagers.getInstance().SharingManager.findOne({
|
||||
sharingKey
|
||||
});
|
||||
if (
|
||||
Config.Client.Sharing.enabled === true &&
|
||||
(!!req.query[QueryParams.gallery.sharingKey_query] ||
|
||||
!!req.params[QueryParams.gallery.sharingKey_params])
|
||||
) {
|
||||
const sharingKey: string =
|
||||
(req.query[QueryParams.gallery.sharingKey_query] as string) ||
|
||||
(req.params[QueryParams.gallery.sharingKey_params] as string);
|
||||
const sharing = await ObjectManagers.getInstance().SharingManager.findOne(
|
||||
{
|
||||
sharingKey,
|
||||
}
|
||||
);
|
||||
if (!sharing || sharing.expires < Date.now()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (Config.Client.Sharing.passwordProtected === true && sharing.password) {
|
||||
if (
|
||||
Config.Client.Sharing.passwordProtected === true &&
|
||||
sharing.password
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@ -208,11 +282,9 @@ export class AuthenticationMWs {
|
||||
name: 'Guest',
|
||||
role: UserRoles.LimitedGuest,
|
||||
permissions: [sharingPath],
|
||||
usedSharingKey: sharing.sharingKey
|
||||
usedSharingKey: sharing.sharingKey,
|
||||
} as UserDTO;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,89 +1,121 @@
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import {ErrorCodes, ErrorDTO} from '../../../common/entities/Error';
|
||||
import {ObjectManagers} from '../../model/ObjectManagers';
|
||||
import {Utils} from '../../../common/Utils';
|
||||
import {Config} from '../../../common/config/private/Config';
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { ErrorCodes, ErrorDTO } from '../../../common/entities/Error';
|
||||
import { ObjectManagers } from '../../model/ObjectManagers';
|
||||
import { Utils } from '../../../common/Utils';
|
||||
import { Config } from '../../../common/config/private/Config';
|
||||
|
||||
export class UserMWs {
|
||||
|
||||
public static async changePassword(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async changePassword(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (Config.Client.authenticationRequired === false) {
|
||||
return next(new ErrorDTO(ErrorCodes.USER_MANAGEMENT_DISABLED));
|
||||
}
|
||||
if ((typeof req.body === 'undefined') || (typeof req.body.userModReq === 'undefined')
|
||||
|| (typeof req.body.userModReq.id === 'undefined')
|
||||
|| (typeof req.body.userModReq.oldPassword === 'undefined')
|
||||
|| (typeof req.body.userModReq.newPassword === 'undefined')) {
|
||||
if (
|
||||
typeof req.body === 'undefined' ||
|
||||
typeof req.body.userModReq === 'undefined' ||
|
||||
typeof req.body.userModReq.id === 'undefined' ||
|
||||
typeof req.body.userModReq.oldPassword === 'undefined' ||
|
||||
typeof req.body.userModReq.newPassword === 'undefined'
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
|
||||
try {
|
||||
await ObjectManagers.getInstance().UserManager.changePassword(req.body.userModReq);
|
||||
await ObjectManagers.getInstance().UserManager.changePassword(
|
||||
req.body.userModReq
|
||||
);
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, null, err));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static async createUser(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async createUser(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (Config.Client.authenticationRequired === false) {
|
||||
return next(new ErrorDTO(ErrorCodes.USER_MANAGEMENT_DISABLED));
|
||||
}
|
||||
if ((typeof req.body === 'undefined') || (typeof req.body.newUser === 'undefined')) {
|
||||
if (
|
||||
typeof req.body === 'undefined' ||
|
||||
typeof req.body.newUser === 'undefined'
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
|
||||
try {
|
||||
await ObjectManagers.getInstance().UserManager.createUser(req.body.newUser);
|
||||
await ObjectManagers.getInstance().UserManager.createUser(
|
||||
req.body.newUser
|
||||
);
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.USER_CREATION_ERROR, null, err));
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
public static async deleteUser(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async deleteUser(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (Config.Client.authenticationRequired === false) {
|
||||
return next(new ErrorDTO(ErrorCodes.USER_MANAGEMENT_DISABLED));
|
||||
}
|
||||
if ((typeof req.params === 'undefined') || (typeof req.params.id === 'undefined')) {
|
||||
if (
|
||||
typeof req.params === 'undefined' ||
|
||||
typeof req.params.id === 'undefined'
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
await ObjectManagers.getInstance().UserManager.deleteUser(parseInt(req.params.id, 10));
|
||||
await ObjectManagers.getInstance().UserManager.deleteUser(
|
||||
parseInt(req.params.id, 10)
|
||||
);
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, null, err));
|
||||
}
|
||||
}
|
||||
|
||||
public static async changeRole(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async changeRole(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (Config.Client.authenticationRequired === false) {
|
||||
return next(new ErrorDTO(ErrorCodes.USER_MANAGEMENT_DISABLED));
|
||||
}
|
||||
if ((typeof req.params === 'undefined') || (typeof req.params.id === 'undefined')
|
||||
|| (typeof req.body === 'undefined') || (typeof req.body.newRole === 'undefined')) {
|
||||
if (
|
||||
typeof req.params === 'undefined' ||
|
||||
typeof req.params.id === 'undefined' ||
|
||||
typeof req.body === 'undefined' ||
|
||||
typeof req.body.newRole === 'undefined'
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
|
||||
try {
|
||||
await ObjectManagers.getInstance().UserManager.changeRole(parseInt(req.params.id, 10), req.body.newRole);
|
||||
await ObjectManagers.getInstance().UserManager.changeRole(
|
||||
parseInt(req.params.id, 10),
|
||||
req.body.newRole
|
||||
);
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, null, err));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static async listUsers(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
public static async listUsers(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (Config.Client.authenticationRequired === false) {
|
||||
return next(new ErrorDTO(ErrorCodes.USER_MANAGEMENT_DISABLED));
|
||||
}
|
||||
@ -100,6 +132,4 @@ export class UserMWs {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR, null, err));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
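Illustrative sketch, not part of this commit: every middleware in UserMWs above follows the same shape: refuse the request when user management is disabled, skip on a missing payload, delegate to UserManager and wrap failures in an ErrorDTO. A hypothetical shared helper (its name and placement are assumptions; the file keeps the explicit per-method form) could express that pattern once:

// Hedged sketch only; relies on the Request/Response/NextFunction, Config, ErrorCodes and ErrorDTO imports already present in this file.
const guardedUserAction = (
  action: (req: Request) => Promise<void>,
  errorCode: ErrorCodes = ErrorCodes.GENERAL_ERROR
) => async (req: Request, res: Response, next: NextFunction): Promise<any> => {
  if (Config.Client.authenticationRequired === false) {
    // user management is meaningless without authentication
    return next(new ErrorDTO(ErrorCodes.USER_MANAGEMENT_DISABLED));
  }
  try {
    await action(req);
    return next();
  } catch (err) {
    return next(new ErrorDTO(errorCode, null, err));
  }
};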
@ -1,57 +1,74 @@
|
||||
import {NextFunction, Request, Response} from 'express';
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { MoreThanOrEqual } from 'typeorm';
|
||||
import {ErrorCodes, ErrorDTO} from '../../../common/entities/Error';
|
||||
import {UserRoles} from '../../../common/entities/UserDTO';
|
||||
import {ObjectManagers} from '../../model/ObjectManagers';
|
||||
import { ErrorCodes, ErrorDTO } from '../../../common/entities/Error';
|
||||
import { UserRoles } from '../../../common/entities/UserDTO';
|
||||
import { ObjectManagers } from '../../model/ObjectManagers';
|
||||
|
||||
export class UserRequestConstrainsMWs {
|
||||
|
||||
public static forceSelfRequest(req: Request, res: Response, next: NextFunction): any {
|
||||
if ((typeof req.params === 'undefined') || (typeof req.params.id === 'undefined')) {
|
||||
public static forceSelfRequest(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): any {
|
||||
if (
|
||||
typeof req.params === 'undefined' ||
|
||||
typeof req.params.id === 'undefined'
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
if (req.session.user.id !== parseInt(req.params.id, 10)) {
|
||||
if (req.session['user'].id !== parseInt(req.params.id, 10)) {
|
||||
return next(new ErrorDTO(ErrorCodes.NOT_AUTHORISED));
|
||||
}
|
||||
|
||||
return next();
|
||||
}
|
||||
|
||||
|
||||
public static notSelfRequest(req: Request, res: Response, next: NextFunction): any {
|
||||
if ((typeof req.params === 'undefined') || (typeof req.params.id === 'undefined')) {
|
||||
public static notSelfRequest(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): any {
|
||||
if (
|
||||
typeof req.params === 'undefined' ||
|
||||
typeof req.params.id === 'undefined'
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
|
||||
if (req.session.user.id === parseInt(req.params.id, 10)) {
|
||||
if (req.session['user'].id === parseInt(req.params.id, 10)) {
|
||||
return next(new ErrorDTO(ErrorCodes.NOT_AUTHORISED));
|
||||
}
|
||||
|
||||
return next();
|
||||
}
|
||||
|
||||
public static async notSelfRequestOr2Admins(req: Request, res: Response, next: NextFunction): Promise<any> {
|
||||
if ((typeof req.params === 'undefined') || (typeof req.params.id === 'undefined')) {
|
||||
public static async notSelfRequestOr2Admins(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
if (
|
||||
typeof req.params === 'undefined' ||
|
||||
typeof req.params.id === 'undefined'
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
|
||||
if (req.session.user.id !== parseInt(req.params.id, 10)) {
|
||||
if (req.session['user'].id !== parseInt(req.params.id, 10)) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// TODO: fix it!
|
||||
try {
|
||||
const result = await ObjectManagers.getInstance().UserManager.find({role: MoreThanOrEqual(UserRoles.Admin)});
|
||||
const result = await ObjectManagers.getInstance().UserManager.find({
|
||||
role: MoreThanOrEqual(UserRoles.Admin),
|
||||
});
|
||||
if (result.length <= 1) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR));
|
||||
}
|
||||
return next();
|
||||
|
||||
} catch (err) {
|
||||
return next(new ErrorDTO(ErrorCodes.GENERAL_ERROR));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
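Side note, illustrative only and not part of this commit: the switch from req.session.user to req.session['user'] above works around session typings that do not declare a user field. A hedged alternative sketch, assuming an express-session style SessionData interface is available, is to augment it once so dot access stays type-checked:

// Sketch only; the 'express-session' module name is an assumption about the session library in use.
import { UserDTO } from '../../../common/entities/UserDTO';

declare module 'express-session' {
  interface SessionData {
    user?: UserDTO;
  }
}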
@ -1,10 +1,15 @@
|
||||
import {DirectoryDTOUtils, ParentDirectoryDTO} from '../../common/entities/DirectoryDTO';
|
||||
import {Logger} from '../Logger';
|
||||
import {Config} from '../../common/config/private/Config';
|
||||
import {DiskManagerTH} from './threading/ThreadPool';
|
||||
import {DirectoryScanSettings, DiskMangerWorker} from './threading/DiskMangerWorker';
|
||||
import {FileDTO} from '../../common/entities/FileDTO';
|
||||
|
||||
import {
|
||||
DirectoryDTOUtils,
|
||||
ParentDirectoryDTO,
|
||||
} from '../../common/entities/DirectoryDTO';
|
||||
import { Logger } from '../Logger';
|
||||
import { Config } from '../../common/config/private/Config';
|
||||
import { DiskManagerTH } from './threading/ThreadPool';
|
||||
import {
|
||||
DirectoryScanSettings,
|
||||
DiskMangerWorker,
|
||||
} from './threading/DiskMangerWorker';
|
||||
import { FileDTO } from '../../common/entities/FileDTO';
|
||||
|
||||
const LOG_TAG = '[DiskManager]';
|
||||
|
||||
@ -20,26 +25,34 @@ export class DiskManager {
|
||||
/**
|
||||
* List all files in a folder as fast as possible
|
||||
*/
|
||||
public static async scanDirectoryNoMetadata(relativeDirectoryName: string,
|
||||
settings: DirectoryScanSettings = {}): Promise<ParentDirectoryDTO<FileDTO>> {
|
||||
public static async scanDirectoryNoMetadata(
|
||||
relativeDirectoryName: string,
|
||||
settings: DirectoryScanSettings = {}
|
||||
): Promise<ParentDirectoryDTO<FileDTO>> {
|
||||
settings.noMetadata = true;
|
||||
return this.scanDirectory(relativeDirectoryName, settings);
|
||||
}
|
||||
|
||||
public static async scanDirectory(relativeDirectoryName: string,
|
||||
settings: DirectoryScanSettings = {}): Promise<ParentDirectoryDTO> {
|
||||
|
||||
public static async scanDirectory(
|
||||
relativeDirectoryName: string,
|
||||
settings: DirectoryScanSettings = {}
|
||||
): Promise<ParentDirectoryDTO> {
|
||||
Logger.silly(LOG_TAG, 'scanning directory:', relativeDirectoryName);
|
||||
|
||||
let directory: ParentDirectoryDTO;
|
||||
|
||||
if (Config.Server.Threading.enabled === true) {
|
||||
directory = await DiskManager.threadPool.execute(relativeDirectoryName, settings);
|
||||
directory = await DiskManager.threadPool.execute(
|
||||
relativeDirectoryName,
|
||||
settings
|
||||
);
|
||||
} else {
|
||||
directory = await DiskMangerWorker.scanDirectory(relativeDirectoryName, settings) as ParentDirectoryDTO;
|
||||
directory = (await DiskMangerWorker.scanDirectory(
|
||||
relativeDirectoryName,
|
||||
settings
|
||||
)) as ParentDirectoryDTO;
|
||||
}
|
||||
DirectoryDTOUtils.unpackDirectory(directory);
|
||||
return directory;
|
||||
}
|
||||
|
||||
}
|
||||
|
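Illustrative usage sketch for the DiskManager API above, not part of this commit; the relative folder path is invented and the import paths assume a module placed next to DiskManger.ts:

// Hedged example: scanDirectory decides between the thread pool and the in-process worker internally.
import { DiskManager } from './DiskManger';
import { ParentDirectoryDTO } from '../../common/entities/DirectoryDTO';

async function indexHolidayFolder(): Promise<ParentDirectoryDTO> {
  // scanDirectoryNoMetadata() would do the same listing without parsing metadata
  return await DiskManager.scanDirectory('./holiday/2021');
}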
@ -6,8 +6,7 @@ export class FFmpegFactory {
|
||||
ffmpeg.setFfmpegPath(ffmpegPath);
|
||||
const ffprobePath = require('ffprobe-static');
|
||||
ffmpeg.setFfprobePath(ffprobePath.path);
|
||||
} catch (e) {
|
||||
}
|
||||
} catch (e) {}
|
||||
return ffmpeg;
|
||||
}
|
||||
}
|
||||
|
@ -1,19 +1,21 @@
|
||||
import {ProjectPath} from '../ProjectPath';
|
||||
import { ProjectPath } from '../ProjectPath';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import {Config} from '../../common/config/private/Config';
|
||||
import { Config } from '../../common/config/private/Config';
|
||||
|
||||
export class Localizations {
|
||||
|
||||
constructor() {
|
||||
}
|
||||
constructor() {}
|
||||
|
||||
public static init(): void {
|
||||
const notLanguage = ['assets'];
|
||||
const dirCont = fs.readdirSync(ProjectPath.FrontendFolder)
|
||||
.filter((f): any => fs.statSync(path.join(ProjectPath.FrontendFolder, f)).isDirectory());
|
||||
Config.Client.languages = dirCont.filter((d): boolean => notLanguage.indexOf(d) === -1);
|
||||
const dirCont = fs
|
||||
.readdirSync(ProjectPath.FrontendFolder)
|
||||
.filter((f): any =>
|
||||
fs.statSync(path.join(ProjectPath.FrontendFolder, f)).isDirectory()
|
||||
);
|
||||
Config.Client.languages = dirCont.filter(
|
||||
(d): boolean => notLanguage.indexOf(d) === -1
|
||||
);
|
||||
Config.Client.languages.sort();
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,28 +1,30 @@
|
||||
import {NotificationDTO, NotificationType} from '../../common/entities/NotificationDTO';
|
||||
import {Request} from 'express';
|
||||
import {
|
||||
NotificationDTO,
|
||||
NotificationType,
|
||||
} from '../../common/entities/NotificationDTO';
|
||||
import { Request } from 'express';
|
||||
|
||||
export class NotificationManager {
|
||||
public static notifications: NotificationDTO[] = [];
|
||||
public static HasNotification: NotificationDTO[] =
|
||||
[
|
||||
{
|
||||
type: NotificationType.info,
|
||||
message: 'There are unhandled server notifications. Log in as Administrator to handle them.'
message: 'There are unhandled server notifications. Log in as Administrator to handle them.'
|
||||
}
|
||||
];
|
||||
|
||||
public static HasNotification: NotificationDTO[] = [
|
||||
{
|
||||
type: NotificationType.info,
|
||||
message:
|
||||
'There are unhandled server notifications. Log in as Administrator to handle them.',
|
||||
},
|
||||
];
|
||||
|
||||
public static error(message: string, details?: any, req?: Request): void {
|
||||
const noti: NotificationDTO = {
|
||||
type: NotificationType.error,
|
||||
message,
|
||||
details
|
||||
details,
|
||||
};
|
||||
if (req) {
|
||||
noti.request = {
|
||||
method: req.method,
|
||||
url: req.url,
|
||||
statusCode: req.statusCode
|
||||
statusCode: req.statusCode,
|
||||
};
|
||||
}
|
||||
NotificationManager.notifications.push(noti);
|
||||
@ -32,13 +34,13 @@ export class NotificationManager {
|
||||
const noti: NotificationDTO = {
|
||||
type: NotificationType.warning,
|
||||
message,
|
||||
details
|
||||
details,
|
||||
};
|
||||
if (req) {
|
||||
noti.request = {
|
||||
method: req.method,
|
||||
url: req.url,
|
||||
statusCode: req.statusCode
|
||||
statusCode: req.statusCode,
|
||||
};
|
||||
}
|
||||
NotificationManager.notifications.push(noti);
|
||||
|
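Illustrative sketch, not part of this commit: pushing an entry through the manager above; the message and details are invented for the example:

// Hedged example; assumes the NotificationManager defined in this file is in scope.
NotificationManager.error('Failed to list directory', { dir: './holiday/2021' });
// With a request object the method also records method, url and statusCode:
// NotificationManager.error('Failed to list directory', null, req);
// Unhandled entries later surface through NotificationManager.notifications.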
@ -1,24 +1,23 @@
|
||||
import {IUserManager} from './database/interfaces/IUserManager';
|
||||
import {IGalleryManager} from './database/interfaces/IGalleryManager';
|
||||
import {ISearchManager} from './database/interfaces/ISearchManager';
|
||||
import {SQLConnection} from './database/sql/SQLConnection';
|
||||
import {ISharingManager} from './database/interfaces/ISharingManager';
|
||||
import {Logger} from '../Logger';
|
||||
import {IIndexingManager} from './database/interfaces/IIndexingManager';
|
||||
import {IPersonManager} from './database/interfaces/IPersonManager';
|
||||
import {IVersionManager} from './database/interfaces/IVersionManager';
|
||||
import {IJobManager} from './database/interfaces/IJobManager';
|
||||
import {LocationManager} from './database/LocationManager';
|
||||
import {IAlbumManager} from './database/interfaces/IAlbumManager';
|
||||
import {JobManager} from './jobs/JobManager';
|
||||
import {IPreviewManager} from './database/interfaces/IPreviewManager';
|
||||
import {ParentDirectoryDTO} from '../../common/entities/DirectoryDTO';
|
||||
import {IObjectManager} from './database/interfaces/IObjectManager';
|
||||
import { IUserManager } from './database/interfaces/IUserManager';
|
||||
import { IGalleryManager } from './database/interfaces/IGalleryManager';
|
||||
import { ISearchManager } from './database/interfaces/ISearchManager';
|
||||
import { SQLConnection } from './database/sql/SQLConnection';
|
||||
import { ISharingManager } from './database/interfaces/ISharingManager';
|
||||
import { Logger } from '../Logger';
|
||||
import { IIndexingManager } from './database/interfaces/IIndexingManager';
|
||||
import { IPersonManager } from './database/interfaces/IPersonManager';
|
||||
import { IVersionManager } from './database/interfaces/IVersionManager';
|
||||
import { IJobManager } from './database/interfaces/IJobManager';
|
||||
import { LocationManager } from './database/LocationManager';
|
||||
import { IAlbumManager } from './database/interfaces/IAlbumManager';
|
||||
import { JobManager } from './jobs/JobManager';
|
||||
import { IPreviewManager } from './database/interfaces/IPreviewManager';
|
||||
import { ParentDirectoryDTO } from '../../common/entities/DirectoryDTO';
|
||||
import { IObjectManager } from './database/interfaces/IObjectManager';
|
||||
|
||||
const LOG_TAG = '[ObjectManagers]';
|
||||
|
||||
export class ObjectManagers {
|
||||
|
||||
private static instance: ObjectManagers = null;
|
||||
|
||||
private readonly managers: IObjectManager[];
|
||||
@ -34,7 +33,6 @@ export class ObjectManagers {
|
||||
private locationManager: LocationManager;
|
||||
private albumManager: IAlbumManager;
|
||||
|
||||
|
||||
constructor() {
|
||||
this.managers = [];
|
||||
}
|
||||
@ -111,7 +109,6 @@ export class ObjectManagers {
|
||||
this.managers.push(this.indexingManager);
|
||||
}
|
||||
|
||||
|
||||
get GalleryManager(): IGalleryManager {
|
||||
return this.galleryManager;
|
||||
}
|
||||
@ -181,8 +178,10 @@ export class ObjectManagers {
|
||||
|
||||
public static async reset(): Promise<void> {
|
||||
Logger.silly(LOG_TAG, 'Object manager reset begin');
|
||||
if (ObjectManagers.getInstance().IndexingManager &&
|
||||
ObjectManagers.getInstance().IndexingManager.IsSavingInProgress) {
|
||||
if (
|
||||
ObjectManagers.getInstance().IndexingManager &&
|
||||
ObjectManagers.getInstance().IndexingManager.IsSavingInProgress
|
||||
) {
|
||||
await ObjectManagers.getInstance().IndexingManager.SavingReady;
|
||||
}
|
||||
if (ObjectManagers.getInstance().JobManager) {
|
||||
@ -207,20 +206,31 @@ export class ObjectManagers {
|
||||
}
|
||||
|
||||
private static initManagers(type: 'memory' | 'sql'): void {
|
||||
ObjectManagers.getInstance().AlbumManager = new (require(`./database/${type}/AlbumManager`).AlbumManager)();
|
||||
ObjectManagers.getInstance().GalleryManager = new (require(`./database/${type}/GalleryManager`).GalleryManager)();
|
||||
ObjectManagers.getInstance().IndexingManager = new (require(`./database/${type}/IndexingManager`).IndexingManager)();
|
||||
ObjectManagers.getInstance().PersonManager = new (require(`./database/${type}/PersonManager`).PersonManager)();
|
||||
ObjectManagers.getInstance().PreviewManager = new (require(`./database/${type}/PreviewManager`).PreviewManager)();
|
||||
ObjectManagers.getInstance().SearchManager = new (require(`./database/${type}/SearchManager`).SearchManager)();
|
||||
ObjectManagers.getInstance().SharingManager = new (require(`./database/${type}/SharingManager`).SharingManager)();
|
||||
ObjectManagers.getInstance().UserManager = new (require(`./database/${type}/UserManager`).UserManager)();
|
||||
ObjectManagers.getInstance().VersionManager = new (require(`./database/${type}/VersionManager`).VersionManager)();
|
||||
ObjectManagers.getInstance().AlbumManager =
|
||||
new (require(`./database/${type}/AlbumManager`).AlbumManager)();
|
||||
ObjectManagers.getInstance().GalleryManager =
|
||||
new (require(`./database/${type}/GalleryManager`).GalleryManager)();
|
||||
ObjectManagers.getInstance().IndexingManager =
|
||||
new (require(`./database/${type}/IndexingManager`).IndexingManager)();
|
||||
ObjectManagers.getInstance().PersonManager =
|
||||
new (require(`./database/${type}/PersonManager`).PersonManager)();
|
||||
ObjectManagers.getInstance().PreviewManager =
|
||||
new (require(`./database/${type}/PreviewManager`).PreviewManager)();
|
||||
ObjectManagers.getInstance().SearchManager =
|
||||
new (require(`./database/${type}/SearchManager`).SearchManager)();
|
||||
ObjectManagers.getInstance().SharingManager =
|
||||
new (require(`./database/${type}/SharingManager`).SharingManager)();
|
||||
ObjectManagers.getInstance().UserManager =
|
||||
new (require(`./database/${type}/UserManager`).UserManager)();
|
||||
ObjectManagers.getInstance().VersionManager =
|
||||
new (require(`./database/${type}/VersionManager`).VersionManager)();
|
||||
ObjectManagers.getInstance().JobManager = new JobManager();
|
||||
ObjectManagers.getInstance().LocationManager = new LocationManager();
|
||||
}
|
||||
|
||||
public async onDataChange(changedDir: ParentDirectoryDTO = null): Promise<void> {
|
||||
public async onDataChange(
|
||||
changedDir: ParentDirectoryDTO = null
|
||||
): Promise<void> {
|
||||
await this.VersionManager.onNewDataVersion(changedDir);
|
||||
|
||||
for (const manager of this.managers) {
|
||||
@ -232,5 +242,4 @@ export class ObjectManagers {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
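Illustrative sketch, not part of this commit: initManagers above resolves every concrete manager with a dynamic require keyed by the database type. A hypothetical generic helper (the name is an assumption) shows the same idea with one template string:

// Sketch only; mirrors the require(`./database/${type}/...`) pattern used by initManagers.
const loadManager = <T>(type: 'memory' | 'sql', name: string): T => {
  // e.g. loadManager<IUserManager>('sql', 'UserManager')
  const managerModule = require(`./database/${type}/${name}`);
  return new managerModule[name]() as T;
};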
@ -6,11 +6,13 @@ export class PasswordHelper {
|
||||
return bcrypt.hashSync(password, salt);
|
||||
}
|
||||
|
||||
public static comparePassword(password: string, encryptedPassword: string): boolean {
|
||||
public static comparePassword(
|
||||
password: string,
|
||||
encryptedPassword: string
|
||||
): boolean {
|
||||
try {
|
||||
return bcrypt.compareSync(password, encryptedPassword);
|
||||
} catch (e) {
|
||||
}
|
||||
} catch (e) {}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
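Illustrative usage sketch for PasswordHelper above, not part of this commit; only comparePassword is visible in this hunk, and the stored hash below is a placeholder:

// Hedged example; comparePassword swallows bcrypt errors and returns false on malformed input.
import { PasswordHelper } from './PasswordHelper';

const storedHash = '$2b$10$...'; // placeholder, not a real bcrypt hash
const passwordMatches: boolean = PasswordHelper.comparePassword('admin', storedHash);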
@ -1,9 +1,9 @@
|
||||
import {GPSMetadata} from '../../../common/entities/PhotoDTO';
|
||||
import { GPSMetadata } from '../../../common/entities/PhotoDTO';
|
||||
import * as NodeGeocoder from 'node-geocoder';
|
||||
import {LocationLookupException} from '../../exceptions/LocationLookupException';
|
||||
import {LRU} from '../../../common/Utils';
|
||||
import {IObjectManager} from './interfaces/IObjectManager';
|
||||
import {ParentDirectoryDTO} from '../../../common/entities/DirectoryDTO';
|
||||
import { LocationLookupException } from '../../exceptions/LocationLookupException';
|
||||
import { LRU } from '../../../common/Utils';
|
||||
import { IObjectManager } from './interfaces/IObjectManager';
|
||||
import { ParentDirectoryDTO } from '../../../common/entities/DirectoryDTO';
|
||||
|
||||
export class LocationManager implements IObjectManager {
|
||||
// onNewDataVersion is only needed for TypeScript; otherwise the interface is not implemented.
|
||||
@ -12,23 +12,21 @@ export class LocationManager implements IObjectManager {
|
||||
cache = new LRU<GPSMetadata>(100);
|
||||
|
||||
constructor() {
|
||||
this.geocoder = NodeGeocoder({provider: 'openstreetmap'});
|
||||
this.geocoder = NodeGeocoder({ provider: 'openstreetmap' });
|
||||
}
|
||||
|
||||
async getGPSData(text: string): Promise<GPSMetadata> {
|
||||
if (!this.cache.get(text)) {
|
||||
|
||||
const ret = await this.geocoder.geocode(text);
|
||||
if (ret.length < 1) {
|
||||
throw new LocationLookupException('Cannot find location:' + text, text);
|
||||
}
|
||||
this.cache.set(text, {
|
||||
latitude: ret[0].latitude,
|
||||
longitude: ret[0].longitude
|
||||
longitude: ret[0].longitude,
|
||||
});
|
||||
}
|
||||
|
||||
return this.cache.get(text);
|
||||
}
|
||||
|
||||
}
|
||||
|
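Illustrative usage sketch, not part of this commit: a lookup through the manager above is memoised in the 100-entry LRU cache, so repeated queries for the same text hit the geocoder only once. The place name and import path are invented for the example:

// Hedged example; LocationManager is reachable through ObjectManagers at runtime.
import { ObjectManagers } from '../ObjectManagers';

async function lookupBudapest(): Promise<void> {
  const gps = await ObjectManagers.getInstance().LocationManager.getGPSData('Budapest');
  console.log(gps.latitude, gps.longitude);
  // A second call with the same text is served from the cache.
}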
@ -1,19 +1,26 @@
|
||||
import {SearchQueryDTO} from '../../../../common/entities/SearchQueryDTO';
|
||||
import {AlbumBaseDTO} from '../../../../common/entities/album/AlbumBaseDTO';
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import { SearchQueryDTO } from '../../../../common/entities/SearchQueryDTO';
|
||||
import { AlbumBaseDTO } from '../../../../common/entities/album/AlbumBaseDTO';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
|
||||
export interface IAlbumManager extends IObjectManager {
|
||||
/**
|
||||
* Creates a saved search type of album
|
||||
*/
|
||||
addSavedSearch(name: string, searchQuery: SearchQueryDTO, lockedAlbum?: boolean): Promise<void>;
|
||||
|
||||
addSavedSearch(
|
||||
name: string,
|
||||
searchQuery: SearchQueryDTO,
|
||||
lockedAlbum?: boolean
|
||||
): Promise<void>;
|
||||
|
||||
/**
|
||||
* Creates a saved search type of album if the album does not exist yet
|
||||
* lockedAlbum: the album cannot be removed from the UI
|
||||
*/
|
||||
addIfNotExistSavedSearch(name: string, searchQuery: SearchQueryDTO, lockedAlbum?: boolean): Promise<void>;
|
||||
addIfNotExistSavedSearch(
|
||||
name: string,
|
||||
searchQuery: SearchQueryDTO,
|
||||
lockedAlbum?: boolean
|
||||
): Promise<void>;
|
||||
|
||||
/**
|
||||
* Deletes an album
|
||||
|
@ -1,10 +1,10 @@
|
||||
import {ParentDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import { ParentDirectoryDTO } from '../../../../common/entities/DirectoryDTO';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
|
||||
export interface IGalleryManager extends IObjectManager {
|
||||
listDirectory(relativeDirectoryName: string,
|
||||
knownLastModified?: number,
|
||||
knownLastScanned?: number): Promise<ParentDirectoryDTO>;
|
||||
|
||||
|
||||
listDirectory(
|
||||
relativeDirectoryName: string,
|
||||
knownLastModified?: number,
|
||||
knownLastScanned?: number
|
||||
): Promise<ParentDirectoryDTO>;
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
import {ParentDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import { ParentDirectoryDTO } from '../../../../common/entities/DirectoryDTO';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
|
||||
export interface IIndexingManager extends IObjectManager {
|
||||
SavingReady: Promise<void>;
|
||||
|
@ -1,11 +1,14 @@
|
||||
import {JobProgressDTO} from '../../../../common/entities/job/JobProgressDTO';
|
||||
import {JobDTO} from '../../../../common/entities/job/JobDTO';
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import { JobProgressDTO } from '../../../../common/entities/job/JobProgressDTO';
|
||||
import { JobDTO } from '../../../../common/entities/job/JobDTO';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
|
||||
export interface IJobManager extends IObjectManager {
|
||||
|
||||
|
||||
run(jobId: string, config: any, soloRun: boolean, allowParallelRun: boolean): Promise<void>;
|
||||
run(
|
||||
jobId: string,
|
||||
config: any,
|
||||
soloRun: boolean,
|
||||
allowParallelRun: boolean
|
||||
): Promise<void>;
|
||||
|
||||
stop(jobId: string): void;
|
||||
|
||||
@ -16,5 +19,4 @@ export interface IJobManager extends IObjectManager {
|
||||
stopSchedules(): void;
|
||||
|
||||
runSchedules(): void;
|
||||
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
import {ParentDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import { ParentDirectoryDTO } from '../../../../common/entities/DirectoryDTO';
|
||||
|
||||
export interface IObjectManager {
|
||||
onNewDataVersion?: (changedDir?: ParentDirectoryDTO) => Promise<void>;
|
||||
|
@ -1,7 +1,7 @@
|
||||
import {PersonEntry} from '../sql/enitites/PersonEntry';
|
||||
import {PersonDTO} from '../../../../common/entities/PersonDTO';
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import {FaceRegion} from '../../../../common/entities/PhotoDTO';
|
||||
import { PersonEntry } from '../sql/enitites/PersonEntry';
|
||||
import { PersonDTO } from '../../../../common/entities/PersonDTO';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
import { FaceRegion } from '../../../../common/entities/PhotoDTO';
|
||||
|
||||
export interface IPersonManager extends IObjectManager {
|
||||
getAll(): Promise<PersonEntry[]>;
|
||||
@ -9,7 +9,7 @@ export interface IPersonManager extends IObjectManager {
|
||||
get(name: string): Promise<PersonEntry>;
|
||||
|
||||
// Saves a Person with a sample region. A Person entry cannot exist without a face region.
|
||||
saveAll(person: { name: string, faceRegion: FaceRegion }[]): Promise<void>;
|
||||
saveAll(person: { name: string; faceRegion: FaceRegion }[]): Promise<void>;
|
||||
|
||||
updatePerson(name: string, partialPerson: PersonDTO): Promise<PersonEntry>;
|
||||
|
||||
|
@ -1,14 +1,21 @@
|
||||
import {PreviewPhotoDTO} from '../../../../common/entities/PhotoDTO';
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import {SearchQueryDTO} from '../../../../common/entities/SearchQueryDTO';
|
||||
import { PreviewPhotoDTO } from '../../../../common/entities/PhotoDTO';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
import { SearchQueryDTO } from '../../../../common/entities/SearchQueryDTO';
|
||||
|
||||
export interface IPreviewManager extends IObjectManager {
|
||||
setAndGetPreviewForDirectory(dir: { id: number, name: string, path: string }): Promise<PreviewPhotoDTOWithID>;
|
||||
setAndGetPreviewForDirectory(dir: {
|
||||
id: number;
|
||||
name: string;
|
||||
path: string;
|
||||
}): Promise<PreviewPhotoDTOWithID>;
|
||||
|
||||
getAlbumPreview(album: { searchQuery: SearchQueryDTO }): Promise<PreviewPhotoDTOWithID>;
|
||||
|
||||
getPartialDirsWithoutPreviews(): Promise<{ id: number; name: string; path: string }[]>;
|
||||
getAlbumPreview(album: {
|
||||
searchQuery: SearchQueryDTO;
|
||||
}): Promise<PreviewPhotoDTOWithID>;
|
||||
|
||||
getPartialDirsWithoutPreviews(): Promise<
|
||||
{ id: number; name: string; path: string }[]
|
||||
>;
|
||||
|
||||
resetPreviews(): Promise<void>;
|
||||
}
|
||||
|
@ -1,11 +1,17 @@
|
||||
import {AutoCompleteItem} from '../../../../common/entities/AutoCompleteItem';
|
||||
import {SearchResultDTO} from '../../../../common/entities/SearchResultDTO';
|
||||
import {SearchQueryDTO, SearchQueryTypes} from '../../../../common/entities/SearchQueryDTO';
|
||||
import {PhotoDTO} from '../../../../common/entities/PhotoDTO';
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import { AutoCompleteItem } from '../../../../common/entities/AutoCompleteItem';
|
||||
import { SearchResultDTO } from '../../../../common/entities/SearchResultDTO';
|
||||
import {
|
||||
SearchQueryDTO,
|
||||
SearchQueryTypes,
|
||||
} from '../../../../common/entities/SearchQueryDTO';
|
||||
import { PhotoDTO } from '../../../../common/entities/PhotoDTO';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
|
||||
export interface ISearchManager extends IObjectManager{
|
||||
autocomplete(text: string, type: SearchQueryTypes): Promise<AutoCompleteItem[]>;
|
||||
export interface ISearchManager extends IObjectManager {
|
||||
autocomplete(
|
||||
text: string,
|
||||
type: SearchQueryTypes
|
||||
): Promise<AutoCompleteItem[]>;
|
||||
|
||||
search(query: SearchQueryDTO): Promise<SearchResultDTO>;
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
import {SharingDTO} from '../../../../common/entities/SharingDTO';
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import {FindOptionsWhere} from 'typeorm';
|
||||
import { SharingDTO } from '../../../../common/entities/SharingDTO';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
import { FindOptionsWhere } from 'typeorm';
|
||||
|
||||
export interface ISharingManager extends IObjectManager {
|
||||
findOne(filter: FindOptionsWhere<SharingDTO>): Promise<SharingDTO>;
|
||||
|
@ -1,6 +1,6 @@
|
||||
import {UserDTO, UserRoles} from '../../../../common/entities/UserDTO';
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import {FindOptionsWhere} from 'typeorm';
|
||||
import { UserDTO, UserRoles } from '../../../../common/entities/UserDTO';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
import { FindOptionsWhere } from 'typeorm';
|
||||
|
||||
export interface IUserManager extends IObjectManager {
|
||||
findOne(filter: FindOptionsWhere<UserDTO>): Promise<UserDTO>;
|
||||
|
@ -1,4 +1,4 @@
|
||||
import {IObjectManager} from './IObjectManager';
|
||||
import { IObjectManager } from './IObjectManager';
|
||||
|
||||
export interface IVersionManager extends IObjectManager {
|
||||
getDataVersion(): Promise<string>;
|
||||
|
@ -1,6 +1,6 @@
|
||||
import {AlbumBaseDTO} from '../../../../common/entities/album/AlbumBaseDTO';
|
||||
import {SearchQueryDTO} from '../../../../common/entities/SearchQueryDTO';
|
||||
import {IAlbumManager} from '../interfaces/IAlbumManager';
|
||||
import { AlbumBaseDTO } from '../../../../common/entities/album/AlbumBaseDTO';
|
||||
import { SearchQueryDTO } from '../../../../common/entities/SearchQueryDTO';
|
||||
import { IAlbumManager } from '../interfaces/IAlbumManager';
|
||||
|
||||
export class AlbumManager implements IAlbumManager {
|
||||
resetPreviews(): Promise<void> {
|
||||
@ -15,11 +15,19 @@ export class AlbumManager implements IAlbumManager {
|
||||
throw new Error('not supported by memory DB');
|
||||
}
|
||||
|
||||
public async addIfNotExistSavedSearch(name: string, searchQuery: SearchQueryDTO, lockedAlbum?: boolean): Promise<void> {
|
||||
public async addIfNotExistSavedSearch(
|
||||
name: string,
|
||||
searchQuery: SearchQueryDTO,
|
||||
lockedAlbum?: boolean
|
||||
): Promise<void> {
|
||||
throw new Error('not supported by memory DB');
|
||||
}
|
||||
|
||||
public async addSavedSearch(name: string, searchQuery: SearchQueryDTO, lockedAlbum?: boolean): Promise<void> {
|
||||
public async addSavedSearch(
|
||||
name: string,
|
||||
searchQuery: SearchQueryDTO,
|
||||
lockedAlbum?: boolean
|
||||
): Promise<void> {
|
||||
throw new Error('not supported by memory DB');
|
||||
}
|
||||
|
||||
|
@ -1,32 +1,38 @@
|
||||
import {ParentDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import {IGalleryManager} from '../interfaces/IGalleryManager';
|
||||
import { ParentDirectoryDTO } from '../../../../common/entities/DirectoryDTO';
|
||||
import { IGalleryManager } from '../interfaces/IGalleryManager';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import {DiskManager} from '../../DiskManger';
|
||||
import {ProjectPath} from '../../../ProjectPath';
|
||||
import {Config} from '../../../../common/config/private/Config';
|
||||
import {DiskMangerWorker} from '../../threading/DiskMangerWorker';
|
||||
import {ReIndexingSensitivity} from '../../../../common/config/private/PrivateConfig';
|
||||
import {ServerPG2ConfMap} from '../../../../common/PG2ConfMap';
|
||||
import { DiskManager } from '../../DiskManger';
|
||||
import { ProjectPath } from '../../../ProjectPath';
|
||||
import { Config } from '../../../../common/config/private/Config';
|
||||
import { DiskMangerWorker } from '../../threading/DiskMangerWorker';
|
||||
import { ReIndexingSensitivity } from '../../../../common/config/private/PrivateConfig';
|
||||
import { ServerPG2ConfMap } from '../../../../common/PG2ConfMap';
|
||||
|
||||
export class GalleryManager implements IGalleryManager {
|
||||
|
||||
public async listDirectory(relativeDirectoryName: string,
|
||||
knownLastModified?: number,
|
||||
knownLastScanned?: number): Promise<ParentDirectoryDTO> {
|
||||
public async listDirectory(
|
||||
relativeDirectoryName: string,
|
||||
knownLastModified?: number,
|
||||
knownLastScanned?: number
|
||||
): Promise<ParentDirectoryDTO> {
|
||||
// If it seems that the content did not change, do not work on it
|
||||
if (knownLastModified && knownLastScanned) {
|
||||
const stat = fs.statSync(path.join(ProjectPath.ImageFolder, relativeDirectoryName));
|
||||
const stat = fs.statSync(
|
||||
path.join(ProjectPath.ImageFolder, relativeDirectoryName)
|
||||
);
|
||||
const lastModified = DiskMangerWorker.calcLastModified(stat);
|
||||
if (Date.now() - knownLastScanned <= Config.Server.Indexing.cachedFolderTimeout &&
|
||||
if (
|
||||
Date.now() - knownLastScanned <=
|
||||
Config.Server.Indexing.cachedFolderTimeout &&
|
||||
lastModified === knownLastModified &&
|
||||
Config.Server.Indexing.reIndexingSensitivity < ReIndexingSensitivity.high) {
|
||||
Config.Server.Indexing.reIndexingSensitivity <
|
||||
ReIndexingSensitivity.high
|
||||
) {
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
}
|
||||
const dir = await DiskManager.scanDirectory(relativeDirectoryName);
|
||||
dir.metaFile = dir.metaFile.filter(m => !ServerPG2ConfMap[m.name]);
|
||||
dir.metaFile = dir.metaFile.filter((m) => !ServerPG2ConfMap[m.name]);
|
||||
return dir;
|
||||
}
|
||||
|
||||
}
|
||||
|
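Illustrative sketch, not part of this commit: how a caller can pass the cache hints that listDirectory above checks; the timestamps and import path are invented for the example:

// Hedged example; with fresh-enough hints (and reIndexingSensitivity below high) the method resolves to null instead of rescanning.
import { GalleryManager } from './GalleryManager';

async function refreshIfChanged(): Promise<void> {
  const galleryManager = new GalleryManager();
  const knownLastModified = Date.now() - 60 * 1000; // what the client reported last time
  const knownLastScanned = Date.now() - 30 * 1000;
  const dir = await galleryManager.listDirectory('./holiday/2021', knownLastModified, knownLastScanned);
  if (dir === null) {
    // content unchanged within Config.Server.Indexing.cachedFolderTimeout, nothing to do
  }
}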
@ -1,5 +1,5 @@
|
||||
import {IIndexingManager} from '../interfaces/IIndexingManager';
|
||||
import {ParentDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import { IIndexingManager } from '../interfaces/IIndexingManager';
|
||||
import { ParentDirectoryDTO } from '../../../../common/entities/DirectoryDTO';
|
||||
|
||||
export class IndexingManager implements IIndexingManager {
|
||||
IsSavingInProgress: boolean;
|
||||
@ -16,6 +16,4 @@ export class IndexingManager implements IIndexingManager {
|
||||
resetDB(): Promise<void> {
|
||||
throw new Error('not supported by memory DB');
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -1,11 +1,12 @@
|
||||
import {IPersonManager} from '../interfaces/IPersonManager';
|
||||
import {PersonDTO} from '../../../../common/entities/PersonDTO';
|
||||
import {FaceRegion} from '../../../../common/entities/PhotoDTO';
|
||||
import { IPersonManager } from '../interfaces/IPersonManager';
|
||||
import { PersonDTO } from '../../../../common/entities/PersonDTO';
|
||||
import { FaceRegion } from '../../../../common/entities/PhotoDTO';
|
||||
|
||||
export class PersonManager implements IPersonManager {
|
||||
resetPreviews(): Promise<void> {
|
||||
throw new Error('not supported by memory DB');
|
||||
}
|
||||
|
||||
saveAll(person: { name: string; faceRegion: FaceRegion }[]): Promise<void> {
|
||||
throw new Error('not supported by memory DB');
|
||||
}
|
||||
@ -18,7 +19,6 @@ export class PersonManager implements IPersonManager {
|
||||
throw new Error('not supported by memory DB');
|
||||
}
|
||||
|
||||
|
||||
onGalleryIndexUpdate(): Promise<void> {
|
||||
throw new Error('not supported by memory DB');
|
||||
}
|
||||
|
@ -1,14 +1,16 @@
|
||||
import {IPreviewManager} from '../interfaces/IPreviewManager';
|
||||
import {DirectoryPathDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import {MediaDTO} from '../../../../common/entities/MediaDTO';
|
||||
import {SavedSearchDTO} from '../../../../common/entities/album/SavedSearchDTO';
|
||||
import { IPreviewManager } from '../interfaces/IPreviewManager';
|
||||
import { DirectoryPathDTO } from '../../../../common/entities/DirectoryDTO';
|
||||
import { MediaDTO } from '../../../../common/entities/MediaDTO';
|
||||
import { SavedSearchDTO } from '../../../../common/entities/album/SavedSearchDTO';
|
||||
|
||||
export class PreviewManager implements IPreviewManager {
|
||||
resetPreviews(): Promise<void> {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
|
||||
getPartialDirsWithoutPreviews(): Promise<{ id: number; name: string; path: string }[]> {
|
||||
getPartialDirsWithoutPreviews(): Promise<
|
||||
{ id: number; name: string; path: string }[]
|
||||
> {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
|
||||
|
@ -1,15 +1,21 @@
|
||||
import {AutoCompleteItem} from '../../../../common/entities/AutoCompleteItem';
|
||||
import {ISearchManager} from '../interfaces/ISearchManager';
|
||||
import {SearchResultDTO} from '../../../../common/entities/SearchResultDTO';
|
||||
import {SearchQueryDTO, SearchQueryTypes} from '../../../../common/entities/SearchQueryDTO';
|
||||
import {PhotoDTO} from '../../../../common/entities/PhotoDTO';
|
||||
import { AutoCompleteItem } from '../../../../common/entities/AutoCompleteItem';
|
||||
import { ISearchManager } from '../interfaces/ISearchManager';
|
||||
import { SearchResultDTO } from '../../../../common/entities/SearchResultDTO';
|
||||
import {
|
||||
SearchQueryDTO,
|
||||
SearchQueryTypes,
|
||||
} from '../../../../common/entities/SearchQueryDTO';
|
||||
import { PhotoDTO } from '../../../../common/entities/PhotoDTO';
|
||||
|
||||
export class SearchManager implements ISearchManager {
|
||||
getRandomPhoto(queryFilter: SearchQueryDTO): Promise<PhotoDTO> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
|
||||
autocomplete(text: string, type: SearchQueryTypes): Promise<AutoCompleteItem[]> {
|
||||
autocomplete(
|
||||
text: string,
|
||||
type: SearchQueryTypes
|
||||
): Promise<AutoCompleteItem[]> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
|
||||
|
@ -1,8 +1,7 @@
|
||||
import {ISharingManager} from '../interfaces/ISharingManager';
|
||||
import {SharingDTO} from '../../../../common/entities/SharingDTO';
|
||||
import { ISharingManager } from '../interfaces/ISharingManager';
|
||||
import { SharingDTO } from '../../../../common/entities/SharingDTO';
|
||||
|
||||
export class SharingManager implements ISharingManager {
|
||||
|
||||
deleteSharing(sharingKey: string): Promise<void> {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
@ -19,9 +18,10 @@ export class SharingManager implements ISharingManager {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
|
||||
updateSharing(sharing: SharingDTO, forceUpdate: boolean): Promise<SharingDTO> {
|
||||
updateSharing(
|
||||
sharing: SharingDTO,
|
||||
forceUpdate: boolean
|
||||
): Promise<SharingDTO> {
|
||||
throw new Error('not implemented');
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -1,17 +1,15 @@
|
||||
import {UserDTO, UserRoles} from '../../../../common/entities/UserDTO';
|
||||
import {IUserManager} from '../interfaces/IUserManager';
|
||||
import {ProjectPath} from '../../../ProjectPath';
|
||||
import {Utils} from '../../../../common/Utils';
|
||||
import { UserDTO, UserRoles } from '../../../../common/entities/UserDTO';
|
||||
import { IUserManager } from '../interfaces/IUserManager';
|
||||
import { ProjectPath } from '../../../ProjectPath';
|
||||
import { Utils } from '../../../../common/Utils';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import {PasswordHelper} from '../../PasswordHelper';
|
||||
|
||||
import { PasswordHelper } from '../../PasswordHelper';
|
||||
|
||||
export class UserManager implements IUserManager {
|
||||
private db: { users?: UserDTO[], idCounter?: number } = {};
|
||||
private db: { users?: UserDTO[]; idCounter?: number } = {};
|
||||
private readonly dbPath: string;
|
||||
|
||||
|
||||
constructor() {
|
||||
this.dbPath = path.join(ProjectPath.DBFolder, 'users.db');
|
||||
if (fs.existsSync(this.dbPath)) {
|
||||
@ -25,13 +23,15 @@ export class UserManager implements IUserManager {
|
||||
if (!this.db.users) {
|
||||
this.db.users = [];
|
||||
// TODO: remove defaults
|
||||
this.createUser({name: 'admin', password: 'admin', role: UserRoles.Admin} as UserDTO);
|
||||
this.createUser({
|
||||
name: 'admin',
|
||||
password: 'admin',
|
||||
role: UserRoles.Admin,
|
||||
} as UserDTO);
|
||||
}
|
||||
this.saveDB();
|
||||
|
||||
}
|
||||
|
||||
|
||||
public async findOne(filter: any): Promise<UserDTO> {
|
||||
const result = await this.find(filter);
|
||||
|
||||
@ -47,7 +47,7 @@ export class UserManager implements IUserManager {
|
||||
const users = this.db.users.slice();
|
||||
let i = users.length;
|
||||
while (i--) {
|
||||
if (pass && !(PasswordHelper.comparePassword(pass, users[i].password))) {
|
||||
if (pass && !PasswordHelper.comparePassword(pass, users[i].password)) {
|
||||
users.splice(i, 1);
|
||||
continue;
|
||||
}
|
||||
@ -98,5 +98,4 @@ export class UserManager implements IUserManager {
|
||||
private saveDB(): void {
|
||||
fs.writeFileSync(this.dbPath, JSON.stringify(this.db));
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
import {IVersionManager} from '../interfaces/IVersionManager';
|
||||
import {DataStructureVersion} from '../../../../common/DataStructureVersion';
|
||||
import { IVersionManager } from '../interfaces/IVersionManager';
|
||||
import { DataStructureVersion } from '../../../../common/DataStructureVersion';
|
||||
|
||||
export class VersionManager implements IVersionManager {
|
||||
async getDataVersion(): Promise<string> {
|
||||
|
@ -1,18 +1,17 @@
|
||||
import {SQLConnection} from './SQLConnection';
|
||||
import {AlbumBaseEntity} from './enitites/album/AlbumBaseEntity';
|
||||
import {AlbumBaseDTO} from '../../../../common/entities/album/AlbumBaseDTO';
|
||||
import {SavedSearchDTO} from '../../../../common/entities/album/SavedSearchDTO';
|
||||
import {ObjectManagers} from '../../ObjectManagers';
|
||||
import {ISQLSearchManager} from './ISearchManager';
|
||||
import {SearchQueryDTO} from '../../../../common/entities/SearchQueryDTO';
|
||||
import {SavedSearchEntity} from './enitites/album/SavedSearchEntity';
|
||||
import {IAlbumManager} from '../interfaces/IAlbumManager';
|
||||
import {Logger} from '../../../Logger';
|
||||
import { SQLConnection } from './SQLConnection';
|
||||
import { AlbumBaseEntity } from './enitites/album/AlbumBaseEntity';
|
||||
import { AlbumBaseDTO } from '../../../../common/entities/album/AlbumBaseDTO';
|
||||
import { SavedSearchDTO } from '../../../../common/entities/album/SavedSearchDTO';
|
||||
import { ObjectManagers } from '../../ObjectManagers';
|
||||
import { ISQLSearchManager } from './ISearchManager';
|
||||
import { SearchQueryDTO } from '../../../../common/entities/SearchQueryDTO';
|
||||
import { SavedSearchEntity } from './enitites/album/SavedSearchEntity';
|
||||
import { IAlbumManager } from '../interfaces/IAlbumManager';
|
||||
import { Logger } from '../../../Logger';
|
||||
|
||||
const LOG_TAG = '[AlbumManager]';
|
||||
|
||||
export class AlbumManager implements IAlbumManager {
|
||||
|
||||
/**
|
||||
* The Person table contains denormalized data that needs to be updated when isDBValid = false
|
||||
*/
|
||||
@ -20,57 +19,73 @@ export class AlbumManager implements IAlbumManager {
|
||||
|
||||
private static async updateAlbum(album: SavedSearchEntity): Promise<void> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const preview = await ObjectManagers.getInstance().PreviewManager
|
||||
.getAlbumPreview(album);
|
||||
const count = await (ObjectManagers.getInstance().SearchManager as ISQLSearchManager)
|
||||
.getCount((album as SavedSearchDTO).searchQuery);
|
||||
const preview =
|
||||
await ObjectManagers.getInstance().PreviewManager.getAlbumPreview(album);
|
||||
const count = await (
|
||||
ObjectManagers.getInstance().SearchManager as ISQLSearchManager
|
||||
).getCount((album as SavedSearchDTO).searchQuery);
|
||||
|
||||
await connection
|
||||
.createQueryBuilder()
|
||||
.update(AlbumBaseEntity)
|
||||
.set({preview, count})
|
||||
.where('id = :id', {id: album.id})
|
||||
.set({ preview, count })
|
||||
.where('id = :id', { id: album.id })
|
||||
.execute();
|
||||
}
|
||||
|
||||
public async addIfNotExistSavedSearch(name: string, searchQuery: SearchQueryDTO, lockedAlbum: boolean): Promise<void> {
|
||||
public async addIfNotExistSavedSearch(
|
||||
name: string,
|
||||
searchQuery: SearchQueryDTO,
|
||||
lockedAlbum: boolean
|
||||
): Promise<void> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const album = await connection.getRepository(SavedSearchEntity)
|
||||
.findOneBy({name, searchQuery});
|
||||
const album = await connection
|
||||
.getRepository(SavedSearchEntity)
|
||||
.findOneBy({ name, searchQuery });
|
||||
if (album) {
|
||||
return;
|
||||
}
|
||||
await this.addSavedSearch(name, searchQuery, lockedAlbum);
|
||||
}
|
||||
|
||||
public async addSavedSearch(name: string, searchQuery: SearchQueryDTO, lockedAlbum?: boolean): Promise<void> {
|
||||
public async addSavedSearch(
|
||||
name: string,
|
||||
searchQuery: SearchQueryDTO,
|
||||
lockedAlbum?: boolean
|
||||
): Promise<void> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const a = await connection.getRepository(SavedSearchEntity).save({name, searchQuery, locked: lockedAlbum});
|
||||
const a = await connection
|
||||
.getRepository(SavedSearchEntity)
|
||||
.save({ name, searchQuery, locked: lockedAlbum });
|
||||
await AlbumManager.updateAlbum(a);
|
||||
}
|
||||
|
||||
public async deleteAlbum(id: number): Promise<void> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
|
||||
if (await connection.getRepository(AlbumBaseEntity)
|
||||
.countBy({id, locked: false}) !== 1) {
|
||||
if (
|
||||
(await connection
|
||||
.getRepository(AlbumBaseEntity)
|
||||
.countBy({ id, locked: false })) !== 1
|
||||
) {
|
||||
throw new Error('Could not delete album, id:' + id);
|
||||
}
|
||||
|
||||
await connection.getRepository(AlbumBaseEntity).delete({id, locked: false});
|
||||
|
||||
await connection
|
||||
.getRepository(AlbumBaseEntity)
|
||||
.delete({ id, locked: false });
|
||||
}
|
||||
|
||||
public async getAlbums(): Promise<AlbumBaseDTO[]> {
|
||||
await this.updateAlbums();
|
||||
const connection = await SQLConnection.getConnection();
|
||||
return await connection.getRepository(AlbumBaseEntity)
|
||||
return await connection
|
||||
.getRepository(AlbumBaseEntity)
|
||||
.createQueryBuilder('album')
|
||||
.innerJoin('album.preview', 'preview')
|
||||
.innerJoin('preview.directory', 'directory')
|
||||
.select(['album', 'preview.name',
|
||||
'directory.name',
|
||||
'directory.path']).getMany();
|
||||
.select(['album', 'preview.name', 'directory.name', 'directory.path'])
|
||||
.getMany();
|
||||
}
|
||||
|
||||
public async onNewDataVersion(): Promise<void> {
|
||||
@ -94,5 +109,4 @@ export class AlbumManager implements IAlbumManager {
|
||||
}
|
||||
this.isDBValid = true;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,84 +1,128 @@
|
||||
import {IGalleryManager} from '../interfaces/IGalleryManager';
|
||||
import {ParentDirectoryDTO, SubDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import { IGalleryManager } from '../interfaces/IGalleryManager';
|
||||
import {
|
||||
ParentDirectoryDTO,
|
||||
SubDirectoryDTO,
|
||||
} from '../../../../common/entities/DirectoryDTO';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import {DirectoryEntity} from './enitites/DirectoryEntity';
|
||||
import {SQLConnection} from './SQLConnection';
|
||||
import {PhotoEntity} from './enitites/PhotoEntity';
|
||||
import {ProjectPath} from '../../../ProjectPath';
|
||||
import {Config} from '../../../../common/config/private/Config';
|
||||
import {ISQLGalleryManager} from './IGalleryManager';
|
||||
import {PhotoDTO} from '../../../../common/entities/PhotoDTO';
|
||||
import {Brackets, Connection, WhereExpression} from 'typeorm';
|
||||
import {MediaEntity} from './enitites/MediaEntity';
|
||||
import {VideoEntity} from './enitites/VideoEntity';
|
||||
import {DiskMangerWorker} from '../../threading/DiskMangerWorker';
|
||||
import {Logger} from '../../../Logger';
|
||||
import {FaceRegionEntry} from './enitites/FaceRegionEntry';
|
||||
import {ObjectManagers} from '../../ObjectManagers';
|
||||
import {DuplicatesDTO} from '../../../../common/entities/DuplicatesDTO';
|
||||
import {ReIndexingSensitivity} from '../../../../common/config/private/PrivateConfig';
|
||||
|
||||
import { DirectoryEntity } from './enitites/DirectoryEntity';
|
||||
import { SQLConnection } from './SQLConnection';
|
||||
import { PhotoEntity } from './enitites/PhotoEntity';
|
||||
import { ProjectPath } from '../../../ProjectPath';
|
||||
import { Config } from '../../../../common/config/private/Config';
|
||||
import { ISQLGalleryManager } from './IGalleryManager';
|
||||
import { PhotoDTO } from '../../../../common/entities/PhotoDTO';
|
||||
import { Connection } from 'typeorm';
|
||||
import { MediaEntity } from './enitites/MediaEntity';
|
||||
import { VideoEntity } from './enitites/VideoEntity';
|
||||
import { DiskMangerWorker } from '../../threading/DiskMangerWorker';
|
||||
import { Logger } from '../../../Logger';
|
||||
import { FaceRegionEntry } from './enitites/FaceRegionEntry';
|
||||
import { ObjectManagers } from '../../ObjectManagers';
|
||||
import { DuplicatesDTO } from '../../../../common/entities/DuplicatesDTO';
|
||||
import { ReIndexingSensitivity } from '../../../../common/config/private/PrivateConfig';
|
||||
|
||||
const LOG_TAG = '[GalleryManager]';
|
||||
|
||||
export class GalleryManager implements IGalleryManager, ISQLGalleryManager {
|
||||
|
||||
public static parseRelativeDirePath(relativeDirectoryName: string): { name: string, parent: string } {
|
||||
|
||||
relativeDirectoryName = DiskMangerWorker.normalizeDirPath(relativeDirectoryName);
|
||||
public static parseRelativeDirePath(relativeDirectoryName: string): {
|
||||
name: string;
|
||||
parent: string;
|
||||
} {
|
||||
relativeDirectoryName = DiskMangerWorker.normalizeDirPath(
|
||||
relativeDirectoryName
|
||||
);
|
||||
return {
|
||||
name: path.basename(relativeDirectoryName),
|
||||
parent: path.join(path.dirname(relativeDirectoryName), path.sep),
|
||||
};
|
||||
}
|
||||
|
||||
public async listDirectory(relativeDirectoryName: string,
|
||||
knownLastModified?: number,
|
||||
knownLastScanned?: number): Promise<ParentDirectoryDTO> {
|
||||
const directoryPath = GalleryManager.parseRelativeDirePath(relativeDirectoryName);
|
||||
public async listDirectory(
|
||||
relativeDirectoryName: string,
|
||||
knownLastModified?: number,
|
||||
knownLastScanned?: number
|
||||
): Promise<ParentDirectoryDTO> {
|
||||
const directoryPath = GalleryManager.parseRelativeDirePath(
|
||||
relativeDirectoryName
|
||||
);
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const stat = fs.statSync(path.join(ProjectPath.ImageFolder, relativeDirectoryName));
|
||||
const stat = fs.statSync(
|
||||
path.join(ProjectPath.ImageFolder, relativeDirectoryName)
|
||||
);
|
||||
const lastModified = DiskMangerWorker.calcLastModified(stat);
|
||||
|
||||
const dir = await this.selectParentDir(connection, directoryPath.name, directoryPath.parent);
|
||||
const dir = await this.selectParentDir(
|
||||
connection,
|
||||
directoryPath.name,
|
||||
directoryPath.parent
|
||||
);
|
||||
if (dir && dir.lastScanned != null) {
|
||||
// If it seems that the content did not change, do not work on it
|
||||
if (knownLastModified && knownLastScanned
|
||||
&& lastModified === knownLastModified &&
|
||||
dir.lastScanned === knownLastScanned) {
|
||||
if (Config.Server.Indexing.reIndexingSensitivity === ReIndexingSensitivity.low) {
|
||||
if (
|
||||
knownLastModified &&
|
||||
knownLastScanned &&
|
||||
lastModified === knownLastModified &&
|
||||
dir.lastScanned === knownLastScanned
|
||||
) {
|
||||
if (
|
||||
Config.Server.Indexing.reIndexingSensitivity ===
|
||||
ReIndexingSensitivity.low
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
if (Date.now() - dir.lastScanned <= Config.Server.Indexing.cachedFolderTimeout &&
|
||||
Config.Server.Indexing.reIndexingSensitivity === ReIndexingSensitivity.medium) {
|
||||
if (
|
||||
Date.now() - dir.lastScanned <=
|
||||
Config.Server.Indexing.cachedFolderTimeout &&
|
||||
Config.Server.Indexing.reIndexingSensitivity ===
|
||||
ReIndexingSensitivity.medium
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (dir.lastModified !== lastModified) {
|
||||
Logger.silly(LOG_TAG, 'Reindexing reason: lastModified mismatch: known: '
|
||||
+ dir.lastModified + ', current:' + lastModified);
|
||||
const ret = await ObjectManagers.getInstance().IndexingManager.indexDirectory(relativeDirectoryName);
|
||||
Logger.silly(
|
||||
LOG_TAG,
|
||||
'Reindexing reason: lastModified mismatch: known: ' +
|
||||
dir.lastModified +
|
||||
', current:' +
|
||||
lastModified
|
||||
);
|
||||
const ret =
|
||||
await ObjectManagers.getInstance().IndexingManager.indexDirectory(
|
||||
relativeDirectoryName
|
||||
);
|
||||
for (const subDir of ret.directories) {
|
||||
if (!subDir.preview) { // if a subdirectory does not have photos, it cannot show a preview; try to get one from the DB
|
||||
if (!subDir.preview) {
|
||||
// if a subdirectory does not have photos, it cannot show a preview; try to get one from the DB
|
||||
await this.fillPreviewForSubDir(connection, subDir);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
// not indexed for a while, index it in a lazy manner
|
||||
if ((Date.now() - dir.lastScanned > Config.Server.Indexing.cachedFolderTimeout &&
|
||||
Config.Server.Indexing.reIndexingSensitivity >= ReIndexingSensitivity.medium) ||
|
||||
Config.Server.Indexing.reIndexingSensitivity >= ReIndexingSensitivity.high) {
|
||||
if (
|
||||
(Date.now() - dir.lastScanned >
|
||||
Config.Server.Indexing.cachedFolderTimeout &&
|
||||
Config.Server.Indexing.reIndexingSensitivity >=
|
||||
ReIndexingSensitivity.medium) ||
|
||||
Config.Server.Indexing.reIndexingSensitivity >=
|
||||
ReIndexingSensitivity.high
|
||||
) {
|
||||
// on the fly reindexing
|
||||
|
||||
Logger.silly(LOG_TAG, 'lazy reindexing reason: cache timeout: lastScanned: '
|
||||
+ (Date.now() - dir.lastScanned) + 'ms ago, cachedFolderTimeout:' + Config.Server.Indexing.cachedFolderTimeout);
|
||||
ObjectManagers.getInstance().IndexingManager.indexDirectory(relativeDirectoryName).catch(console.error);
|
||||
Logger.silly(
|
||||
LOG_TAG,
|
||||
'lazy reindexing reason: cache timeout: lastScanned: ' +
|
||||
(Date.now() - dir.lastScanned) +
|
||||
'ms ago, cachedFolderTimeout:' +
|
||||
Config.Server.Indexing.cachedFolderTimeout
|
||||
);
|
||||
ObjectManagers.getInstance()
|
||||
.IndexingManager.indexDirectory(relativeDirectoryName)
|
||||
.catch(console.error);
|
||||
}
|
||||
await this.fillParentDir(connection, dir);
|
||||
return dir;
|
||||
@ -86,21 +130,23 @@ export class GalleryManager implements IGalleryManager, ISQLGalleryManager {
|
||||
|
||||
// never scanned (deep indexed), index it now and return the result
|
||||
Logger.silly(LOG_TAG, 'Reindexing reason: never scanned');
|
||||
return ObjectManagers.getInstance().IndexingManager.indexDirectory(relativeDirectoryName);
|
||||
|
||||
|
||||
return ObjectManagers.getInstance().IndexingManager.indexDirectory(
|
||||
relativeDirectoryName
|
||||
);
|
||||
}
|
||||
|
||||
async countDirectories(): Promise<number> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
return await connection.getRepository(DirectoryEntity)
|
||||
return await connection
|
||||
.getRepository(DirectoryEntity)
|
||||
.createQueryBuilder('directory')
|
||||
.getCount();
|
||||
}
|
||||
|
||||
async countMediaSize(): Promise<number> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const {sum} = await connection.getRepository(MediaEntity)
|
||||
const { sum } = await connection
|
||||
.getRepository(MediaEntity)
|
||||
.createQueryBuilder('media')
|
||||
.select('SUM(media.metadata.fileSize)', 'sum')
|
||||
.getRawOne();
|
||||
@ -109,14 +155,16 @@ export class GalleryManager implements IGalleryManager, ISQLGalleryManager {
|
||||
|
||||
async countPhotos(): Promise<number> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
return await connection.getRepository(PhotoEntity)
|
||||
return await connection
|
||||
.getRepository(PhotoEntity)
|
||||
.createQueryBuilder('directory')
|
||||
.getCount();
|
||||
}
|
||||
|
||||
async countVideos(): Promise<number> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
return await connection.getRepository(VideoEntity)
|
||||
return await connection
|
||||
.getRepository(VideoEntity)
|
||||
.createQueryBuilder('directory')
|
||||
.getCount();
|
||||
}
|
||||
@ -125,22 +173,33 @@ export class GalleryManager implements IGalleryManager, ISQLGalleryManager {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const mediaRepository = connection.getRepository(MediaEntity);
|
||||
|
||||
let duplicates = await mediaRepository.createQueryBuilder('media')
|
||||
.innerJoin((query): any => query.from(MediaEntity, 'innerMedia')
|
||||
.select(['innerMedia.name as name', 'innerMedia.metadata.fileSize as fileSize', 'count(*)'])
|
||||
.groupBy('innerMedia.name, innerMedia.metadata.fileSize')
|
||||
.having('count(*)>1'),
|
||||
let duplicates = await mediaRepository
|
||||
.createQueryBuilder('media')
|
||||
.innerJoin(
|
||||
(query): any =>
|
||||
query
|
||||
.from(MediaEntity, 'innerMedia')
|
||||
.select([
|
||||
'innerMedia.name as name',
|
||||
'innerMedia.metadata.fileSize as fileSize',
|
||||
'count(*)',
|
||||
])
|
||||
.groupBy('innerMedia.name, innerMedia.metadata.fileSize')
|
||||
.having('count(*)>1'),
|
||||
'innerMedia',
|
||||
'media.name=innerMedia.name AND media.metadata.fileSize = innerMedia.fileSize')
|
||||
'media.name=innerMedia.name AND media.metadata.fileSize = innerMedia.fileSize'
|
||||
)
|
||||
.innerJoinAndSelect('media.directory', 'directory')
|
||||
.orderBy('media.name, media.metadata.fileSize')
|
||||
.limit(Config.Server.Duplicates.listingLimit).getMany();
|
||||
|
||||
.limit(Config.Server.Duplicates.listingLimit)
|
||||
.getMany();
|
||||
|
||||
const duplicateParis: DuplicatesDTO[] = [];
|
||||
const processDuplicates = (duplicateList: MediaEntity[],
|
||||
equalFn: (a: MediaEntity, b: MediaEntity) => boolean,
|
||||
checkDuplicates: boolean = false): void => {
|
||||
const processDuplicates = (
|
||||
duplicateList: MediaEntity[],
|
||||
equalFn: (a: MediaEntity, b: MediaEntity) => boolean,
|
||||
checkDuplicates: boolean = false
|
||||
): void => {
|
||||
let i = duplicateList.length - 1;
|
||||
while (i >= 0) {
|
||||
const list = [duplicateList[i]];
|
||||
@ -156,12 +215,17 @@ export class GalleryManager implements IGalleryManager, ISQLGalleryManager {
|
||||
}
|
||||
if (checkDuplicates) {
|
||||
// add to the group if one already exists
|
||||
const foundDuplicates = duplicateParis.find((dp): boolean =>
|
||||
!!dp.media.find((m): boolean =>
|
||||
!!list.find((lm): boolean => lm.id === m.id)));
|
||||
const foundDuplicates = duplicateParis.find(
|
||||
(dp): boolean =>
|
||||
!!dp.media.find(
|
||||
(m): boolean => !!list.find((lm): boolean => lm.id === m.id)
|
||||
)
|
||||
);
|
||||
if (foundDuplicates) {
|
||||
list.forEach((lm): void => {
|
||||
if (!!foundDuplicates.media.find((m): boolean => m.id === lm.id)) {
|
||||
if (
|
||||
!!foundDuplicates.media.find((m): boolean => m.id === lm.id)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
foundDuplicates.media.push(lm);
|
||||
@ -170,117 +234,160 @@ export class GalleryManager implements IGalleryManager, ISQLGalleryManager {
|
||||
}
|
||||
}
|
||||
|
||||
duplicateParis.push({media: list});
|
||||
duplicateParis.push({ media: list });
|
||||
}
|
||||
};
|
||||
|
||||
processDuplicates(duplicates,
|
||||
(a, b): boolean => a.name === b.name &&
|
||||
a.metadata.fileSize === b.metadata.fileSize);
|
||||
processDuplicates(
|
||||
duplicates,
|
||||
(a, b): boolean =>
|
||||
a.name === b.name && a.metadata.fileSize === b.metadata.fileSize
|
||||
);
|
||||
|
||||
|
||||
duplicates = await mediaRepository.createQueryBuilder('media')
|
||||
.innerJoin((query): any => query.from(MediaEntity, 'innerMedia')
|
||||
.select(['innerMedia.metadata.creationDate as creationDate', 'innerMedia.metadata.fileSize as fileSize', 'count(*)'])
|
||||
.groupBy('innerMedia.metadata.creationDate, innerMedia.metadata.fileSize')
|
||||
.having('count(*)>1'),
|
||||
duplicates = await mediaRepository
|
||||
.createQueryBuilder('media')
|
||||
.innerJoin(
|
||||
(query): any =>
|
||||
query
|
||||
.from(MediaEntity, 'innerMedia')
|
||||
.select([
|
||||
'innerMedia.metadata.creationDate as creationDate',
|
||||
'innerMedia.metadata.fileSize as fileSize',
|
||||
'count(*)',
|
||||
])
|
||||
.groupBy(
|
||||
'innerMedia.metadata.creationDate, innerMedia.metadata.fileSize'
|
||||
)
|
||||
.having('count(*)>1'),
|
||||
'innerMedia',
|
||||
'media.metadata.creationDate=innerMedia.creationDate AND media.metadata.fileSize = innerMedia.fileSize')
|
||||
'media.metadata.creationDate=innerMedia.creationDate AND media.metadata.fileSize = innerMedia.fileSize'
|
||||
)
|
||||
.innerJoinAndSelect('media.directory', 'directory')
|
||||
.orderBy('media.metadata.creationDate, media.metadata.fileSize')
|
||||
.limit(Config.Server.Duplicates.listingLimit).getMany();
|
||||
.limit(Config.Server.Duplicates.listingLimit)
|
||||
.getMany();
|
||||
|
||||
processDuplicates(duplicates,
|
||||
(a, b): boolean => a.metadata.creationDate === b.metadata.creationDate &&
|
||||
a.metadata.fileSize === b.metadata.fileSize, true);
|
||||
processDuplicates(
|
||||
duplicates,
|
||||
(a, b): boolean =>
|
||||
a.metadata.creationDate === b.metadata.creationDate &&
|
||||
a.metadata.fileSize === b.metadata.fileSize,
|
||||
true
|
||||
);
|
||||
|
||||
return duplicateParis;
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the directories only, does not include media or metafiles
|
||||
*/
|
||||
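// Usage sketch (hypothetical caller, assuming managers are initialised): const root = await galleryManager.selectDirStructure('.'); root.directories then holds the first level of subdirectories without media or metafiles.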
public async selectDirStructure(relativeDirectoryName: string): Promise<DirectoryEntity> {
|
||||
const directoryPath = GalleryManager.parseRelativeDirePath(relativeDirectoryName);
|
||||
public async selectDirStructure(
|
||||
relativeDirectoryName: string
|
||||
): Promise<DirectoryEntity> {
|
||||
const directoryPath = GalleryManager.parseRelativeDirePath(
|
||||
relativeDirectoryName
|
||||
);
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const query = connection
|
||||
.getRepository(DirectoryEntity)
|
||||
.createQueryBuilder('directory')
|
||||
.where('directory.name = :name AND directory.path = :path', {
|
||||
name: directoryPath.name,
|
||||
path: directoryPath.parent
|
||||
path: directoryPath.parent,
|
||||
})
|
||||
.leftJoinAndSelect('directory.directories', 'directories');
|
||||
|
||||
return await query.getOne();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Sets preview for the directory and caches it in the DB
|
||||
*/
|
||||
public async fillPreviewForSubDir(connection: Connection, dir: SubDirectoryDTO): Promise<void> {
|
||||
|
||||
public async fillPreviewForSubDir(
|
||||
connection: Connection,
|
||||
dir: SubDirectoryDTO
|
||||
): Promise<void> {
|
||||
if (!dir.validPreview) {
|
||||
dir.preview = await ObjectManagers.getInstance().PreviewManager.setAndGetPreviewForDirectory(dir);
|
||||
dir.preview =
|
||||
await ObjectManagers.getInstance().PreviewManager.setAndGetPreviewForDirectory(
|
||||
dir
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
dir.media = [];
|
||||
dir.isPartial = true;
|
||||
}
|
||||
|
||||
|
||||
protected async selectParentDir(connection: Connection, directoryName: string, directoryParent: string): Promise<ParentDirectoryDTO> {
|
||||
protected async selectParentDir(
|
||||
connection: Connection,
|
||||
directoryName: string,
|
||||
directoryParent: string
|
||||
): Promise<ParentDirectoryDTO> {
|
||||
const query = connection
|
||||
.getRepository(DirectoryEntity)
|
||||
.createQueryBuilder('directory')
|
||||
.where('directory.name = :name AND directory.path = :path', {
|
||||
name: directoryName,
|
||||
path: directoryParent
|
||||
path: directoryParent,
|
||||
})
|
||||
.leftJoinAndSelect('directory.directories', 'directories')
|
||||
.leftJoinAndSelect('directory.media', 'media')
|
||||
.leftJoinAndSelect('directories.preview', 'preview')
|
||||
.leftJoinAndSelect('preview.directory', 'previewDirectory')
|
||||
.select(['directory',
|
||||
.select([
|
||||
'directory',
|
||||
'directories',
|
||||
'media',
|
||||
'preview.name',
|
||||
'previewDirectory.name',
|
||||
'previewDirectory.path']);
|
||||
|
||||
'previewDirectory.path',
|
||||
]);
|
||||
|
||||
// TODO: do better filtering
|
||||
// NOTE: it should not cause an issue as it also does not save to the DB
|
||||
if (Config.Client.MetaFile.gpx === true ||
|
||||
if (
|
||||
Config.Client.MetaFile.gpx === true ||
|
||||
Config.Client.MetaFile.pg2conf === true ||
|
||||
Config.Client.MetaFile.markdown === true) {
|
||||
Config.Client.MetaFile.markdown === true
|
||||
) {
|
||||
query.leftJoinAndSelect('directory.metaFile', 'metaFile');
|
||||
}
|
||||
|
||||
return await query.getOne();
|
||||
}
|
||||
|
||||
protected async fillParentDir(connection: Connection, dir: ParentDirectoryDTO): Promise<void> {
|
||||
protected async fillParentDir(
|
||||
connection: Connection,
|
||||
dir: ParentDirectoryDTO
|
||||
): Promise<void> {
|
||||
if (dir.media) {
|
||||
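// Fetch every indexed face region for media in this directory in a single query; they are attached to the matching media items in the loop below.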
const indexedFaces = await connection.getRepository(FaceRegionEntry)
|
||||
const indexedFaces = await connection
|
||||
.getRepository(FaceRegionEntry)
|
||||
.createQueryBuilder('face')
|
||||
.leftJoinAndSelect('face.media', 'media')
|
||||
.where('media.directory = :directory', {
|
||||
directory: dir.id
|
||||
directory: dir.id,
|
||||
})
|
||||
.leftJoinAndSelect('face.person', 'person')
|
||||
.select(['face.id', 'face.box.left',
|
||||
'face.box.top', 'face.box.width', 'face.box.height',
|
||||
'media.id', 'person.name', 'person.id'])
|
||||
.select([
|
||||
'face.id',
|
||||
'face.box.left',
|
||||
'face.box.top',
|
||||
'face.box.width',
|
||||
'face.box.height',
|
||||
'media.id',
|
||||
'person.name',
|
||||
'person.id',
|
||||
])
|
||||
.getMany();
|
||||
for (const item of dir.media) {
|
||||
item.directory = dir;
|
||||
(item as PhotoDTO).metadata.faces = indexedFaces
|
||||
.filter((fe): boolean => fe.media.id === item.id)
|
||||
.map((f): { name: any; box: any } => ({box: f.box, name: f.person.name}));
|
||||
.map((f): { name: any; box: any } => ({
|
||||
box: f.box,
|
||||
name: f.person.name,
|
||||
}));
|
||||
}
|
||||
}
|
||||
if (dir.metaFile) {
|
||||
|
@ -1,14 +1,16 @@
|
||||
import {ParentDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
import {IGalleryManager} from '../interfaces/IGalleryManager';
import {DuplicatesDTO} from '../../../../common/entities/DuplicatesDTO';
import {Connection} from 'typeorm';
import {DirectoryEntity} from './enitites/DirectoryEntity';
import {FileDTO} from '../../../../common/entities/FileDTO';
import { ParentDirectoryDTO } from '../../../../common/entities/DirectoryDTO';
import { IGalleryManager } from '../interfaces/IGalleryManager';
import { DuplicatesDTO } from '../../../../common/entities/DuplicatesDTO';
import { Connection } from 'typeorm';
import { DirectoryEntity } from './enitites/DirectoryEntity';
import { FileDTO } from '../../../../common/entities/FileDTO';

export interface ISQLGalleryManager extends IGalleryManager {
|
||||
listDirectory(relativeDirectoryName: string,
|
||||
knownLastModified?: number,
|
||||
knownLastScanned?: number): Promise<ParentDirectoryDTO>;
|
||||
listDirectory(
|
||||
relativeDirectoryName: string,
|
||||
knownLastModified?: number,
|
||||
knownLastScanned?: number
|
||||
): Promise<ParentDirectoryDTO>;
|
||||
|
||||
countDirectories(): Promise<number>;
|
||||
|
||||
@ -22,5 +24,8 @@ export interface ISQLGalleryManager extends IGalleryManager {
|
||||
|
||||
selectDirStructure(directory: string): Promise<ParentDirectoryDTO<FileDTO>>;
|
||||
|
||||
fillPreviewForSubDir(connection: Connection, dir: DirectoryEntity): Promise<void>;
|
||||
fillPreviewForSubDir(
|
||||
connection: Connection,
|
||||
dir: DirectoryEntity
|
||||
): Promise<void>;
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
import {IPersonManager} from '../interfaces/IPersonManager';
import { IPersonManager } from '../interfaces/IPersonManager';

export interface ISQLPersonManager extends IPersonManager {
countFaces(): Promise<number>;
@ -1,12 +1,18 @@
|
||||
import {SearchQueryDTO, SearchQueryTypes} from '../../../../common/entities/SearchQueryDTO';
|
||||
import {ISearchManager} from '../interfaces/ISearchManager';
|
||||
import {AutoCompleteItem} from '../../../../common/entities/AutoCompleteItem';
|
||||
import {SearchResultDTO} from '../../../../common/entities/SearchResultDTO';
|
||||
import {PhotoDTO} from '../../../../common/entities/PhotoDTO';
|
||||
import {Brackets} from 'typeorm';
|
||||
import {
|
||||
SearchQueryDTO,
|
||||
SearchQueryTypes,
|
||||
} from '../../../../common/entities/SearchQueryDTO';
|
||||
import { ISearchManager } from '../interfaces/ISearchManager';
|
||||
import { AutoCompleteItem } from '../../../../common/entities/AutoCompleteItem';
|
||||
import { SearchResultDTO } from '../../../../common/entities/SearchResultDTO';
|
||||
import { PhotoDTO } from '../../../../common/entities/PhotoDTO';
|
||||
import { Brackets } from 'typeorm';
|
||||
|
||||
export interface ISQLSearchManager extends ISearchManager {
|
||||
autocomplete(text: string, type: SearchQueryTypes): Promise<AutoCompleteItem[]>;
|
||||
autocomplete(
|
||||
text: string,
|
||||
type: SearchQueryTypes
|
||||
): Promise<AutoCompleteItem[]>;
|
||||
|
||||
search(query: SearchQueryDTO): Promise<SearchResultDTO>;
|
||||
|
||||
@ -15,5 +21,8 @@ export interface ISQLSearchManager extends ISearchManager {
|
||||
// "Protected" functions. only called from other Managers, not from middlewares
|
||||
getCount(query: SearchQueryDTO): Promise<number>;
|
||||
|
||||
prepareAndBuildWhereQuery(query: SearchQueryDTO, directoryOnly?: boolean): Promise<Brackets>;
|
||||
prepareAndBuildWhereQuery(
|
||||
query: SearchQueryDTO,
|
||||
directoryOnly?: boolean
|
||||
): Promise<Brackets>;
|
||||
}
|
||||
|
@ -1,33 +1,38 @@
|
||||
import {ParentDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import {DirectoryEntity} from './enitites/DirectoryEntity';
|
||||
import {SQLConnection} from './SQLConnection';
|
||||
import {DiskManager} from '../../DiskManger';
|
||||
import {PhotoEntity, PhotoMetadataEntity} from './enitites/PhotoEntity';
|
||||
import {Utils} from '../../../../common/Utils';
|
||||
import {FaceRegion, PhotoMetadata} from '../../../../common/entities/PhotoDTO';
|
||||
import {Connection, Repository} from 'typeorm';
|
||||
import {MediaEntity} from './enitites/MediaEntity';
|
||||
import {MediaDTO, MediaDTOUtils} from '../../../../common/entities/MediaDTO';
|
||||
import {VideoEntity} from './enitites/VideoEntity';
|
||||
import {FileEntity} from './enitites/FileEntity';
|
||||
import {FileDTO} from '../../../../common/entities/FileDTO';
|
||||
import {NotificationManager} from '../../NotifocationManager';
|
||||
import {FaceRegionEntry} from './enitites/FaceRegionEntry';
|
||||
import {ObjectManagers} from '../../ObjectManagers';
|
||||
import {IIndexingManager} from '../interfaces/IIndexingManager';
|
||||
import {DiskMangerWorker} from '../../threading/DiskMangerWorker';
|
||||
import {Logger} from '../../../Logger';
|
||||
import {ServerPG2ConfMap, ServerSidePG2ConfAction} from '../../../../common/PG2ConfMap';
|
||||
import {ProjectPath} from '../../../ProjectPath';
|
||||
import { ParentDirectoryDTO } from '../../../../common/entities/DirectoryDTO';
|
||||
import { DirectoryEntity } from './enitites/DirectoryEntity';
|
||||
import { SQLConnection } from './SQLConnection';
|
||||
import { DiskManager } from '../../DiskManger';
|
||||
import { PhotoEntity, PhotoMetadataEntity } from './enitites/PhotoEntity';
|
||||
import { Utils } from '../../../../common/Utils';
|
||||
import {
|
||||
FaceRegion,
|
||||
PhotoMetadata,
|
||||
} from '../../../../common/entities/PhotoDTO';
|
||||
import { Connection, Repository } from 'typeorm';
|
||||
import { MediaEntity } from './enitites/MediaEntity';
|
||||
import { MediaDTO, MediaDTOUtils } from '../../../../common/entities/MediaDTO';
|
||||
import { VideoEntity } from './enitites/VideoEntity';
|
||||
import { FileEntity } from './enitites/FileEntity';
|
||||
import { FileDTO } from '../../../../common/entities/FileDTO';
|
||||
import { NotificationManager } from '../../NotifocationManager';
|
||||
import { FaceRegionEntry } from './enitites/FaceRegionEntry';
|
||||
import { ObjectManagers } from '../../ObjectManagers';
|
||||
import { IIndexingManager } from '../interfaces/IIndexingManager';
|
||||
import { DiskMangerWorker } from '../../threading/DiskMangerWorker';
|
||||
import { Logger } from '../../../Logger';
|
||||
import {
|
||||
ServerPG2ConfMap,
|
||||
ServerSidePG2ConfAction,
|
||||
} from '../../../../common/PG2ConfMap';
|
||||
import { ProjectPath } from '../../../ProjectPath';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import {SearchQueryDTO} from '../../../../common/entities/SearchQueryDTO';
|
||||
import {PersonEntry} from './enitites/PersonEntry';
|
||||
import { SearchQueryDTO } from '../../../../common/entities/SearchQueryDTO';
|
||||
import { PersonEntry } from './enitites/PersonEntry';
|
||||
|
||||
const LOG_TAG = '[IndexingManager]';
|
||||
|
||||
export class IndexingManager implements IIndexingManager {
|
||||
|
||||
SavingReady: Promise<void> = null;
|
||||
private SavingReadyPR: () => void = null;
|
||||
private savingQueue: ParentDirectoryDTO[] = [];
|
||||
@ -37,16 +42,31 @@ export class IndexingManager implements IIndexingManager {
|
||||
return this.SavingReady !== null;
|
||||
}
|
||||
|
||||
private static async processServerSidePG2Conf(files: FileDTO[]): Promise<void> {
|
||||
private static async processServerSidePG2Conf(
|
||||
files: FileDTO[]
|
||||
): Promise<void> {
|
||||
for (const f of files) {
|
||||
if (ServerPG2ConfMap[f.name] === ServerSidePG2ConfAction.SAVED_SEARCH) {
|
||||
const fullMediaPath = path.join(ProjectPath.ImageFolder, f.directory.path, f.directory.name, f.name);
|
||||
const fullMediaPath = path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
f.directory.path,
|
||||
f.directory.name,
|
||||
f.name
|
||||
);
|
||||
|
||||
Logger.silly(LOG_TAG, 'Saving saved-searches to DB from:', fullMediaPath);
|
||||
const savedSearches: { name: string, searchQuery: SearchQueryDTO }[] =
|
||||
Logger.silly(
|
||||
LOG_TAG,
|
||||
'Saving saved-searches to DB from:',
|
||||
fullMediaPath
|
||||
);
|
||||
const savedSearches: { name: string; searchQuery: SearchQueryDTO }[] =
|
||||
JSON.parse(await fs.promises.readFile(fullMediaPath, 'utf8'));
|
||||
for (const s of savedSearches) {
|
||||
await ObjectManagers.getInstance().AlbumManager.addIfNotExistSavedSearch(s.name, s.searchQuery, true);
|
||||
await ObjectManagers.getInstance().AlbumManager.addIfNotExistSavedSearch(
|
||||
s.name,
|
||||
s.searchQuery,
|
||||
true
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -56,26 +76,34 @@ export class IndexingManager implements IIndexingManager {
|
||||
* Indexes a dir, but returns early with the scanned version,
|
||||
* does not wait for the DB to be saved
|
||||
*/
|
||||
public indexDirectory(relativeDirectoryName: string): Promise<ParentDirectoryDTO> {
|
||||
public indexDirectory(
|
||||
relativeDirectoryName: string
|
||||
): Promise<ParentDirectoryDTO> {
|
||||
// eslint-disable-next-line no-async-promise-executor
|
||||
return new Promise(async (resolve, reject): Promise<void> => {
|
||||
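// Resolve with the in-memory scan result right away; persisting to the DB continues in the background via queueForSave().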
try {
|
||||
const scannedDirectory = await DiskManager.scanDirectory(relativeDirectoryName);
|
||||
const scannedDirectory = await DiskManager.scanDirectory(
|
||||
relativeDirectoryName
|
||||
);
|
||||
|
||||
const dirClone = Utils.shallowClone(scannedDirectory);
|
||||
// filter out server-side-only config files from the returned result
|
||||
dirClone.metaFile = dirClone.metaFile.filter(m => !ServerPG2ConfMap[m.name]);
|
||||
dirClone.metaFile = dirClone.metaFile.filter(
|
||||
(m) => !ServerPG2ConfMap[m.name]
|
||||
);
|
||||
|
||||
resolve(dirClone);
|
||||
|
||||
// save directory to DB
|
||||
this.queueForSave(scannedDirectory).catch(console.error);
|
||||
|
||||
} catch (error) {
|
||||
NotificationManager.warning('Unknown indexing error for: ' + relativeDirectoryName, error.toString());
|
||||
NotificationManager.warning(
|
||||
'Unknown indexing error for: ' + relativeDirectoryName,
|
||||
error.toString()
|
||||
);
|
||||
console.error(error);
|
||||
return reject(error);
|
||||
}
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
@ -93,9 +121,16 @@ export class IndexingManager implements IIndexingManager {
|
||||
this.isSaving = true;
|
||||
try {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const serverSideConfigs = scannedDirectory.metaFile.filter(m => !!ServerPG2ConfMap[m.name]);
|
||||
scannedDirectory.metaFile = scannedDirectory.metaFile.filter(m => !ServerPG2ConfMap[m.name]);
|
||||
const currentDirId: number = await this.saveParentDir(connection, scannedDirectory);
|
||||
const serverSideConfigs = scannedDirectory.metaFile.filter(
|
||||
(m) => !!ServerPG2ConfMap[m.name]
|
||||
);
|
||||
scannedDirectory.metaFile = scannedDirectory.metaFile.filter(
|
||||
(m) => !ServerPG2ConfMap[m.name]
|
||||
);
|
||||
const currentDirId: number = await this.saveParentDir(
|
||||
connection,
|
||||
scannedDirectory
|
||||
);
|
||||
await this.saveChildDirs(connection, currentDirId, scannedDirectory);
|
||||
await this.saveMedia(connection, currentDirId, scannedDirectory.media);
|
||||
await this.saveMetaFiles(connection, currentDirId, scannedDirectory);
|
||||
@ -110,14 +145,23 @@ export class IndexingManager implements IIndexingManager {
|
||||
/**
|
||||
* Queues up a directory to save to the DB.
|
||||
*/
|
||||
protected async queueForSave(scannedDirectory: ParentDirectoryDTO): Promise<void> {
|
||||
protected async queueForSave(
|
||||
scannedDirectory: ParentDirectoryDTO
|
||||
): Promise<void> {
|
||||
// Is this dir already queued for saving?
|
||||
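// Skip queuing when an entry with the same name, path, scan timestamps and matching media/metaFile fields is already waiting in the queue.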
if (this.savingQueue.findIndex((dir): boolean => dir.name === scannedDirectory.name &&
|
||||
dir.path === scannedDirectory.path &&
|
||||
dir.lastModified === scannedDirectory.lastModified &&
|
||||
dir.lastScanned === scannedDirectory.lastScanned &&
|
||||
(dir.media || dir.media.length) === (scannedDirectory.media || scannedDirectory.media.length) &&
|
||||
(dir.metaFile || dir.metaFile.length) === (scannedDirectory.metaFile || scannedDirectory.metaFile.length)) !== -1) {
|
||||
if (
|
||||
this.savingQueue.findIndex(
|
||||
(dir): boolean =>
|
||||
dir.name === scannedDirectory.name &&
|
||||
dir.path === scannedDirectory.path &&
|
||||
dir.lastModified === scannedDirectory.lastModified &&
|
||||
dir.lastScanned === scannedDirectory.lastScanned &&
|
||||
(dir.media || dir.media.length) ===
|
||||
(scannedDirectory.media || scannedDirectory.media.length) &&
|
||||
(dir.metaFile || dir.metaFile.length) ===
|
||||
(scannedDirectory.metaFile || scannedDirectory.metaFile.length)
|
||||
) !== -1
|
||||
) {
|
||||
return;
|
||||
}
|
||||
this.savingQueue.push(scannedDirectory);
|
||||
@ -139,86 +183,114 @@ export class IndexingManager implements IIndexingManager {
|
||||
this.SavingReady = null;
|
||||
this.SavingReadyPR();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected async saveParentDir(connection: Connection, scannedDirectory: ParentDirectoryDTO): Promise<number> {
|
||||
protected async saveParentDir(
|
||||
connection: Connection,
|
||||
scannedDirectory: ParentDirectoryDTO
|
||||
): Promise<number> {
|
||||
const directoryRepository = connection.getRepository(DirectoryEntity);
|
||||
|
||||
const currentDir: DirectoryEntity = await directoryRepository.createQueryBuilder('directory')
|
||||
const currentDir: DirectoryEntity = await directoryRepository
|
||||
.createQueryBuilder('directory')
|
||||
.where('directory.name = :name AND directory.path = :path', {
|
||||
name: scannedDirectory.name,
|
||||
path: scannedDirectory.path
|
||||
}).getOne();
|
||||
if (!!currentDir) {// Updated parent dir (if it was in the DB previously)
|
||||
path: scannedDirectory.path,
|
||||
})
|
||||
.getOne();
|
||||
if (currentDir) {
|
||||
// Updated parent dir (if it was in the DB previously)
|
||||
currentDir.lastModified = scannedDirectory.lastModified;
|
||||
currentDir.lastScanned = scannedDirectory.lastScanned;
|
||||
currentDir.mediaCount = scannedDirectory.mediaCount;
|
||||
await directoryRepository.save(currentDir);
|
||||
return currentDir.id;
|
||||
|
||||
} else {
|
||||
return (await directoryRepository.insert({
|
||||
mediaCount: scannedDirectory.mediaCount,
|
||||
lastModified: scannedDirectory.lastModified,
|
||||
lastScanned: scannedDirectory.lastScanned,
|
||||
name: scannedDirectory.name,
|
||||
path: scannedDirectory.path
|
||||
} as DirectoryEntity)).identifiers[0].id;
|
||||
return (
|
||||
await directoryRepository.insert({
|
||||
mediaCount: scannedDirectory.mediaCount,
|
||||
lastModified: scannedDirectory.lastModified,
|
||||
lastScanned: scannedDirectory.lastScanned,
|
||||
name: scannedDirectory.name,
|
||||
path: scannedDirectory.path,
|
||||
} as DirectoryEntity)
|
||||
).identifiers[0]['id'];
|
||||
}
|
||||
}
|
||||
|
||||
protected async saveChildDirs(connection: Connection, currentDirId: number, scannedDirectory: ParentDirectoryDTO): Promise<void> {
|
||||
protected async saveChildDirs(
|
||||
connection: Connection,
|
||||
currentDirId: number,
|
||||
scannedDirectory: ParentDirectoryDTO
|
||||
): Promise<void> {
|
||||
const directoryRepository = connection.getRepository(DirectoryEntity);
|
||||
|
||||
// update subdirectories that do not have a parent
|
||||
await directoryRepository
|
||||
.createQueryBuilder()
|
||||
.update(DirectoryEntity)
|
||||
.set({parent: currentDirId as any})
|
||||
.where('path = :path',
|
||||
{path: DiskMangerWorker.pathFromParent(scannedDirectory)})
|
||||
.andWhere('name NOT LIKE :root', {root: DiskMangerWorker.dirName('.')})
|
||||
.set({ parent: currentDirId as any })
|
||||
.where('path = :path', {
|
||||
path: DiskMangerWorker.pathFromParent(scannedDirectory),
|
||||
})
|
||||
.andWhere('name NOT LIKE :root', { root: DiskMangerWorker.dirName('.') })
|
||||
.andWhere('parent IS NULL')
|
||||
.execute();
|
||||
|
||||
// save subdirectories
|
||||
const childDirectories = await directoryRepository.createQueryBuilder('directory')
|
||||
const childDirectories = await directoryRepository
|
||||
.createQueryBuilder('directory')
|
||||
.leftJoinAndSelect('directory.parent', 'parent')
|
||||
.where('directory.parent = :dir', {
|
||||
dir: currentDirId
|
||||
}).getMany();
|
||||
dir: currentDirId,
|
||||
})
|
||||
.getMany();
|
||||
|
||||
for (const directory of scannedDirectory.directories) {
|
||||
// Was this child Dir already indexed before?
|
||||
const dirIndex = childDirectories.findIndex((d): boolean => d.name === directory.name);
|
||||
const dirIndex = childDirectories.findIndex(
|
||||
(d): boolean => d.name === directory.name
|
||||
);
|
||||
|
||||
if (dirIndex !== -1) { // directory found
|
||||
if (dirIndex !== -1) {
|
||||
// directory found
|
||||
childDirectories.splice(dirIndex, 1);
|
||||
} else { // dir does not exist yet
|
||||
directory.parent = ({id: currentDirId} as any);
|
||||
} else {
|
||||
// dir does not exist yet
|
||||
directory.parent = { id: currentDirId } as any;
|
||||
(directory as DirectoryEntity).lastScanned = null; // new child dir, not fully scanned yet
|
||||
const d = await directoryRepository.insert(directory as DirectoryEntity);
|
||||
const d = await directoryRepository.insert(
|
||||
directory as DirectoryEntity
|
||||
);
|
||||
|
||||
await this.saveMedia(connection, d.identifiers[0].id, directory.media);
|
||||
await this.saveMedia(
|
||||
connection,
|
||||
d.identifiers[0]['id'],
|
||||
directory.media
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove child Dirs that are no longer in the parent dir
|
||||
await directoryRepository.remove(childDirectories, {chunk: Math.max(Math.ceil(childDirectories.length / 500), 1)});
|
||||
|
||||
await directoryRepository.remove(childDirectories, {
|
||||
chunk: Math.max(Math.ceil(childDirectories.length / 500), 1),
|
||||
});
|
||||
}
|
||||
|
||||
protected async saveMetaFiles(connection: Connection, currentDirID: number, scannedDirectory: ParentDirectoryDTO): Promise<void> {
|
||||
protected async saveMetaFiles(
|
||||
connection: Connection,
|
||||
currentDirID: number,
|
||||
scannedDirectory: ParentDirectoryDTO
|
||||
): Promise<void> {
|
||||
const fileRepository = connection.getRepository(FileEntity);
|
||||
// save files
|
||||
const indexedMetaFiles = await fileRepository.createQueryBuilder('file')
|
||||
const indexedMetaFiles = await fileRepository
|
||||
.createQueryBuilder('file')
|
||||
.where('file.directory = :dir', {
|
||||
dir: currentDirID
|
||||
}).getMany();
|
||||
|
||||
dir: currentDirID,
|
||||
})
|
||||
.getMany();
|
||||
|
||||
const metaFilesToSave = [];
|
||||
for (const item of scannedDirectory.metaFile) {
|
||||
@ -230,37 +302,47 @@ export class IndexingManager implements IIndexingManager {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (metaFile == null) { // not in DB yet
|
||||
if (metaFile == null) {
|
||||
// not in DB yet
|
||||
item.directory = null;
|
||||
metaFile = Utils.clone(item);
|
||||
item.directory = scannedDirectory;
|
||||
metaFile.directory = ({id: currentDirID} as any);
|
||||
metaFile.directory = { id: currentDirID } as any;
|
||||
metaFilesToSave.push(metaFile);
|
||||
}
|
||||
}
|
||||
await fileRepository.save(metaFilesToSave, {chunk: Math.max(Math.ceil(metaFilesToSave.length / 500), 1)});
|
||||
await fileRepository.remove(indexedMetaFiles, {chunk: Math.max(Math.ceil(indexedMetaFiles.length / 500), 1)});
|
||||
await fileRepository.save(metaFilesToSave, {
|
||||
chunk: Math.max(Math.ceil(metaFilesToSave.length / 500), 1),
|
||||
});
|
||||
await fileRepository.remove(indexedMetaFiles, {
|
||||
chunk: Math.max(Math.ceil(indexedMetaFiles.length / 500), 1),
|
||||
});
|
||||
}
|
||||
|
||||
protected async saveMedia(connection: Connection, parentDirId: number, media: MediaDTO[]): Promise<void> {
|
||||
protected async saveMedia(
|
||||
connection: Connection,
|
||||
parentDirId: number,
|
||||
media: MediaDTO[]
|
||||
): Promise<void> {
|
||||
const mediaRepository = connection.getRepository(MediaEntity);
|
||||
const photoRepository = connection.getRepository(PhotoEntity);
|
||||
const videoRepository = connection.getRepository(VideoEntity);
|
||||
// save media
|
||||
let indexedMedia = (await mediaRepository.createQueryBuilder('media')
|
||||
let indexedMedia = await mediaRepository
|
||||
.createQueryBuilder('media')
|
||||
.where('media.directory = :dir', {
|
||||
dir: parentDirId
|
||||
dir: parentDirId,
|
||||
})
|
||||
.getMany());
|
||||
.getMany();
|
||||
|
||||
const mediaChange: any = {
|
||||
saveP: [], // save/update photo
|
||||
saveV: [], // save/update video
|
||||
insertP: [], // insert photo
|
||||
insertV: [] // insert video
|
||||
insertV: [], // insert video
|
||||
};
|
||||
const facesPerPhoto: { faces: FaceRegionEntry[], mediaName: string }[] = [];
|
||||
// tslint:disable-next-line:prefer-for-of
|
||||
const facesPerPhoto: { faces: FaceRegionEntry[]; mediaName: string }[] = [];
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-for-of
|
||||
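// Diff the scanned media against the already indexed ones: new items are queued for insert, changed ones for update, and their face regions are collected per photo for a later pass.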
for (let i = 0; i < media.length; i++) {
|
||||
let mediaItem: MediaEntity = null;
|
||||
for (let j = 0; j < indexedMedia.length; j++) {
|
||||
@ -272,27 +354,42 @@ export class IndexingManager implements IIndexingManager {
|
||||
}
|
||||
|
||||
const scannedFaces = (media[i].metadata as PhotoMetadata).faces || [];
|
||||
if ((media[i].metadata as PhotoMetadata).faces) { // if it has faces, cache them
|
||||
if ((media[i].metadata as PhotoMetadata).faces) {
|
||||
// if it has faces, cache them
|
||||
// make the list distinct (some photos may contain the same person multiple times)
|
||||
(media[i].metadata as PhotoMetadataEntity).persons = [...new Set((media[i].metadata as PhotoMetadata).faces.map(f => f.name))];
|
||||
(media[i].metadata as PhotoMetadataEntity).persons = [
|
||||
...new Set(
|
||||
(media[i].metadata as PhotoMetadata).faces.map((f) => f.name)
|
||||
),
|
||||
];
|
||||
}
|
||||
delete (media[i].metadata as PhotoMetadata).faces; // faces are persisted separately, so save them in their own step
|
||||
|
||||
if (mediaItem == null) { // not in DB yet
|
||||
if (mediaItem == null) {
|
||||
// not in DB yet
|
||||
media[i].directory = null;
|
||||
mediaItem = (Utils.clone(media[i]) as any);
|
||||
mediaItem.directory = ({id: parentDirId} as any);
|
||||
(MediaDTOUtils.isPhoto(mediaItem) ? mediaChange.insertP : mediaChange.insertV).push(mediaItem);
|
||||
} else { // already in the DB, only needs to be updated
|
||||
mediaItem = Utils.clone(media[i]) as any;
|
||||
mediaItem.directory = { id: parentDirId } as any;
|
||||
(MediaDTOUtils.isPhoto(mediaItem)
|
||||
? mediaChange.insertP
|
||||
: mediaChange.insertV
|
||||
).push(mediaItem);
|
||||
} else {
|
||||
// already in the DB, only needs to be updated
|
||||
delete (mediaItem.metadata as PhotoMetadata).faces;
|
||||
if (!Utils.equalsFilter(mediaItem.metadata, media[i].metadata)) {
|
||||
mediaItem.metadata = (media[i].metadata as any);
|
||||
(MediaDTOUtils.isPhoto(mediaItem) ? mediaChange.saveP : mediaChange.saveV).push(mediaItem);
|
||||
|
||||
mediaItem.metadata = media[i].metadata as any;
|
||||
(MediaDTOUtils.isPhoto(mediaItem)
|
||||
? mediaChange.saveP
|
||||
: mediaChange.saveV
|
||||
).push(mediaItem);
|
||||
}
|
||||
}
|
||||
|
||||
facesPerPhoto.push({faces: scannedFaces as FaceRegionEntry[], mediaName: mediaItem.name});
|
||||
facesPerPhoto.push({
|
||||
faces: scannedFaces as FaceRegionEntry[],
|
||||
mediaName: mediaItem.name,
|
||||
});
|
||||
}
|
||||
|
||||
await this.saveChunk(photoRepository, mediaChange.saveP, 100);
|
||||
@ -300,17 +397,23 @@ export class IndexingManager implements IIndexingManager {
|
||||
await this.saveChunk(photoRepository, mediaChange.insertP, 100);
|
||||
await this.saveChunk(videoRepository, mediaChange.insertV, 100);
|
||||
|
||||
indexedMedia = (await mediaRepository.createQueryBuilder('media')
|
||||
indexedMedia = await mediaRepository
|
||||
.createQueryBuilder('media')
|
||||
.where('media.directory = :dir', {
|
||||
dir: parentDirId
|
||||
dir: parentDirId,
|
||||
})
|
||||
.select(['media.name', 'media.id'])
|
||||
.getMany());
|
||||
.getMany();
|
||||
|
||||
const faces: FaceRegionEntry[] = [];
|
||||
facesPerPhoto.forEach((group): void => {
|
||||
const mIndex = indexedMedia.findIndex((m): boolean => m.name === group.mediaName);
|
||||
group.faces.forEach((sf: FaceRegionEntry): any => sf.media = ({id: indexedMedia[mIndex].id} as any));
|
||||
const mIndex = indexedMedia.findIndex(
|
||||
(m): boolean => m.name === group.mediaName
|
||||
);
|
||||
group.faces.forEach(
|
||||
(sf: FaceRegionEntry): any =>
|
||||
(sf.media = { id: indexedMedia[mIndex].id } as any)
|
||||
);
|
||||
|
||||
faces.push(...group.faces);
|
||||
indexedMedia.splice(mIndex, 1);
|
||||
@ -320,42 +423,47 @@ export class IndexingManager implements IIndexingManager {
|
||||
await mediaRepository.remove(indexedMedia);
|
||||
}
|
||||
|
||||
protected async saveFaces(connection: Connection, parentDirId: number, scannedFaces: FaceRegion[]): Promise<void> {
|
||||
protected async saveFaces(
|
||||
connection: Connection,
|
||||
parentDirId: number,
|
||||
scannedFaces: FaceRegion[]
|
||||
): Promise<void> {
|
||||
const faceRepository = connection.getRepository(FaceRegionEntry);
|
||||
const personRepository = connection.getRepository(PersonEntry);
|
||||
|
||||
const persons: { name: string, faceRegion: FaceRegion }[] = [];
|
||||
const persons: { name: string; faceRegion: FaceRegion }[] = [];
|
||||
|
||||
for (const face of scannedFaces) {
|
||||
if (persons.findIndex(f => f.name === face.name) === -1) {
|
||||
persons.push({name: face.name, faceRegion: face});
|
||||
if (persons.findIndex((f) => f.name === face.name) === -1) {
|
||||
persons.push({ name: face.name, faceRegion: face });
|
||||
}
|
||||
}
|
||||
await ObjectManagers.getInstance().PersonManager.saveAll(persons);
|
||||
// get saved persons without triggering denormalized data update (i.e.: do not use PersonManager.get).
|
||||
const savedPersons = await personRepository.find();
|
||||
|
||||
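// Load the face regions already stored for this directory so the freshly scanned ones can be diffed against them: matches are kept, new ones inserted, leftovers removed at the end.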
const indexedFaces = await faceRepository.createQueryBuilder('face')
|
||||
const indexedFaces = await faceRepository
|
||||
.createQueryBuilder('face')
|
||||
.leftJoin('face.media', 'media')
|
||||
.where('media.directory = :directory', {
|
||||
directory: parentDirId
|
||||
directory: parentDirId,
|
||||
})
|
||||
.leftJoinAndSelect('face.person', 'person')
|
||||
.getMany();
|
||||
|
||||
|
||||
const faceToInsert = [];
|
||||
// tslint:disable-next-line:prefer-for-of
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-for-of
|
||||
for (let i = 0; i < scannedFaces.length; i++) {
|
||||
|
||||
// was the face region already indexed
|
||||
let face: FaceRegionEntry = null;
|
||||
for (let j = 0; j < indexedFaces.length; j++) {
|
||||
if (indexedFaces[j].box.height === scannedFaces[i].box.height &&
|
||||
if (
|
||||
indexedFaces[j].box.height === scannedFaces[i].box.height &&
|
||||
indexedFaces[j].box.width === scannedFaces[i].box.width &&
|
||||
indexedFaces[j].box.left === scannedFaces[i].box.left &&
|
||||
indexedFaces[j].box.top === scannedFaces[i].box.top &&
|
||||
indexedFaces[j].person.name === scannedFaces[i].name) {
|
||||
indexedFaces[j].person.name === scannedFaces[i].name
|
||||
) {
|
||||
face = indexedFaces[j];
|
||||
indexedFaces.splice(j, 1);
|
||||
break; // region found, stop processing
|
||||
@ -363,18 +471,25 @@ export class IndexingManager implements IIndexingManager {
|
||||
}
|
||||
|
||||
if (face == null) {
|
||||
(scannedFaces[i] as FaceRegionEntry).person = savedPersons.find(p => p.name === scannedFaces[i].name);
|
||||
(scannedFaces[i] as FaceRegionEntry).person = savedPersons.find(
|
||||
(p) => p.name === scannedFaces[i].name
|
||||
);
|
||||
faceToInsert.push(scannedFaces[i]);
|
||||
}
|
||||
}
|
||||
if (faceToInsert.length > 0) {
|
||||
await this.insertChunk(faceRepository, faceToInsert, 100);
|
||||
}
|
||||
await faceRepository.remove(indexedFaces, {chunk: Math.max(Math.ceil(indexedFaces.length / 500), 1)});
|
||||
|
||||
await faceRepository.remove(indexedFaces, {
|
||||
chunk: Math.max(Math.ceil(indexedFaces.length / 500), 1),
|
||||
});
|
||||
}
|
||||
|
||||
private async saveChunk<T>(repository: Repository<any>, entities: T[], size: number): Promise<T[]> {
|
||||
private async saveChunk<T>(
|
||||
repository: Repository<any>,
|
||||
entities: T[],
|
||||
size: number
|
||||
): Promise<T[]> {
|
||||
if (entities.length === 0) {
|
||||
return [];
|
||||
}
|
||||
@ -383,21 +498,33 @@ export class IndexingManager implements IIndexingManager {
|
||||
}
|
||||
let list: T[] = [];
|
||||
for (let i = 0; i < entities.length / size; i++) {
|
||||
list = list.concat(await repository.save(entities.slice(i * size, (i + 1) * size)));
|
||||
list = list.concat(
|
||||
await repository.save(entities.slice(i * size, (i + 1) * size))
|
||||
);
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
private async insertChunk<T>(repository: Repository<any>, entities: T[], size: number): Promise<number[]> {
|
||||
private async insertChunk<T>(
|
||||
repository: Repository<any>,
|
||||
entities: T[],
|
||||
size: number
|
||||
): Promise<number[]> {
|
||||
if (entities.length === 0) {
|
||||
return [];
|
||||
}
|
||||
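// Small batches are inserted in one call; larger ones are split into size-sized slices below and the generated ids collected.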
if (entities.length < size) {
|
||||
return (await repository.insert(entities)).identifiers.map((i: any) => i.id);
|
||||
return (await repository.insert(entities)).identifiers.map(
|
||||
(i: any) => i.id
|
||||
);
|
||||
}
|
||||
let list: number[] = [];
|
||||
for (let i = 0; i < entities.length / size; i++) {
|
||||
list = list.concat((await repository.insert(entities.slice(i * size, (i + 1) * size))).identifiers.map(ids => ids.id));
|
||||
list = list.concat(
|
||||
(
|
||||
await repository.insert(entities.slice(i * size, (i + 1) * size))
|
||||
).identifiers.map((ids) => ids['id'])
|
||||
);
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
@ -1,12 +1,11 @@
import {SQLConnection} from './SQLConnection';
import {PersonEntry} from './enitites/PersonEntry';
import {FaceRegionEntry} from './enitites/FaceRegionEntry';
import {PersonDTO} from '../../../../common/entities/PersonDTO';
import {ISQLPersonManager} from './IPersonManager';
import {Logger} from '../../../Logger';
import {FaceRegion} from '../../../../common/entities/PhotoDTO';
import {SQL_COLLATE} from './enitites/EntityUtils';

import { SQLConnection } from './SQLConnection';
import { PersonEntry } from './enitites/PersonEntry';
import { FaceRegionEntry } from './enitites/FaceRegionEntry';
import { PersonDTO } from '../../../../common/entities/PersonDTO';
import { ISQLPersonManager } from './IPersonManager';
import { Logger } from '../../../Logger';
import { FaceRegion } from '../../../../common/entities/PhotoDTO';
import { SQL_COLLATE } from './enitites/EntityUtils';

const LOG_TAG = '[PersonManager]';
|
||||
|
||||
@ -19,8 +18,10 @@ export class PersonManager implements ISQLPersonManager {
|
||||
|
||||
private static async updateCounts(): Promise<void> {
|
||||
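// Recompute each person's cached face count directly in SQL; persons left without any face region are removed right below.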
const connection = await SQLConnection.getConnection();
|
||||
await connection.query('UPDATE person_entry SET count = ' +
|
||||
' (SELECT COUNT(1) FROM face_region_entry WHERE face_region_entry.personId = person_entry.id)');
|
||||
await connection.query(
|
||||
'UPDATE person_entry SET count = ' +
|
||||
' (SELECT COUNT(1) FROM face_region_entry WHERE face_region_entry.personId = person_entry.id)'
|
||||
);
|
||||
|
||||
// remove persons without photo
|
||||
await connection
|
||||
@ -33,23 +34,28 @@ export class PersonManager implements ISQLPersonManager {
|
||||
|
||||
private static async updateSamplePhotos(): Promise<void> {
|
||||
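// For every person, pick the face region from their most recent media (by metadata creation date) and store it as the sample photo.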
const connection = await SQLConnection.getConnection();
|
||||
await connection.query('update person_entry set sampleRegionId = ' +
|
||||
'(Select face_region_entry.id from media_entity ' +
|
||||
'left join face_region_entry on media_entity.id = face_region_entry.mediaId ' +
|
||||
'where face_region_entry.personId=person_entry.id ' +
|
||||
'order by media_entity.metadataCreationdate desc ' +
|
||||
'limit 1)');
|
||||
|
||||
await connection.query(
|
||||
'update person_entry set sampleRegionId = ' +
|
||||
'(Select face_region_entry.id from media_entity ' +
|
||||
'left join face_region_entry on media_entity.id = face_region_entry.mediaId ' +
|
||||
'where face_region_entry.personId=person_entry.id ' +
|
||||
'order by media_entity.metadataCreationdate desc ' +
|
||||
'limit 1)'
|
||||
);
|
||||
}
|
||||
|
||||
async updatePerson(name: string, partialPerson: PersonDTO): Promise<PersonEntry> {
|
||||
async updatePerson(
|
||||
name: string,
|
||||
partialPerson: PersonDTO
|
||||
): Promise<PersonEntry> {
|
||||
this.isDBValid = false;
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const repository = connection.getRepository(PersonEntry);
|
||||
const person = await repository.createQueryBuilder('person')
|
||||
const person = await repository
|
||||
.createQueryBuilder('person')
|
||||
.limit(1)
|
||||
.where('person.name LIKE :name COLLATE ' + SQL_COLLATE, {name}).getOne();
|
||||
|
||||
.where('person.name LIKE :name COLLATE ' + SQL_COLLATE, { name })
|
||||
.getOne();
|
||||
|
||||
if (typeof partialPerson.name !== 'undefined') {
|
||||
person.name = partialPerson.name;
|
||||
@ -76,7 +82,8 @@ export class PersonManager implements ISQLPersonManager {
|
||||
*/
|
||||
public async countFaces(): Promise<number> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
return await connection.getRepository(FaceRegionEntry)
|
||||
return await connection
|
||||
.getRepository(FaceRegionEntry)
|
||||
.createQueryBuilder('faceRegion')
|
||||
.getCount();
|
||||
}
|
||||
@ -88,8 +95,10 @@ export class PersonManager implements ISQLPersonManager {
|
||||
return this.persons.find((p): boolean => p.name === name);
|
||||
}
|
||||
|
||||
public async saveAll(persons: { name: string, faceRegion: FaceRegion }[]): Promise<void> {
|
||||
const toSave: { name: string, faceRegion: FaceRegion }[] = [];
|
||||
public async saveAll(
|
||||
persons: { name: string; faceRegion: FaceRegion }[]
|
||||
): Promise<void> {
|
||||
const toSave: { name: string; faceRegion: FaceRegion }[] = [];
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const personRepository = connection.getRepository(PersonEntry);
|
||||
const faceRegionRepository = connection.getRepository(FaceRegionEntry);
|
||||
@ -97,27 +106,28 @@ export class PersonManager implements ISQLPersonManager {
|
||||
const savedPersons = await personRepository.find();
|
||||
// filter already existing persons
|
||||
for (const personToSave of persons) {
|
||||
|
||||
const person = savedPersons.find((p): boolean => p.name === personToSave.name);
|
||||
const person = savedPersons.find(
|
||||
(p): boolean => p.name === personToSave.name
|
||||
);
|
||||
if (!person) {
|
||||
toSave.push(personToSave);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (toSave.length > 0) {
|
||||
for (let i = 0; i < toSave.length / 200; i++) {
|
||||
const saving = toSave.slice(i * 200, (i + 1) * 200);
|
||||
const inserted = await personRepository.insert(saving.map(p => ({name: p.name})));
|
||||
const inserted = await personRepository.insert(
|
||||
saving.map((p) => ({ name: p.name }))
|
||||
);
|
||||
// setting Person id
|
||||
inserted.identifiers.forEach((idObj: { id: number }, j: number) => {
|
||||
(saving[j].faceRegion as FaceRegionEntry).person = idObj as any;
|
||||
});
|
||||
await faceRegionRepository.insert(saving.map(p => p.faceRegion));
|
||||
await faceRegionRepository.insert(saving.map((p) => p.faceRegion));
|
||||
}
|
||||
}
|
||||
this.isDBValid = false;
|
||||
|
||||
}
|
||||
|
||||
public async onNewDataVersion(): Promise<void> {
|
||||
@ -134,9 +144,11 @@ export class PersonManager implements ISQLPersonManager {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const personRepository = connection.getRepository(PersonEntry);
|
||||
this.persons = await personRepository.find({
|
||||
relations: ['sampleRegion',
|
||||
relations: [
|
||||
'sampleRegion',
|
||||
'sampleRegion.media',
|
||||
'sampleRegion.media.directory']
|
||||
'sampleRegion.media.directory',
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
@ -153,5 +165,4 @@ export class PersonManager implements ISQLPersonManager {
|
||||
await PersonManager.updateSamplePhotos();
|
||||
this.isDBValid = false;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,26 +1,34 @@
|
||||
import {Config} from '../../../../common/config/private/Config';
|
||||
import {Brackets, SelectQueryBuilder, WhereExpression} from 'typeorm';
|
||||
import {MediaEntity} from './enitites/MediaEntity';
|
||||
import {DiskMangerWorker} from '../../threading/DiskMangerWorker';
|
||||
import {ObjectManagers} from '../../ObjectManagers';
|
||||
import {DatabaseType} from '../../../../common/config/private/PrivateConfig';
|
||||
import {SortingMethods} from '../../../../common/entities/SortingMethods';
|
||||
import {ISQLSearchManager} from './ISearchManager';
|
||||
import {IPreviewManager, PreviewPhotoDTOWithID} from '../interfaces/IPreviewManager';
|
||||
import {SQLConnection} from './SQLConnection';
|
||||
import {SearchQueryDTO, SearchQueryTypes, TextSearch} from '../../../../common/entities/SearchQueryDTO';
|
||||
import {DirectoryEntity} from './enitites/DirectoryEntity';
|
||||
import {ParentDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import { Config } from '../../../../common/config/private/Config';
|
||||
import { Brackets, SelectQueryBuilder, WhereExpression } from 'typeorm';
|
||||
import { MediaEntity } from './enitites/MediaEntity';
|
||||
import { DiskMangerWorker } from '../../threading/DiskMangerWorker';
|
||||
import { ObjectManagers } from '../../ObjectManagers';
|
||||
import { DatabaseType } from '../../../../common/config/private/PrivateConfig';
|
||||
import { SortingMethods } from '../../../../common/entities/SortingMethods';
|
||||
import { ISQLSearchManager } from './ISearchManager';
|
||||
import {
|
||||
IPreviewManager,
|
||||
PreviewPhotoDTOWithID,
|
||||
} from '../interfaces/IPreviewManager';
|
||||
import { SQLConnection } from './SQLConnection';
|
||||
import {
|
||||
SearchQueryDTO,
|
||||
SearchQueryTypes,
|
||||
TextSearch,
|
||||
} from '../../../../common/entities/SearchQueryDTO';
|
||||
import { DirectoryEntity } from './enitites/DirectoryEntity';
|
||||
import { ParentDirectoryDTO } from '../../../../common/entities/DirectoryDTO';
|
||||
import * as path from 'path';
|
||||
import {Utils} from '../../../../common/Utils';
|
||||
import { Utils } from '../../../../common/Utils';
|
||||
|
||||
const LOG_TAG = '[PreviewManager]';
|
||||
|
||||
export class PreviewManager implements IPreviewManager {
|
||||
private static DIRECTORY_SELECT = ['directory.name', 'directory.path'];
|
||||
|
||||
private static setSorting<T>(query: SelectQueryBuilder<T>): SelectQueryBuilder<T> {
|
||||
|
||||
private static setSorting<T>(
|
||||
query: SelectQueryBuilder<T>
|
||||
): SelectQueryBuilder<T> {
|
||||
for (const sort of Config.Server.Preview.Sorting) {
|
||||
switch (sort) {
|
||||
case SortingMethods.descDate:
|
||||
@ -41,7 +49,6 @@ export class PreviewManager implements IPreviewManager {
|
||||
case SortingMethods.ascName:
|
||||
query.addOrderBy('media.name', 'ASC');
|
||||
break;
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ -50,18 +57,22 @@ export class PreviewManager implements IPreviewManager {
|
||||
|
||||
public async resetPreviews(): Promise<void> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
await connection.createQueryBuilder()
|
||||
await connection
|
||||
.createQueryBuilder()
|
||||
.update(DirectoryEntity)
|
||||
.set({validPreview: false}).execute();
|
||||
.set({ validPreview: false })
|
||||
.execute();
|
||||
}
|
||||
|
||||
public async onNewDataVersion(changedDir: ParentDirectoryDTO): Promise<void> {
|
||||
// Invalidating Album preview
|
||||
let fullPath = DiskMangerWorker.normalizeDirPath(path.join(changedDir.path, changedDir.name));
|
||||
let fullPath = DiskMangerWorker.normalizeDirPath(
|
||||
path.join(changedDir.path, changedDir.name)
|
||||
);
|
||||
const query = (await SQLConnection.getConnection())
|
||||
.createQueryBuilder()
|
||||
.update(DirectoryEntity)
|
||||
.set({validPreview: false});
|
||||
.set({ validPreview: false });
|
||||
|
||||
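// Walk up from the changed directory to the gallery root, marking every ancestor directory's preview invalid so it is recalculated later.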
let i = 0;
|
||||
const root = DiskMangerWorker.pathFromRelativeDirName('.');
|
||||
@ -70,32 +81,37 @@ export class PreviewManager implements IPreviewManager {
|
||||
const parentPath = DiskMangerWorker.pathFromRelativeDirName(fullPath);
|
||||
fullPath = parentPath;
|
||||
++i;
|
||||
query.orWhere(new Brackets((q: WhereExpression) => {
|
||||
const param: { [key: string]: string } = {};
|
||||
param['name' + i] = name;
|
||||
param['path' + i] = parentPath;
|
||||
q.where(`path = :path${i}`, param);
|
||||
q.andWhere(`name = :name${i}`, param);
|
||||
}));
|
||||
query.orWhere(
|
||||
new Brackets((q: WhereExpression) => {
|
||||
const param: { [key: string]: string } = {};
|
||||
param['name' + i] = name;
|
||||
param['path' + i] = parentPath;
|
||||
q.where(`path = :path${i}`, param);
|
||||
q.andWhere(`name = :name${i}`, param);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
++i;
|
||||
query.orWhere(new Brackets((q: WhereExpression) => {
|
||||
const param: { [key: string]: string } = {};
|
||||
param['name' + i] = DiskMangerWorker.dirName('.');
|
||||
param['path' + i] = DiskMangerWorker.pathFromRelativeDirName('.');
|
||||
q.where(`path = :path${i}`, param);
|
||||
q.andWhere(`name = :name${i}`, param);
|
||||
}));
|
||||
|
||||
query.orWhere(
|
||||
new Brackets((q: WhereExpression) => {
|
||||
const param: { [key: string]: string } = {};
|
||||
param['name' + i] = DiskMangerWorker.dirName('.');
|
||||
param['path' + i] = DiskMangerWorker.pathFromRelativeDirName('.');
|
||||
q.where(`path = :path${i}`, param);
|
||||
q.andWhere(`name = :name${i}`, param);
|
||||
})
|
||||
);
|
||||
|
||||
await query.execute();
|
||||
}
|
||||
|
||||
public async getAlbumPreview(album: { searchQuery: SearchQueryDTO }): Promise<PreviewPhotoDTOWithID> {
|
||||
|
||||
const albumQuery: Brackets = await (ObjectManagers.getInstance().SearchManager as ISQLSearchManager)
|
||||
.prepareAndBuildWhereQuery(album.searchQuery);
|
||||
public async getAlbumPreview(album: {
|
||||
searchQuery: SearchQueryDTO;
|
||||
}): Promise<PreviewPhotoDTOWithID> {
|
||||
const albumQuery: Brackets = await (
|
||||
ObjectManagers.getInstance().SearchManager as ISQLSearchManager
|
||||
).prepareAndBuildWhereQuery(album.searchQuery);
|
||||
const connection = await SQLConnection.getConnection();
|
||||
|
||||
const previewQuery = (): SelectQueryBuilder<MediaEntity> => {
|
||||
@ -110,10 +126,16 @@ export class PreviewManager implements IPreviewManager {
|
||||
};
|
||||
|
||||
let previewMedia = null;
|
||||
if (Config.Server.Preview.SearchQuery &&
|
||||
!Utils.equalsFilter(Config.Server.Preview.SearchQuery, {type: SearchQueryTypes.any_text, text: ''} as TextSearch)) {
|
||||
const previewFilterQuery = await (ObjectManagers.getInstance().SearchManager as ISQLSearchManager)
|
||||
.prepareAndBuildWhereQuery(Config.Server.Preview.SearchQuery);
|
||||
if (
|
||||
Config.Server.Preview.SearchQuery &&
|
||||
!Utils.equalsFilter(Config.Server.Preview.SearchQuery, {
|
||||
type: SearchQueryTypes.any_text,
|
||||
text: '',
|
||||
} as TextSearch)
|
||||
) {
|
||||
const previewFilterQuery = await (
|
||||
ObjectManagers.getInstance().SearchManager as ISQLSearchManager
|
||||
).prepareAndBuildWhereQuery(Config.Server.Preview.SearchQuery);
|
||||
previewMedia = await previewQuery()
|
||||
.andWhere(previewFilterQuery)
|
||||
.limit(1)
|
||||
@ -121,24 +143,28 @@ export class PreviewManager implements IPreviewManager {
|
||||
}
|
||||
|
||||
if (!previewMedia) {
|
||||
previewMedia = await previewQuery()
|
||||
.limit(1)
|
||||
.getOne();
|
||||
previewMedia = await previewQuery().limit(1).getOne();
|
||||
}
|
||||
return previewMedia || null;
|
||||
}
|
||||
|
||||
|
||||
public async getPartialDirsWithoutPreviews(): Promise<{ id: number, name: string, path: string }[]> {
|
||||
public async getPartialDirsWithoutPreviews(): Promise<
|
||||
{ id: number; name: string; path: string }[]
|
||||
> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
return await connection
|
||||
.getRepository(DirectoryEntity)
|
||||
.createQueryBuilder('directory')
|
||||
.where('directory.validPreview = :validPreview', {validPreview: 0}) // 0 === false
|
||||
.select(['name', 'id', 'path']).getRawMany();
|
||||
.where('directory.validPreview = :validPreview', { validPreview: 0 }) // 0 === false
|
||||
.select(['name', 'id', 'path'])
|
||||
.getRawMany();
|
||||
}
|
||||
|
||||
public async setAndGetPreviewForDirectory(dir: { id: number, name: string, path: string }): Promise<PreviewPhotoDTOWithID> {
|
||||
public async setAndGetPreviewForDirectory(dir: {
|
||||
id: number;
|
||||
name: string;
|
||||
path: string;
|
||||
}): Promise<PreviewPhotoDTOWithID> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const previewQuery = (): SelectQueryBuilder<MediaEntity> => {
|
||||
const query = connection
|
||||
@ -146,56 +172,65 @@ export class PreviewManager implements IPreviewManager {
|
||||
.createQueryBuilder('media')
|
||||
.innerJoin('media.directory', 'directory')
|
||||
.select(['media.name', 'media.id', ...PreviewManager.DIRECTORY_SELECT])
|
||||
.where(new Brackets((q: WhereExpression) => {
|
||||
q.where('media.directory = :dir', {
|
||||
dir: dir.id
|
||||
});
|
||||
if (Config.Server.Database.type === DatabaseType.mysql) {
|
||||
q.orWhere('directory.path like :path || \'%\'', {
|
||||
path: (DiskMangerWorker.pathFromParent(dir))
|
||||
.where(
|
||||
new Brackets((q: WhereExpression) => {
|
||||
q.where('media.directory = :dir', {
|
||||
dir: dir.id,
|
||||
});
|
||||
} else {
|
||||
q.orWhere('directory.path GLOB :path', {
|
||||
path: DiskMangerWorker.pathFromParent(dir) + '*'
|
||||
});
|
||||
}
|
||||
}));
|
||||
if (Config.Server.Database.type === DatabaseType.mysql) {
|
||||
q.orWhere("directory.path like :path || '%'", {
|
||||
path: DiskMangerWorker.pathFromParent(dir),
|
||||
});
|
||||
} else {
|
||||
q.orWhere('directory.path GLOB :path', {
|
||||
path: DiskMangerWorker.pathFromParent(dir) + '*',
|
||||
});
|
||||
}
|
||||
})
|
||||
);
|
||||
// Select from the directory if any, otherwise from any subdirectories.
|
||||
// (There is no priority between subdirectories)
|
||||
query.orderBy(`CASE WHEN directory.id = ${dir.id} THEN 0 ELSE 1 END`, 'ASC');
|
||||
|
||||
query.orderBy(
|
||||
`CASE WHEN directory.id = ${dir.id} THEN 0 ELSE 1 END`,
|
||||
'ASC'
|
||||
);
|
||||
|
||||
PreviewManager.setSorting(query);
|
||||
return query;
|
||||
};
|
||||
|
||||
|
||||
let previewMedia: PreviewPhotoDTOWithID = null;
|
||||
if (Config.Server.Preview.SearchQuery &&
|
||||
if (
|
||||
Config.Server.Preview.SearchQuery &&
|
||||
!Utils.equalsFilter(Config.Server.Preview.SearchQuery, {
|
||||
type: SearchQueryTypes.any_text,
|
||||
text: ''
|
||||
} as TextSearch)) {
|
||||
text: '',
|
||||
} as TextSearch)
|
||||
) {
|
||||
previewMedia = await previewQuery()
|
||||
.andWhere(await (ObjectManagers.getInstance().SearchManager as ISQLSearchManager)
|
||||
.prepareAndBuildWhereQuery(Config.Server.Preview.SearchQuery))
|
||||
.andWhere(
|
||||
await (
|
||||
ObjectManagers.getInstance().SearchManager as ISQLSearchManager
|
||||
).prepareAndBuildWhereQuery(Config.Server.Preview.SearchQuery)
|
||||
)
|
||||
.limit(1)
|
||||
.getOne();
|
||||
}
|
||||
|
||||
if (!previewMedia) {
|
||||
previewMedia = await previewQuery()
|
||||
.limit(1)
|
||||
.getOne();
|
||||
previewMedia = await previewQuery().limit(1).getOne();
|
||||
}
|
||||
|
||||
// set validPreview bit to true even if there is no preview (to prevent future updates)
|
||||
await connection.createQueryBuilder()
|
||||
.update(DirectoryEntity).set({preview: previewMedia, validPreview: true}).where('id = :dir', {
|
||||
dir: dir.id
|
||||
}).execute();
|
||||
await connection
|
||||
.createQueryBuilder()
|
||||
.update(DirectoryEntity)
|
||||
.set({ preview: previewMedia, validPreview: true })
|
||||
.where('id = :dir', {
|
||||
dir: dir.id,
|
||||
})
|
||||
.execute();
|
||||
|
||||
return previewMedia || null;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,37 +1,43 @@
|
||||
import 'reflect-metadata';
|
||||
import {Connection, DataSourceOptions, createConnection, getConnection} from 'typeorm';
|
||||
import {UserEntity} from './enitites/UserEntity';
|
||||
import {UserRoles} from '../../../../common/entities/UserDTO';
|
||||
import {PhotoEntity} from './enitites/PhotoEntity';
|
||||
import {DirectoryEntity} from './enitites/DirectoryEntity';
|
||||
import {Config} from '../../../../common/config/private/Config';
|
||||
import {SharingEntity} from './enitites/SharingEntity';
|
||||
import {PasswordHelper} from '../../PasswordHelper';
|
||||
import {ProjectPath} from '../../../ProjectPath';
|
||||
import {VersionEntity} from './enitites/VersionEntity';
|
||||
import {Logger} from '../../../Logger';
|
||||
import {MediaEntity} from './enitites/MediaEntity';
|
||||
import {VideoEntity} from './enitites/VideoEntity';
|
||||
import {DataStructureVersion} from '../../../../common/DataStructureVersion';
|
||||
import {FileEntity} from './enitites/FileEntity';
|
||||
import {FaceRegionEntry} from './enitites/FaceRegionEntry';
|
||||
import {PersonEntry} from './enitites/PersonEntry';
|
||||
import {Utils} from '../../../../common/Utils';
|
||||
import {
|
||||
Connection,
|
||||
createConnection,
|
||||
DataSourceOptions,
|
||||
getConnection,
|
||||
} from 'typeorm';
|
||||
import { UserEntity } from './enitites/UserEntity';
|
||||
import { UserRoles } from '../../../../common/entities/UserDTO';
|
||||
import { PhotoEntity } from './enitites/PhotoEntity';
|
||||
import { DirectoryEntity } from './enitites/DirectoryEntity';
|
||||
import { Config } from '../../../../common/config/private/Config';
|
||||
import { SharingEntity } from './enitites/SharingEntity';
|
||||
import { PasswordHelper } from '../../PasswordHelper';
|
||||
import { ProjectPath } from '../../../ProjectPath';
|
||||
import { VersionEntity } from './enitites/VersionEntity';
|
||||
import { Logger } from '../../../Logger';
|
||||
import { MediaEntity } from './enitites/MediaEntity';
|
||||
import { VideoEntity } from './enitites/VideoEntity';
|
||||
import { DataStructureVersion } from '../../../../common/DataStructureVersion';
|
||||
import { FileEntity } from './enitites/FileEntity';
|
||||
import { FaceRegionEntry } from './enitites/FaceRegionEntry';
|
||||
import { PersonEntry } from './enitites/PersonEntry';
|
||||
import { Utils } from '../../../../common/Utils';
|
||||
import * as path from 'path';
|
||||
import {DatabaseType, ServerDataBaseConfig, SQLLogLevel} from '../../../../common/config/private/PrivateConfig';
|
||||
import {AlbumBaseEntity} from './enitites/album/AlbumBaseEntity';
|
||||
import {SavedSearchEntity} from './enitites/album/SavedSearchEntity';
|
||||
import {NotificationManager} from '../../NotifocationManager';
|
||||
import {
|
||||
DatabaseType,
|
||||
ServerDataBaseConfig,
|
||||
SQLLogLevel,
|
||||
} from '../../../../common/config/private/PrivateConfig';
|
||||
import { AlbumBaseEntity } from './enitites/album/AlbumBaseEntity';
|
||||
import { SavedSearchEntity } from './enitites/album/SavedSearchEntity';
|
||||
import { NotificationManager } from '../../NotifocationManager';
|
||||
|
||||
const LOG_TAG = '[SQLConnection]';
|
||||
|
||||
export class SQLConnection {
|
||||
|
||||
|
||||
private static connection: Connection = null;
|
||||
|
||||
constructor() {
|
||||
}
|
||||
constructor() {}
|
||||
|
||||
public static async getConnection(): Promise<Connection> {
|
||||
if (this.connection == null) {
|
||||
@ -49,24 +55,30 @@ export class SQLConnection {
|
||||
SharingEntity,
|
||||
AlbumBaseEntity,
|
||||
SavedSearchEntity,
|
||||
VersionEntity
|
||||
VersionEntity,
|
||||
];
|
||||
options.synchronize = false;
|
||||
if (Config.Server.Log.sqlLevel !== SQLLogLevel.none) {
|
||||
options.logging = SQLLogLevel[Config.Server.Log.sqlLevel];
|
||||
}
|
||||
Logger.debug(LOG_TAG, 'Creating connection: ' + DatabaseType[Config.Server.Database.type], ', with driver:', options.type);
|
||||
Logger.debug(
|
||||
LOG_TAG,
|
||||
'Creating connection: ' + DatabaseType[Config.Server.Database.type],
|
||||
', with driver:',
|
||||
options.type
|
||||
);
|
||||
this.connection = await this.createConnection(options);
|
||||
await SQLConnection.schemeSync(this.connection);
|
||||
}
|
||||
return this.connection;
|
||||
}
|
||||
|
||||
public static async tryConnection(config: ServerDataBaseConfig): Promise<boolean> {
|
||||
public static async tryConnection(
|
||||
config: ServerDataBaseConfig
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
await getConnection('test').close();
|
||||
} catch (err) {
|
||||
}
|
||||
} catch (err) {}
|
||||
const options: any = this.getDriver(config);
|
||||
options.name = 'test';
|
||||
options.entities = [
|
||||
@ -81,7 +93,7 @@ export class SQLConnection {
|
||||
SharingEntity,
|
||||
AlbumBaseEntity,
|
||||
SavedSearchEntity,
|
||||
VersionEntity
|
||||
VersionEntity,
|
||||
];
|
||||
options.synchronize = false;
|
||||
if (Config.Server.Log.sqlLevel !== SQLLogLevel.none) {
|
||||
@ -101,10 +113,12 @@ export class SQLConnection {
|
||||
}
|
||||
// Adding enforced users to the db
|
||||
const userRepository = connection.getRepository(UserEntity);
|
||||
if (Array.isArray(Config.Server.Database.enforcedUsers) &&
|
||||
Config.Server.Database.enforcedUsers.length > 0) {
|
||||
if (
|
||||
Array.isArray(Config.Server.Database.enforcedUsers) &&
|
||||
Config.Server.Database.enforcedUsers.length > 0
|
||||
) {
|
||||
for (const uc of Config.Server.Database.enforcedUsers) {
|
||||
const user = await userRepository.findOneBy({name: uc.name});
|
||||
const user = await userRepository.findOneBy({ name: uc.name });
|
||||
if (!user) {
|
||||
Logger.info(LOG_TAG, 'Saving enforced user: ' + uc.name);
|
||||
const a = new UserEntity();
|
||||
@ -123,7 +137,7 @@ export class SQLConnection {
|
||||
}
|
||||
|
||||
// Add dummy Admin to the db
|
||||
const admins = await userRepository.findBy({role: UserRoles.Admin});
|
||||
const admins = await userRepository.findBy({ role: UserRoles.Admin });
|
||||
if (admins.length === 0) {
|
||||
const a = new UserEntity();
|
||||
a.name = 'admin';
|
||||
@ -132,11 +146,19 @@ export class SQLConnection {
|
||||
await userRepository.save(a);
|
||||
}
|
||||
|
||||
const defAdmin = await userRepository.findOneBy({name: 'admin', role: UserRoles.Admin});
|
||||
if (defAdmin && PasswordHelper.comparePassword('admin', defAdmin.password)) {
|
||||
NotificationManager.error('Using default admin user!', 'You are using the default admin/admin user/password, please change or remove it.');
|
||||
const defAdmin = await userRepository.findOneBy({
|
||||
name: 'admin',
|
||||
role: UserRoles.Admin,
|
||||
});
|
||||
if (
|
||||
defAdmin &&
|
||||
PasswordHelper.comparePassword('admin', defAdmin.password)
|
||||
) {
|
||||
NotificationManager.error(
|
||||
'Using default admin user!',
|
||||
'You are using the default admin/admin user/password, please change or remove it.'
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static async close(): Promise<void> {
|
||||
@ -155,20 +177,24 @@ export class SQLConnection {
|
||||
return path.join(ProjectPath.getAbsolutePath(config.dbFolder), 'sqlite.db');
|
||||
}
|
||||
|
||||
private static async createConnection(options: DataSourceOptions): Promise<Connection> {
|
||||
private static async createConnection(
|
||||
options: DataSourceOptions
|
||||
): Promise<Connection> {
|
||||
if (options.type === 'sqlite' || options.type === 'better-sqlite3') {
|
||||
return await createConnection(options);
|
||||
}
|
||||
try {
|
||||
return await createConnection(options);
|
||||
} catch (e) {
|
||||
if (e.sqlMessage === 'Unknown database \'' + options.database + '\'') {
|
||||
if (e.sqlMessage === "Unknown database '" + options.database + "'") {
|
||||
Logger.debug(LOG_TAG, 'creating database: ' + options.database);
|
||||
const tmpOption = Utils.clone(options);
|
||||
// @ts-ignore
|
||||
delete tmpOption.database;
|
||||
const tmpConn = await createConnection(tmpOption);
|
||||
await tmpConn.query('CREATE DATABASE IF NOT EXISTS ' + options.database);
|
||||
await tmpConn.query(
|
||||
'CREATE DATABASE IF NOT EXISTS ' + options.database
|
||||
);
|
||||
await tmpConn.close();
|
||||
return await createConnection(options);
|
||||
}
|
||||
@ -180,8 +206,7 @@ export class SQLConnection {
|
||||
let version = null;
|
||||
try {
|
||||
version = (await connection.getRepository(VersionEntity).find())[0];
|
||||
} catch (ex) {
|
||||
}
|
||||
} catch (ex) {}
|
||||
if (version && version.version === DataStructureVersion) {
|
||||
return;
|
||||
}
|
||||
@ -193,9 +218,11 @@ export class SQLConnection {
|
||||
|
||||
let users: UserEntity[] = [];
|
||||
try {
|
||||
users = await connection.getRepository(UserEntity).createQueryBuilder('user').getMany();
|
||||
} catch (ex) {
|
||||
}
|
||||
users = await connection
|
||||
.getRepository(UserEntity)
|
||||
.createQueryBuilder('user')
|
||||
.getMany();
|
||||
} catch (ex) {}
|
||||
await connection.dropDatabase();
|
||||
await connection.synchronize();
|
||||
await connection.getRepository(VersionEntity).save(version);
|
||||
@ -205,7 +232,11 @@ export class SQLConnection {
|
||||
await connection.dropDatabase();
|
||||
await connection.synchronize();
|
||||
await connection.getRepository(VersionEntity).save(version);
|
||||
Logger.warn(LOG_TAG, 'Could not move users to the new db scheme, deleting them. Details:' + e.toString());
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Could not move users to the new db scheme, deleting them. Details:' +
|
||||
e.toString()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -219,16 +250,17 @@ export class SQLConnection {
|
||||
username: config.mysql.username,
|
||||
password: config.mysql.password,
|
||||
database: config.mysql.database,
|
||||
charset: 'utf8mb4'
|
||||
charset: 'utf8mb4',
|
||||
};
|
||||
} else if (config.type === DatabaseType.sqlite) {
|
||||
driver = {
|
||||
type: 'better-sqlite3',
|
||||
database: path.join(ProjectPath.getAbsolutePath(config.dbFolder), config.sqlite.DBFileName)
|
||||
database: path.join(
|
||||
ProjectPath.getAbsolutePath(config.dbFolder),
|
||||
config.sqlite.DBFileName
|
||||
),
|
||||
};
|
||||
|
||||
}
|
||||
return driver;
|
||||
}
|
||||
|
||||
}
|
||||
|
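The getDriver/createConnection pair in SQLConnection above boils down to building one TypeORM driver config per configured database type (MySQL with utf8mb4, or a better-sqlite3 file under the configured db folder). A rough standalone sketch of that pattern follows; the DbConfig shape, toDataSourceOptions and connect are simplified stand-ins (the real config classes live in PrivateConfig, and the file above still uses the older createConnection API rather than DataSource).

import { DataSource, DataSourceOptions } from 'typeorm';
import * as path from 'path';

// Simplified stand-ins for the project's config types.
type DbConfig =
  | { type: 'mysql'; host: string; username: string; password: string; database: string }
  | { type: 'sqlite'; dbFolder: string; fileName: string };

function toDataSourceOptions(config: DbConfig): DataSourceOptions {
  if (config.type === 'mysql') {
    return {
      type: 'mysql',
      host: config.host,
      username: config.username,
      password: config.password,
      database: config.database,
      charset: 'utf8mb4',
    };
  }
  // better-sqlite3 only needs a file path.
  return {
    type: 'better-sqlite3',
    database: path.join(config.dbFolder, config.fileName),
  };
}

// Create the data source once and reuse it, as SQLConnection does.
async function connect(config: DbConfig): Promise<DataSource> {
  const ds = new DataSource(toDataSourceOptions(config));
  return ds.initialize();
}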
File diff suppressed because it is too large
@ -1,35 +1,38 @@
|
||||
import {ISharingManager} from '../interfaces/ISharingManager';
|
||||
import {SharingDTO} from '../../../../common/entities/SharingDTO';
|
||||
import {SQLConnection} from './SQLConnection';
|
||||
import {SharingEntity} from './enitites/SharingEntity';
|
||||
import {Config} from '../../../../common/config/private/Config';
|
||||
import {PasswordHelper} from '../../PasswordHelper';
|
||||
import {DeleteResult, FindOptionsWhere} from 'typeorm';
|
||||
import { ISharingManager } from '../interfaces/ISharingManager';
|
||||
import { SharingDTO } from '../../../../common/entities/SharingDTO';
|
||||
import { SQLConnection } from './SQLConnection';
|
||||
import { SharingEntity } from './enitites/SharingEntity';
|
||||
import { Config } from '../../../../common/config/private/Config';
|
||||
import { PasswordHelper } from '../../PasswordHelper';
|
||||
import { DeleteResult, FindOptionsWhere } from 'typeorm';
|
||||
|
||||
export class SharingManager implements ISharingManager {
|
||||
|
||||
private static async removeExpiredLink(): Promise<DeleteResult> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
return await connection
|
||||
.getRepository(SharingEntity)
|
||||
.createQueryBuilder('share')
|
||||
.where('expires < :now', {now: Date.now()})
|
||||
.where('expires < :now', { now: Date.now() })
|
||||
.delete()
|
||||
.execute();
|
||||
}
|
||||
|
||||
async deleteSharing(sharingKey: string): Promise<void> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const sharing = await connection.getRepository(SharingEntity).findOneBy({sharingKey});
|
||||
const sharing = await connection
|
||||
.getRepository(SharingEntity)
|
||||
.findOneBy({ sharingKey });
|
||||
await connection.getRepository(SharingEntity).remove(sharing);
|
||||
}
|
||||
|
||||
async listAll(): Promise<SharingDTO[]> {
|
||||
await SharingManager.removeExpiredLink();
|
||||
const connection = await SQLConnection.getConnection();
|
||||
return await connection.getRepository(SharingEntity)
|
||||
return await connection
|
||||
.getRepository(SharingEntity)
|
||||
.createQueryBuilder('share')
|
||||
.leftJoinAndSelect('share.creator', 'creator').getMany();
|
||||
.leftJoinAndSelect('share.creator', 'creator')
|
||||
.getMany();
|
||||
}
|
||||
|
||||
async findOne(filter: FindOptionsWhere<SharingDTO>): Promise<SharingDTO> {
|
||||
@ -47,17 +50,23 @@ export class SharingManager implements ISharingManager {
|
||||
return connection.getRepository(SharingEntity).save(sharing);
|
||||
}
|
||||
|
||||
async updateSharing(inSharing: SharingDTO, forceUpdate: boolean): Promise<SharingDTO> {
|
||||
async updateSharing(
|
||||
inSharing: SharingDTO,
|
||||
forceUpdate: boolean
|
||||
): Promise<SharingDTO> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
|
||||
const sharing = await connection.getRepository(SharingEntity).findOneBy({
|
||||
id: inSharing.id,
|
||||
creator: inSharing.creator.id as any,
|
||||
path: inSharing.path
|
||||
path: inSharing.path,
|
||||
});
|
||||
|
||||
if (sharing.timeStamp < Date.now() - Config.Server.Sharing.updateTimeout && forceUpdate !== true) {
|
||||
throw new Error('Sharing is locked, can\'t update anymore');
|
||||
if (
|
||||
sharing.timeStamp < Date.now() - Config.Server.Sharing.updateTimeout &&
|
||||
forceUpdate !== true
|
||||
) {
|
||||
throw new Error("Sharing is locked, can't update anymore");
|
||||
}
|
||||
if (inSharing.password == null) {
|
||||
sharing.password = null;
|
||||
@ -69,6 +78,4 @@ export class SharingManager implements ISharingManager {
|
||||
|
||||
return connection.getRepository(SharingEntity).save(sharing);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
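Two small rules drive SharingManager above: expired links are deleted by comparing `expires` against the current time, and an existing share may only be edited within `Config.Server.Sharing.updateTimeout` of its creation unless the caller forces the update. A minimal sketch of those checks, with a made-up timeout constant standing in for the config value:

// Hypothetical number; the real timeout comes from Config.Server.Sharing.updateTimeout.
const UPDATE_TIMEOUT_MS = 5 * 60 * 1000;

interface ShareLike {
  timeStamp: number; // creation time in ms
  expires: number;   // absolute expiry in ms
}

// A share can only be edited shortly after creation, unless the caller forces it.
function canUpdate(share: ShareLike, forceUpdate: boolean, now = Date.now()): boolean {
  if (forceUpdate) {
    return true;
  }
  return share.timeStamp >= now - UPDATE_TIMEOUT_MS;
}

// Expired shares are filtered/deleted by comparing against the current time.
function isExpired(share: ShareLike, now = Date.now()): boolean {
  return share.expires < now;
}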
@ -1,28 +1,23 @@
|
||||
import {UserDTO, UserRoles} from '../../../../common/entities/UserDTO';
|
||||
import {IUserManager} from '../interfaces/IUserManager';
|
||||
import {UserEntity} from './enitites/UserEntity';
|
||||
import {SQLConnection} from './SQLConnection';
|
||||
import {PasswordHelper} from '../../PasswordHelper';
|
||||
import {FindOptionsWhere} from 'typeorm';
|
||||
|
||||
import { UserDTO, UserRoles } from '../../../../common/entities/UserDTO';
|
||||
import { IUserManager } from '../interfaces/IUserManager';
|
||||
import { UserEntity } from './enitites/UserEntity';
|
||||
import { SQLConnection } from './SQLConnection';
|
||||
import { PasswordHelper } from '../../PasswordHelper';
|
||||
import { FindOptionsWhere } from 'typeorm';
|
||||
|
||||
export class UserManager implements IUserManager {
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
constructor() {}
|
||||
|
||||
public async findOne(filter: FindOptionsWhere<UserEntity>): Promise<any> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const pass = filter.password as string;
|
||||
delete filter.password;
|
||||
const user = (await connection.getRepository(UserEntity).findOneBy(filter));
|
||||
const user = await connection.getRepository(UserEntity).findOneBy(filter);
|
||||
|
||||
if (pass && !PasswordHelper.comparePassword(pass, user.password)) {
|
||||
throw new Error('No entry found');
|
||||
}
|
||||
return user;
|
||||
|
||||
}
|
||||
|
||||
public async find(filter: FindOptionsWhere<UserDTO>): Promise<any> {
|
||||
@ -38,22 +33,19 @@ export class UserManager implements IUserManager {
|
||||
|
||||
public async deleteUser(id: number): Promise<any> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const user = await connection.getRepository(UserEntity).findOneBy({id});
|
||||
const user = await connection.getRepository(UserEntity).findOneBy({ id });
|
||||
return await connection.getRepository(UserEntity).remove(user);
|
||||
}
|
||||
|
||||
public async changeRole(id: number, newRole: UserRoles): Promise<any> {
|
||||
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const userRepository = connection.getRepository(UserEntity);
|
||||
const user = await userRepository.findOneBy({id});
|
||||
const user = await userRepository.findOneBy({ id });
|
||||
user.role = newRole;
|
||||
return userRepository.save(user);
|
||||
|
||||
}
|
||||
|
||||
public async changePassword(request: any): Promise<void> {
|
||||
throw new Error('not implemented'); // TODO: implement
|
||||
}
|
||||
|
||||
}
|
||||
|
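UserManager.findOne above strips the password out of the filter, loads the user by the remaining fields, and only then verifies the password hash in code, so the plain-text password never reaches a SQL WHERE clause. A hedged sketch of that flow using bcrypt as a stand-in for the project's PasswordHelper (whose implementation is not shown in this diff); findByName and authenticate are illustrative names:

import * as bcrypt from 'bcrypt'; // stand-in for the project's PasswordHelper

interface StoredUser {
  name: string;
  password: string; // hash, never compared with ===
}

// Select by name only, then check the hash in application code.
async function authenticate(
  findByName: (name: string) => Promise<StoredUser | undefined>,
  name: string,
  plainPassword: string
): Promise<StoredUser> {
  const user = await findByName(name);
  if (!user || !(await bcrypt.compare(plainPassword, user.password))) {
    throw new Error('No entry found');
  }
  return user;
}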
@ -1,17 +1,16 @@
|
||||
import * as crypto from 'crypto';
|
||||
import {IVersionManager} from '../interfaces/IVersionManager';
|
||||
import {DataStructureVersion} from '../../../../common/DataStructureVersion';
|
||||
import {SQLConnection} from './SQLConnection';
|
||||
import {DirectoryEntity} from './enitites/DirectoryEntity';
|
||||
import {MediaEntity} from './enitites/MediaEntity';
|
||||
import { IVersionManager } from '../interfaces/IVersionManager';
|
||||
import { DataStructureVersion } from '../../../../common/DataStructureVersion';
|
||||
import { SQLConnection } from './SQLConnection';
|
||||
import { DirectoryEntity } from './enitites/DirectoryEntity';
|
||||
import { MediaEntity } from './enitites/MediaEntity';
|
||||
|
||||
export class VersionManager implements IVersionManager {
|
||||
|
||||
private allMediaCount = 0;
|
||||
private latestDirectoryStatus: {
|
||||
name: string,
|
||||
lastModified: number,
|
||||
mediaCount: number
|
||||
name: string;
|
||||
lastModified: number;
|
||||
mediaCount: number;
|
||||
} = null;
|
||||
|
||||
async getDataVersion(): Promise<string> {
|
||||
@ -23,22 +22,31 @@ export class VersionManager implements IVersionManager {
|
||||
return DataStructureVersion.toString();
|
||||
}
|
||||
|
||||
const versionString = DataStructureVersion + '_' +
|
||||
this.latestDirectoryStatus.name + '_' +
|
||||
this.latestDirectoryStatus.lastModified + '_' +
|
||||
this.latestDirectoryStatus.mediaCount + '_' +
|
||||
const versionString =
|
||||
DataStructureVersion +
|
||||
'_' +
|
||||
this.latestDirectoryStatus.name +
|
||||
'_' +
|
||||
this.latestDirectoryStatus.lastModified +
|
||||
'_' +
|
||||
this.latestDirectoryStatus.mediaCount +
|
||||
'_' +
|
||||
this.allMediaCount;
|
||||
return crypto.createHash('md5').update(versionString).digest('hex');
|
||||
}
|
||||
|
||||
async onNewDataVersion(): Promise<void> {
|
||||
const connection = await SQLConnection.getConnection();
|
||||
const dir = await connection.getRepository(DirectoryEntity)
|
||||
const dir = await connection
|
||||
.getRepository(DirectoryEntity)
|
||||
.createQueryBuilder('directory')
|
||||
.limit(1)
|
||||
.orderBy('directory.lastModified').getOne();
|
||||
this.allMediaCount = await connection.getRepository(MediaEntity)
|
||||
.createQueryBuilder('media').getCount();
|
||||
.orderBy('directory.lastModified')
|
||||
.getOne();
|
||||
this.allMediaCount = await connection
|
||||
.getRepository(MediaEntity)
|
||||
.createQueryBuilder('media')
|
||||
.getCount();
|
||||
|
||||
if (!dir) {
|
||||
return;
|
||||
@ -46,9 +54,7 @@ export class VersionManager implements IVersionManager {
|
||||
this.latestDirectoryStatus = {
|
||||
mediaCount: dir.mediaCount,
|
||||
lastModified: dir.lastModified,
|
||||
name: dir.name
|
||||
name: dir.name,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
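The data version computed by VersionManager above is just an md5 of the data-structure version, the most recently modified directory's name/mtime/media count, and the total media count, so any gallery change produces a new token. A small standalone sketch with Node's crypto module (input values in the example comment are made up):

import * as crypto from 'crypto';

// Inputs mirror what VersionManager tracks.
function dataVersionHash(
  structureVersion: number,
  latestDir: { name: string; lastModified: number; mediaCount: number },
  allMediaCount: number
): string {
  const versionString =
    structureVersion + '_' +
    latestDir.name + '_' +
    latestDir.lastModified + '_' +
    latestDir.mediaCount + '_' +
    allMediaCount;
  // Any change in the gallery content changes the hash, which clients can
  // use as a cheap cache-busting token.
  return crypto.createHash('md5').update(versionString).digest('hex');
}

// Example (hypothetical values):
// dataVersionHash(30, { name: '2021', lastModified: 1650000000000, mediaCount: 12 }, 3400)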
@ -1,16 +1,28 @@
|
||||
import {Column, Entity, Index, ManyToOne, OneToMany, PrimaryGeneratedColumn, Unique} from 'typeorm';
|
||||
import {ParentDirectoryDTO, SubDirectoryDTO} from '../../../../../common/entities/DirectoryDTO';
|
||||
import {MediaEntity} from './MediaEntity';
|
||||
import {FileEntity} from './FileEntity';
|
||||
import {columnCharsetCS} from './EntityUtils';
|
||||
import {MediaDTO} from '../../../../../common/entities/MediaDTO';
|
||||
import {
|
||||
Column,
|
||||
Entity,
|
||||
Index,
|
||||
ManyToOne,
|
||||
OneToMany,
|
||||
PrimaryGeneratedColumn,
|
||||
Unique,
|
||||
} from 'typeorm';
|
||||
import {
|
||||
ParentDirectoryDTO,
|
||||
SubDirectoryDTO,
|
||||
} from '../../../../../common/entities/DirectoryDTO';
|
||||
import { MediaEntity } from './MediaEntity';
|
||||
import { FileEntity } from './FileEntity';
|
||||
import { columnCharsetCS } from './EntityUtils';
|
||||
import { MediaDTO } from '../../../../../common/entities/MediaDTO';
|
||||
|
||||
@Entity()
|
||||
@Unique(['name', 'path'])
|
||||
export class DirectoryEntity implements ParentDirectoryDTO<MediaDTO>, SubDirectoryDTO<MediaDTO> {
|
||||
|
||||
export class DirectoryEntity
|
||||
implements ParentDirectoryDTO<MediaDTO>, SubDirectoryDTO<MediaDTO>
|
||||
{
|
||||
@Index()
|
||||
@PrimaryGeneratedColumn({unsigned: true})
|
||||
@PrimaryGeneratedColumn({ unsigned: true })
|
||||
id: number;
|
||||
|
||||
@Index()
|
||||
@ -25,10 +37,11 @@ export class DirectoryEntity implements ParentDirectoryDTO<MediaDTO>, SubDirecto
|
||||
* last time the directory was modified (from outside, eg.: a new media was added)
|
||||
*/
|
||||
@Column('bigint', {
|
||||
unsigned: true, transformer: {
|
||||
from: v => parseInt(v, 10),
|
||||
to: v => v
|
||||
}
|
||||
unsigned: true,
|
||||
transformer: {
|
||||
from: (v) => parseInt(v, 10),
|
||||
to: (v) => v,
|
||||
},
|
||||
})
|
||||
public lastModified: number;
|
||||
|
||||
@ -36,37 +49,41 @@ export class DirectoryEntity implements ParentDirectoryDTO<MediaDTO>, SubDirecto
|
||||
* Last time the directory was fully scanned, not only for a few media to create a preview
|
||||
*/
|
||||
@Column({
|
||||
type: 'bigint', nullable: true, unsigned: true, transformer: {
|
||||
from: v => parseInt(v, 10) || null,
|
||||
to: v => v
|
||||
}
|
||||
type: 'bigint',
|
||||
nullable: true,
|
||||
unsigned: true,
|
||||
transformer: {
|
||||
from: (v) => parseInt(v, 10) || null,
|
||||
to: (v) => v,
|
||||
},
|
||||
})
|
||||
public lastScanned: number;
|
||||
|
||||
isPartial?: boolean;
|
||||
|
||||
@Column('smallint', {unsigned: true})
|
||||
@Column('smallint', { unsigned: true })
|
||||
mediaCount: number;
|
||||
|
||||
@Index()
|
||||
@ManyToOne(type => DirectoryEntity, directory => directory.directories, {onDelete: 'CASCADE'})
|
||||
@ManyToOne((type) => DirectoryEntity, (directory) => directory.directories, {
|
||||
onDelete: 'CASCADE',
|
||||
})
|
||||
public parent: DirectoryEntity;
|
||||
|
||||
@OneToMany(type => DirectoryEntity, dir => dir.parent)
|
||||
@OneToMany((type) => DirectoryEntity, (dir) => dir.parent)
|
||||
public directories: DirectoryEntity[];
|
||||
|
||||
// not saving to database, it is only assigned when querying the DB
|
||||
@ManyToOne(type => MediaEntity, {onDelete: 'SET NULL'})
|
||||
@ManyToOne((type) => MediaEntity, { onDelete: 'SET NULL' })
|
||||
public preview: MediaEntity;
|
||||
|
||||
// On gallery change, preview will be invalid
|
||||
@Column({type: 'boolean', default: false})
|
||||
@Column({ type: 'boolean', default: false })
|
||||
validPreview: boolean;
|
||||
|
||||
@OneToMany(type => MediaEntity, media => media.directory)
|
||||
@OneToMany((type) => MediaEntity, (media) => media.directory)
|
||||
public media: MediaEntity[];
|
||||
|
||||
@OneToMany(type => FileEntity, file => file.directory)
|
||||
@OneToMany((type) => FileEntity, (file) => file.directory)
|
||||
public metaFile: FileEntity[];
|
||||
|
||||
}
|
||||
|
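The lastModified/lastScanned columns above use a TypeORM value transformer because bigint columns come back from the SQL drivers as strings. A minimal sketch of that pattern, using an illustrative ExampleDirectory entity (the null-on-read variant mirrors the nullable lastScanned column):

import { Column, Entity, PrimaryGeneratedColumn, ValueTransformer } from 'typeorm';

// bigint arrives from the driver as a string; store numbers as-is,
// parse to number on read (falls back to null for empty values).
const bigIntToNumber: ValueTransformer = {
  to: (v: number) => v,
  from: (v: string) => parseInt(v, 10) || null,
};

@Entity()
class ExampleDirectory {
  @PrimaryGeneratedColumn({ unsigned: true })
  id: number;

  @Column('bigint', { unsigned: true, nullable: true, transformer: bigIntToNumber })
  lastScanned: number | null;
}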
@ -1,16 +1,18 @@
import {Config} from '../../../../../common/config/private/Config';
import {ColumnOptions} from 'typeorm/decorator/options/ColumnOptions';
import {DatabaseType} from '../../../../../common/config/private/PrivateConfig';
import { Config } from '../../../../../common/config/private/Config';
import { ColumnOptions } from 'typeorm/decorator/options/ColumnOptions';
import { DatabaseType } from '../../../../../common/config/private/PrivateConfig';

export class ColumnCharsetCS implements ColumnOptions {

  public get charset(): string {
    return Config.Server.Database.type === DatabaseType.mysql ? 'utf8mb4' : 'utf8';
    return Config.Server.Database.type === DatabaseType.mysql
      ? 'utf8mb4'
      : 'utf8';
  }

  public get collation(): string {
    return Config.Server.Database.type === DatabaseType.mysql ? 'utf8mb4_bin' : null;

    return Config.Server.Database.type === DatabaseType.mysql
      ? 'utf8mb4_bin'
      : null;
  }
}
|
||||
|
||||
|
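ColumnCharsetCS exposes charset and collation as getters, so the values follow the configured database type when the decorators are evaluated. The entities in this commit pass it to @Column either whole or field by field; a hedged usage sketch, assuming the module exports an instance named columnCharsetCS as the other imports suggest:

import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm';
import { columnCharsetCS } from './EntityUtils'; // assumed exported instance of ColumnCharsetCS

@Entity()
class ExampleEntity {
  @PrimaryGeneratedColumn({ unsigned: true })
  id: number;

  // Whole option object: utf8mb4/utf8mb4_bin on MySQL, utf8 with default collation otherwise.
  @Column(columnCharsetCS)
  name: string;

  // Or reuse only the charset/collation fields inside a larger option object.
  @Column({
    type: 'text',
    nullable: true,
    charset: columnCharsetCS.charset,
    collation: columnCharsetCS.collation,
  })
  caption: string;
}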
@ -1,7 +1,7 @@
|
||||
import {FaceRegion, FaceRegionBox} from '../../../../../common/entities/PhotoDTO';
|
||||
import {Column, Entity, ManyToOne, PrimaryGeneratedColumn} from 'typeorm';
|
||||
import {PersonEntry} from './PersonEntry';
|
||||
import {MediaEntity} from './MediaEntity';
|
||||
import { FaceRegionBox } from '../../../../../common/entities/PhotoDTO';
|
||||
import { Column, Entity, ManyToOne, PrimaryGeneratedColumn } from 'typeorm';
|
||||
import { PersonEntry } from './PersonEntry';
|
||||
import { MediaEntity } from './MediaEntity';
|
||||
|
||||
export class FaceRegionBoxEntry implements FaceRegionBox {
|
||||
@Column('int')
|
||||
@ -19,19 +19,23 @@ export class FaceRegionBoxEntry implements FaceRegionBox {
|
||||
*/
|
||||
@Entity()
|
||||
export class FaceRegionEntry {
|
||||
|
||||
@PrimaryGeneratedColumn({unsigned: true})
|
||||
@PrimaryGeneratedColumn({ unsigned: true })
|
||||
id: number;
|
||||
|
||||
@Column(type => FaceRegionBoxEntry)
|
||||
@Column((type) => FaceRegionBoxEntry)
|
||||
box: FaceRegionBoxEntry;
|
||||
|
||||
@ManyToOne(type => MediaEntity, media => media.metadata.faces, {onDelete: 'CASCADE', nullable: false})
|
||||
@ManyToOne((type) => MediaEntity, (media) => media.metadata.faces, {
|
||||
onDelete: 'CASCADE',
|
||||
nullable: false,
|
||||
})
|
||||
media: MediaEntity;
|
||||
|
||||
@ManyToOne(type => PersonEntry, person => person.faces, {onDelete: 'CASCADE', nullable: false})
|
||||
@ManyToOne((type) => PersonEntry, (person) => person.faces, {
|
||||
onDelete: 'CASCADE',
|
||||
nullable: false,
|
||||
})
|
||||
person: PersonEntry;
|
||||
|
||||
name: string;
|
||||
|
||||
}
|
||||
|
@ -1,20 +1,27 @@
|
||||
import {Column, Entity, Index, ManyToOne, PrimaryGeneratedColumn} from 'typeorm';
|
||||
import {DirectoryEntity} from './DirectoryEntity';
|
||||
import {FileDTO} from '../../../../../common/entities/FileDTO';
|
||||
import {columnCharsetCS} from './EntityUtils';
|
||||
|
||||
import {
|
||||
Column,
|
||||
Entity,
|
||||
Index,
|
||||
ManyToOne,
|
||||
PrimaryGeneratedColumn,
|
||||
} from 'typeorm';
|
||||
import { DirectoryEntity } from './DirectoryEntity';
|
||||
import { FileDTO } from '../../../../../common/entities/FileDTO';
|
||||
import { columnCharsetCS } from './EntityUtils';
|
||||
|
||||
@Entity()
|
||||
export class FileEntity implements FileDTO {
|
||||
|
||||
@Index()
|
||||
@PrimaryGeneratedColumn({unsigned: true})
|
||||
@PrimaryGeneratedColumn({ unsigned: true })
|
||||
id: number;
|
||||
|
||||
@Column(columnCharsetCS)
|
||||
name: string;
|
||||
|
||||
@Index()
|
||||
@ManyToOne(type => DirectoryEntity, directory => directory.metaFile, {onDelete: 'CASCADE', nullable: false})
|
||||
@ManyToOne((type) => DirectoryEntity, (directory) => directory.metaFile, {
|
||||
onDelete: 'CASCADE',
|
||||
nullable: false,
|
||||
})
|
||||
directory: DirectoryEntity;
|
||||
}
|
||||
|
@ -1,12 +1,28 @@
|
||||
import {Column, Entity, Index, ManyToOne, OneToMany, PrimaryGeneratedColumn, TableInheritance, Unique} from 'typeorm';
|
||||
import {DirectoryEntity} from './DirectoryEntity';
|
||||
import {MediaDimension, MediaDTO, MediaMetadata} from '../../../../../common/entities/MediaDTO';
|
||||
import {FaceRegionEntry} from './FaceRegionEntry';
|
||||
import {columnCharsetCS} from './EntityUtils';
|
||||
import {CameraMetadata, GPSMetadata, PositionMetaData} from '../../../../../common/entities/PhotoDTO';
|
||||
import {
|
||||
Column,
|
||||
Entity,
|
||||
Index,
|
||||
ManyToOne,
|
||||
OneToMany,
|
||||
PrimaryGeneratedColumn,
|
||||
TableInheritance,
|
||||
Unique,
|
||||
} from 'typeorm';
|
||||
import { DirectoryEntity } from './DirectoryEntity';
|
||||
import {
|
||||
MediaDimension,
|
||||
MediaDTO,
|
||||
MediaMetadata,
|
||||
} from '../../../../../common/entities/MediaDTO';
|
||||
import { FaceRegionEntry } from './FaceRegionEntry';
|
||||
import { columnCharsetCS } from './EntityUtils';
|
||||
import {
|
||||
CameraMetadata,
|
||||
GPSMetadata,
|
||||
PositionMetaData,
|
||||
} from '../../../../../common/entities/PhotoDTO';
|
||||
|
||||
export class MediaDimensionEntity implements MediaDimension {
|
||||
|
||||
@Column('int')
|
||||
width: number;
|
||||
|
||||
@ -14,84 +30,80 @@ export class MediaDimensionEntity implements MediaDimension {
|
||||
height: number;
|
||||
}
|
||||
|
||||
|
||||
export class CameraMetadataEntity implements CameraMetadata {
|
||||
|
||||
@Column('int', {nullable: true, unsigned: true})
|
||||
@Column('int', { nullable: true, unsigned: true })
|
||||
ISO: number;
|
||||
|
||||
|
||||
@Column({
|
||||
type: 'text', nullable: true,
|
||||
type: 'text',
|
||||
nullable: true,
|
||||
charset: columnCharsetCS.charset,
|
||||
collation: columnCharsetCS.collation
|
||||
collation: columnCharsetCS.collation,
|
||||
})
|
||||
model: string;
|
||||
|
||||
|
||||
@Column({
|
||||
type: 'text', nullable: true,
|
||||
type: 'text',
|
||||
nullable: true,
|
||||
charset: columnCharsetCS.charset,
|
||||
collation: columnCharsetCS.collation
|
||||
collation: columnCharsetCS.collation,
|
||||
})
|
||||
make: string;
|
||||
|
||||
@Column('float', {nullable: true})
|
||||
@Column('float', { nullable: true })
|
||||
fStop: number;
|
||||
|
||||
@Column('float', {nullable: true})
|
||||
@Column('float', { nullable: true })
|
||||
exposure: number;
|
||||
|
||||
@Column('float', {nullable: true})
|
||||
@Column('float', { nullable: true })
|
||||
focalLength: number;
|
||||
|
||||
@Column('text', {nullable: true})
|
||||
@Column('text', { nullable: true })
|
||||
lens: string;
|
||||
}
|
||||
|
||||
|
||||
export class GPSMetadataEntity implements GPSMetadata {
|
||||
|
||||
@Column('float', {nullable: true})
|
||||
@Column('float', { nullable: true })
|
||||
latitude: number;
|
||||
@Column('float', {nullable: true})
|
||||
@Column('float', { nullable: true })
|
||||
longitude: number;
|
||||
}
|
||||
|
||||
|
||||
export class PositionMetaDataEntity implements PositionMetaData {
|
||||
|
||||
@Column(type => GPSMetadataEntity)
|
||||
@Column((type) => GPSMetadataEntity)
|
||||
GPSData: GPSMetadataEntity;
|
||||
|
||||
@Column({
|
||||
type: 'text', nullable: true,
|
||||
type: 'text',
|
||||
nullable: true,
|
||||
charset: columnCharsetCS.charset,
|
||||
collation: columnCharsetCS.collation
|
||||
collation: columnCharsetCS.collation,
|
||||
})
|
||||
country: string;
|
||||
|
||||
@Column({
|
||||
type: 'text', nullable: true,
|
||||
type: 'text',
|
||||
nullable: true,
|
||||
charset: columnCharsetCS.charset,
|
||||
collation: columnCharsetCS.collation
|
||||
collation: columnCharsetCS.collation,
|
||||
})
|
||||
state: string;
|
||||
|
||||
@Column({
|
||||
type: 'text', nullable: true,
|
||||
type: 'text',
|
||||
nullable: true,
|
||||
charset: columnCharsetCS.charset,
|
||||
collation: columnCharsetCS.collation
|
||||
collation: columnCharsetCS.collation,
|
||||
})
|
||||
city: string;
|
||||
}
|
||||
|
||||
|
||||
export class MediaMetadataEntity implements MediaMetadata {
|
||||
@Column('text')
|
||||
caption: string;
|
||||
|
||||
@Column(type => MediaDimensionEntity)
|
||||
@Column((type) => MediaDimensionEntity)
|
||||
size: MediaDimensionEntity;
|
||||
|
||||
/**
|
||||
@ -101,70 +113,73 @@ export class MediaMetadataEntity implements MediaMetadata {
|
||||
*/
|
||||
@Column('bigint', {
|
||||
transformer: {
|
||||
from: v => parseInt(v, 10),
|
||||
to: v => v
|
||||
}
|
||||
from: (v) => parseInt(v, 10),
|
||||
to: (v) => v,
|
||||
},
|
||||
})
|
||||
creationDate: number;
|
||||
|
||||
@Column('int', {unsigned: true})
|
||||
@Column('int', { unsigned: true })
|
||||
fileSize: number;
|
||||
|
||||
@Column({
|
||||
type: 'simple-array',
|
||||
charset: columnCharsetCS.charset,
|
||||
collation: columnCharsetCS.collation
|
||||
collation: columnCharsetCS.collation,
|
||||
})
|
||||
keywords: string[];
|
||||
|
||||
@Column(type => CameraMetadataEntity)
|
||||
@Column((type) => CameraMetadataEntity)
|
||||
cameraData: CameraMetadataEntity;
|
||||
|
||||
@Column(type => PositionMetaDataEntity)
|
||||
@Column((type) => PositionMetaDataEntity)
|
||||
positionData: PositionMetaDataEntity;
|
||||
|
||||
@Column('tinyint', {unsigned: true})
|
||||
@Column('tinyint', { unsigned: true })
|
||||
rating: 0 | 1 | 2 | 3 | 4 | 5;
|
||||
|
||||
@OneToMany(type => FaceRegionEntry, faceRegion => faceRegion.media)
|
||||
@OneToMany((type) => FaceRegionEntry, (faceRegion) => faceRegion.media)
|
||||
faces: FaceRegionEntry[];
|
||||
|
||||
/**
|
||||
* Caches the list of persons. Only used for searching
|
||||
*/
|
||||
@Column({
|
||||
type: 'simple-array', select: false, nullable: true,
|
||||
type: 'simple-array',
|
||||
select: false,
|
||||
nullable: true,
|
||||
charset: columnCharsetCS.charset,
|
||||
collation: columnCharsetCS.collation
|
||||
collation: columnCharsetCS.collation,
|
||||
})
|
||||
persons: string[];
|
||||
|
||||
@Column('int', {unsigned: true})
|
||||
@Column('int', { unsigned: true })
|
||||
bitRate: number;
|
||||
|
||||
@Column('int', {unsigned: true})
|
||||
@Column('int', { unsigned: true })
|
||||
duration: number;
|
||||
}
|
||||
|
||||
|
||||
// TODO: fix inheritance once its working in typeorm
|
||||
@Entity()
|
||||
@Unique(['name', 'directory'])
|
||||
@TableInheritance({column: {type: 'varchar', name: 'type', length: 16}})
|
||||
@TableInheritance({ column: { type: 'varchar', name: 'type', length: 16 } })
|
||||
export abstract class MediaEntity implements MediaDTO {
|
||||
|
||||
@Index()
|
||||
@PrimaryGeneratedColumn({unsigned: true})
|
||||
@PrimaryGeneratedColumn({ unsigned: true })
|
||||
id: number;
|
||||
|
||||
@Column(columnCharsetCS)
|
||||
name: string;
|
||||
|
||||
@Index()
|
||||
@ManyToOne(type => DirectoryEntity, directory => directory.media, {onDelete: 'CASCADE', nullable: false})
|
||||
@ManyToOne((type) => DirectoryEntity, (directory) => directory.media, {
|
||||
onDelete: 'CASCADE',
|
||||
nullable: false,
|
||||
})
|
||||
directory: DirectoryEntity;
|
||||
|
||||
@Column(type => MediaMetadataEntity)
|
||||
@Column((type) => MediaMetadataEntity)
|
||||
metadata: MediaMetadataEntity;
|
||||
|
||||
missingThumbnails: number;
|
||||
|
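MediaEntity above is an abstract base marked with @TableInheritance, and PhotoEntity/VideoEntity are @ChildEntity subclasses stored in the same table, told apart by the 'type' discriminator column. A minimal sketch of that TypeORM single-table-inheritance pattern with illustrative MediaBase/Photo/Video classes (not the project's real columns):

import { ChildEntity, Column, Entity, PrimaryGeneratedColumn, TableInheritance } from 'typeorm';

// One physical table; the 'type' varchar column tells photos and videos apart.
@Entity()
@TableInheritance({ column: { type: 'varchar', name: 'type', length: 16 } })
abstract class MediaBase {
  @PrimaryGeneratedColumn({ unsigned: true })
  id: number;

  @Column()
  name: string;
}

@ChildEntity()
class Photo extends MediaBase {
  @Column('float', { nullable: true })
  fStop: number;
}

@ChildEntity()
class Video extends MediaBase {
  @Column('int')
  bitRate: number;
}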
@ -1,31 +1,38 @@
|
||||
import {Column, Entity, Index, ManyToOne, OneToMany, PrimaryGeneratedColumn, Unique} from 'typeorm';
|
||||
import {FaceRegionEntry} from './FaceRegionEntry';
|
||||
import {columnCharsetCS} from './EntityUtils';
|
||||
import {PersonWithSampleRegion} from '../../../../../common/entities/PersonDTO';
|
||||
|
||||
import {
|
||||
Column,
|
||||
Entity,
|
||||
Index,
|
||||
ManyToOne,
|
||||
OneToMany,
|
||||
PrimaryGeneratedColumn,
|
||||
Unique,
|
||||
} from 'typeorm';
|
||||
import { FaceRegionEntry } from './FaceRegionEntry';
|
||||
import { columnCharsetCS } from './EntityUtils';
|
||||
import { PersonWithSampleRegion } from '../../../../../common/entities/PersonDTO';
|
||||
|
||||
@Entity()
|
||||
@Unique(['name'])
|
||||
export class PersonEntry implements PersonWithSampleRegion {
|
||||
|
||||
@Index()
|
||||
@PrimaryGeneratedColumn({unsigned: true})
|
||||
@PrimaryGeneratedColumn({ unsigned: true })
|
||||
id: number;
|
||||
|
||||
@Column(columnCharsetCS)
|
||||
name: string;
|
||||
|
||||
@Column('int', {unsigned: true, default: 0})
|
||||
@Column('int', { unsigned: true, default: 0 })
|
||||
count: number;
|
||||
|
||||
@Column({default: false})
|
||||
@Column({ default: false })
|
||||
isFavourite: boolean;
|
||||
|
||||
@OneToMany(type => FaceRegionEntry, faceRegion => faceRegion.person)
|
||||
@OneToMany((type) => FaceRegionEntry, (faceRegion) => faceRegion.person)
|
||||
public faces: FaceRegionEntry[];
|
||||
|
||||
@ManyToOne(type => FaceRegionEntry, {onDelete: 'SET NULL', nullable: true})
|
||||
@ManyToOne((type) => FaceRegionEntry, {
|
||||
onDelete: 'SET NULL',
|
||||
nullable: true,
|
||||
})
|
||||
sampleRegion: FaceRegionEntry;
|
||||
|
||||
|
||||
}
|
||||
|
@ -1,12 +1,13 @@
|
||||
import {ChildEntity, Column} from 'typeorm';
|
||||
import {CameraMetadata, GPSMetadata, PhotoDTO, PhotoMetadata, PositionMetaData} from '../../../../../common/entities/PhotoDTO';
|
||||
import {MediaEntity, MediaMetadataEntity} from './MediaEntity';
|
||||
import {columnCharsetCS} from './EntityUtils';
|
||||
import { ChildEntity, Column } from 'typeorm';
|
||||
import {
|
||||
PhotoDTO,
|
||||
PhotoMetadata,
|
||||
} from '../../../../../common/entities/PhotoDTO';
|
||||
import { MediaEntity, MediaMetadataEntity } from './MediaEntity';
|
||||
|
||||
|
||||
|
||||
|
||||
export class PhotoMetadataEntity extends MediaMetadataEntity implements PhotoMetadata {
|
||||
export class PhotoMetadataEntity
|
||||
extends MediaMetadataEntity
|
||||
implements PhotoMetadata {
|
||||
/*
|
||||
@Column('simple-array')
|
||||
keywords: string[];
|
||||
@ -22,9 +23,8 @@ export class PhotoMetadataEntity extends MediaMetadataEntity implements PhotoMet
|
||||
*/
|
||||
}
|
||||
|
||||
|
||||
@ChildEntity()
|
||||
export class PhotoEntity extends MediaEntity implements PhotoDTO {
|
||||
@Column(type => PhotoMetadataEntity)
|
||||
@Column((type) => PhotoMetadataEntity)
|
||||
metadata: PhotoMetadataEntity;
|
||||
}
|
||||
|
@ -1,11 +1,11 @@
|
||||
import {Column, Entity, ManyToOne, PrimaryGeneratedColumn} from 'typeorm';
|
||||
import {SharingDTO} from '../../../../../common/entities/SharingDTO';
|
||||
import {UserEntity} from './UserEntity';
|
||||
import {UserDTO} from '../../../../../common/entities/UserDTO';
|
||||
import { Column, Entity, ManyToOne, PrimaryGeneratedColumn } from 'typeorm';
|
||||
import { SharingDTO } from '../../../../../common/entities/SharingDTO';
|
||||
import { UserEntity } from './UserEntity';
|
||||
import { UserDTO } from '../../../../../common/entities/UserDTO';
|
||||
|
||||
@Entity()
|
||||
export class SharingEntity implements SharingDTO {
|
||||
@PrimaryGeneratedColumn({unsigned: true})
|
||||
@PrimaryGeneratedColumn({ unsigned: true })
|
||||
id: number;
|
||||
|
||||
@Column()
|
||||
@ -14,28 +14,30 @@ export class SharingEntity implements SharingDTO {
|
||||
@Column()
|
||||
path: string;
|
||||
|
||||
@Column({type: 'text', nullable: true})
|
||||
@Column({ type: 'text', nullable: true })
|
||||
password: string;
|
||||
|
||||
@Column('bigint', {
|
||||
unsigned: true, transformer: {
|
||||
from: v => parseInt(v, 10),
|
||||
to: v => v
|
||||
}
|
||||
unsigned: true,
|
||||
transformer: {
|
||||
from: (v) => parseInt(v, 10),
|
||||
to: (v) => v,
|
||||
},
|
||||
})
|
||||
expires: number;
|
||||
|
||||
@Column('bigint', {
|
||||
unsigned: true, transformer: {
|
||||
from: v => parseInt(v, 10),
|
||||
to: v => v
|
||||
}
|
||||
unsigned: true,
|
||||
transformer: {
|
||||
from: (v) => parseInt(v, 10),
|
||||
to: (v) => v,
|
||||
},
|
||||
})
|
||||
timeStamp: number;
|
||||
|
||||
@Column()
|
||||
includeSubfolders: boolean;
|
||||
|
||||
@ManyToOne(type => UserEntity, {onDelete: 'CASCADE', nullable: false})
|
||||
@ManyToOne((type) => UserEntity, { onDelete: 'CASCADE', nullable: false })
|
||||
creator: UserDTO;
|
||||
}
|
||||
|
@ -1,10 +1,9 @@
import {UserDTO, UserRoles} from '../../../../../common/entities/UserDTO';
import {Column, Entity, PrimaryGeneratedColumn, Unique} from 'typeorm';
import { UserDTO, UserRoles } from '../../../../../common/entities/UserDTO';
import { Column, Entity, PrimaryGeneratedColumn, Unique } from 'typeorm';

@Entity()
@Unique(['name'])
export class UserEntity implements UserDTO {

  @PrimaryGeneratedColumn()
  id: number;

@ -17,7 +16,6 @@ export class UserEntity implements UserDTO {
  @Column('smallint')
  role: UserRoles;

  @Column('simple-array', {nullable: true})
  @Column('simple-array', { nullable: true })
  permissions: string[];

}
||||
|
@ -1,12 +1,10 @@
import {Column, Entity, PrimaryGeneratedColumn} from 'typeorm';
import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm';

@Entity()
export class VersionEntity {

  @PrimaryGeneratedColumn()
  id: number;

  @Column()
  version: number;

}
|
||||
|
@ -1,29 +1,33 @@
|
||||
import {ChildEntity, Column} from 'typeorm';
|
||||
import {MediaEntity, MediaMetadataEntity} from './MediaEntity';
|
||||
import {VideoDTO, VideoMetadata} from '../../../../../common/entities/VideoDTO';
|
||||
|
||||
|
||||
export class VideoMetadataEntity extends MediaMetadataEntity implements VideoMetadata {
|
||||
import { ChildEntity, Column } from 'typeorm';
|
||||
import { MediaEntity, MediaMetadataEntity } from './MediaEntity';
|
||||
import {
|
||||
VideoDTO,
|
||||
VideoMetadata,
|
||||
} from '../../../../../common/entities/VideoDTO';
|
||||
|
||||
export class VideoMetadataEntity
|
||||
extends MediaMetadataEntity
|
||||
implements VideoMetadata
|
||||
{
|
||||
@Column('int')
|
||||
bitRate: number;
|
||||
|
||||
@Column('bigint', {
|
||||
unsigned: true, nullable: true, transformer: {
|
||||
from: v => parseInt(v, 10) || null,
|
||||
to: v => v
|
||||
}
|
||||
unsigned: true,
|
||||
nullable: true,
|
||||
transformer: {
|
||||
from: (v) => parseInt(v, 10) || null,
|
||||
to: (v) => v,
|
||||
},
|
||||
})
|
||||
duration: number;
|
||||
|
||||
@Column('int')
|
||||
fps: number;
|
||||
|
||||
}
|
||||
|
||||
|
||||
@ChildEntity()
|
||||
export class VideoEntity extends MediaEntity implements VideoDTO {
|
||||
@Column(type => VideoMetadataEntity)
|
||||
@Column((type) => VideoMetadataEntity)
|
||||
metadata: VideoMetadataEntity;
|
||||
}
|
||||
|
@ -1,14 +1,20 @@
|
||||
import {Column, Entity, Index, ManyToOne, PrimaryGeneratedColumn, TableInheritance} from 'typeorm';
|
||||
import {MediaEntity} from '../MediaEntity';
|
||||
import {columnCharsetCS} from '../EntityUtils';
|
||||
import {AlbumBaseDTO} from '../../../../../../common/entities/album/AlbumBaseDTO';
|
||||
import {
|
||||
Column,
|
||||
Entity,
|
||||
Index,
|
||||
ManyToOne,
|
||||
PrimaryGeneratedColumn,
|
||||
TableInheritance,
|
||||
} from 'typeorm';
|
||||
import { MediaEntity } from '../MediaEntity';
|
||||
import { columnCharsetCS } from '../EntityUtils';
|
||||
import { AlbumBaseDTO } from '../../../../../../common/entities/album/AlbumBaseDTO';
|
||||
|
||||
@Entity()
|
||||
@TableInheritance({column: {type: 'varchar', name: 'type', length: 24}})
|
||||
@TableInheritance({ column: { type: 'varchar', name: 'type', length: 24 } })
|
||||
export class AlbumBaseEntity implements AlbumBaseDTO {
|
||||
|
||||
@Index()
|
||||
@PrimaryGeneratedColumn({unsigned: true})
|
||||
@PrimaryGeneratedColumn({ unsigned: true })
|
||||
id: number;
|
||||
|
||||
@Index()
|
||||
@ -18,13 +24,12 @@ export class AlbumBaseEntity implements AlbumBaseDTO {
|
||||
/**
|
||||
* Locked albums are not possible to remove
|
||||
*/
|
||||
@Column({default: false})
|
||||
@Column({ default: false })
|
||||
locked: boolean;
|
||||
|
||||
@Column('int', {unsigned: true, default: 0})
|
||||
@Column('int', { unsigned: true, default: 0 })
|
||||
count: number;
|
||||
|
||||
@ManyToOne(type => MediaEntity, {onDelete: 'SET NULL', nullable: true})
|
||||
@ManyToOne((type) => MediaEntity, { onDelete: 'SET NULL', nullable: true })
|
||||
public preview: MediaEntity;
|
||||
|
||||
}
|
||||
|
@ -1,10 +1,13 @@
|
||||
import {ChildEntity, Column} from 'typeorm';
|
||||
import {AlbumBaseEntity} from './AlbumBaseEntity';
|
||||
import {SavedSearchDTO} from '../../../../../../common/entities/album/SavedSearchDTO';
|
||||
import {SearchQueryDTO} from '../../../../../../common/entities/SearchQueryDTO';
|
||||
import { ChildEntity, Column } from 'typeorm';
|
||||
import { AlbumBaseEntity } from './AlbumBaseEntity';
|
||||
import { SavedSearchDTO } from '../../../../../../common/entities/album/SavedSearchDTO';
|
||||
import { SearchQueryDTO } from '../../../../../../common/entities/SearchQueryDTO';
|
||||
|
||||
@ChildEntity()
|
||||
export class SavedSearchEntity extends AlbumBaseEntity implements SavedSearchDTO {
|
||||
export class SavedSearchEntity
|
||||
extends AlbumBaseEntity
|
||||
implements SavedSearchDTO
|
||||
{
|
||||
@Column({
|
||||
type: 'text',
|
||||
nullable: false,
|
||||
@ -16,8 +19,8 @@ export class SavedSearchEntity extends AlbumBaseEntity implements SavedSearchDTO
|
||||
// used to serialize your data to db field
|
||||
to: (val: object) => {
|
||||
return JSON.stringify(val);
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
})
|
||||
searchQuery: SearchQueryDTO;
|
||||
}
|
||||
|
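SavedSearchEntity above persists a SearchQueryDTO as JSON text through a column transformer; the hunk shows the serialize direction, and the deserialize side is implied by the same pattern. A small standalone sketch with an illustrative SavedQuery entity and simplified query type:

import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm';

@Entity()
class SavedQuery {
  @PrimaryGeneratedColumn({ unsigned: true })
  id: number;

  // Stored as TEXT in the database, exposed as a plain object in code.
  @Column({
    type: 'text',
    nullable: false,
    transformer: {
      from: (val: string) => JSON.parse(val),   // db field -> entity property
      to: (val: object) => JSON.stringify(val), // entity property -> db field
    },
  })
  query: { type: number; text?: string };
}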
@ -1,9 +1,9 @@
|
||||
import {Config} from '../../../common/config/private/Config';
|
||||
import {Logger} from '../../Logger';
|
||||
import {NotificationManager} from '../NotifocationManager';
|
||||
import {SQLConnection} from '../database/sql/SQLConnection';
|
||||
import { Config } from '../../../common/config/private/Config';
|
||||
import { Logger } from '../../Logger';
|
||||
import { NotificationManager } from '../NotifocationManager';
|
||||
import { SQLConnection } from '../database/sql/SQLConnection';
|
||||
import * as fs from 'fs';
|
||||
import {FFmpegFactory} from '../FFmpegFactory';
|
||||
import { FFmpegFactory } from '../FFmpegFactory';
|
||||
import {
|
||||
ClientAlbumConfig,
|
||||
ClientFacesConfig,
|
||||
@ -16,7 +16,7 @@ import {
|
||||
ClientThumbnailConfig,
|
||||
ClientVideoConfig,
|
||||
MapLayers,
|
||||
MapProviders
|
||||
MapProviders,
|
||||
} from '../../../common/config/public/ClientConfig';
|
||||
import {
|
||||
DatabaseType,
|
||||
@ -25,26 +25,34 @@ import {
|
||||
ServerJobConfig,
|
||||
ServerPhotoConfig,
|
||||
ServerPreviewConfig,
|
||||
ServerVideoConfig
|
||||
ServerThumbnailConfig,
|
||||
ServerVideoConfig,
|
||||
} from '../../../common/config/private/PrivateConfig';
|
||||
import {SearchQueryParser} from '../../../common/SearchQueryParser';
|
||||
import {SearchQueryTypes, TextSearch} from '../../../common/entities/SearchQueryDTO';
|
||||
import {Utils} from '../../../common/Utils';
|
||||
import { SearchQueryParser } from '../../../common/SearchQueryParser';
|
||||
import {
|
||||
SearchQueryTypes,
|
||||
TextSearch,
|
||||
} from '../../../common/entities/SearchQueryDTO';
|
||||
import { Utils } from '../../../common/Utils';
|
||||
|
||||
const LOG_TAG = '[ConfigDiagnostics]';
|
||||
|
||||
|
||||
export class ConfigDiagnostics {
|
||||
static testAlbumsConfig(albumConfig: ClientAlbumConfig, original: IPrivateConfig): void {
|
||||
if (albumConfig.enabled === true &&
|
||||
original.Server.Database.type === DatabaseType.memory) {
|
||||
static testAlbumsConfig(
|
||||
albumConfig: ClientAlbumConfig,
|
||||
original: IPrivateConfig
|
||||
): void {
|
||||
if (
|
||||
albumConfig.enabled === true &&
|
||||
original.Server.Database.type === DatabaseType.memory
|
||||
) {
|
||||
throw new Error('Memory Database does not support albums');
|
||||
}
|
||||
}
|
||||
|
||||
static checkReadWritePermission(path: string): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
// tslint:disable-next-line:no-bitwise
|
||||
// eslint-disable-next-line no-bitwise
|
||||
fs.access(path, fs.constants.R_OK | fs.constants.W_OK, (err) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
@ -54,28 +62,35 @@ export class ConfigDiagnostics {
|
||||
});
|
||||
}
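checkReadWritePermission above wraps callback-style fs.access in a Promise and combines the R_OK/W_OK flags with a bitwise OR, hence the swapped lint suppression. An equivalent sketch with fs.promises (checkReadWrite is an illustrative name):

import { constants, promises as fsp } from 'fs';

// Resolves when the path is both readable and writable, rejects otherwise.
async function checkReadWrite(p: string): Promise<void> {
  // eslint-disable-next-line no-bitwise
  await fsp.access(p, constants.R_OK | constants.W_OK);
}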
|
||||
|
||||
static async testDatabase(databaseConfig: ServerDataBaseConfig): Promise<void> {
|
||||
static async testDatabase(
|
||||
databaseConfig: ServerDataBaseConfig
|
||||
): Promise<void> {
|
||||
if (databaseConfig.type !== DatabaseType.memory) {
|
||||
await SQLConnection.tryConnection(databaseConfig);
|
||||
}
|
||||
if (databaseConfig.type === DatabaseType.sqlite) {
|
||||
try {
|
||||
await this.checkReadWritePermission(SQLConnection.getSQLiteDB(databaseConfig));
|
||||
await this.checkReadWritePermission(
|
||||
SQLConnection.getSQLiteDB(databaseConfig)
|
||||
);
|
||||
} catch (e) {
|
||||
throw new Error('Cannot read or write sqlite storage file: ' + SQLConnection.getSQLiteDB(databaseConfig));
|
||||
throw new Error(
|
||||
'Cannot read or write sqlite storage file: ' +
|
||||
SQLConnection.getSQLiteDB(databaseConfig)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static async testMetaFileConfig(metaFileConfig: ClientMetaFileConfig, config: IPrivateConfig): Promise<void> {
|
||||
if (metaFileConfig.gpx === true &&
|
||||
config.Client.Map.enabled === false) {
|
||||
static async testMetaFileConfig(
|
||||
metaFileConfig: ClientMetaFileConfig,
|
||||
config: IPrivateConfig
|
||||
): Promise<void> {
|
||||
if (metaFileConfig.gpx === true && config.Client.Map.enabled === false) {
|
||||
throw new Error('*.gpx meta files are not supported without MAP');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static testClientVideoConfig(videoConfig: ClientVideoConfig): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
@ -83,11 +98,21 @@ export class ConfigDiagnostics {
|
||||
const ffmpeg = FFmpegFactory.get();
|
||||
ffmpeg().getAvailableCodecs((err: Error) => {
|
||||
if (err) {
|
||||
return reject(new Error('Error accessing ffmpeg, cant find executable: ' + err.toString()));
|
||||
return reject(
|
||||
new Error(
|
||||
'Error accessing ffmpeg, cant find executable: ' +
|
||||
err.toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
ffmpeg(__dirname + '/blank.jpg').ffprobe((err2: Error) => {
|
||||
if (err2) {
|
||||
return reject(new Error('Error accessing ffmpeg-probe, cant find executable: ' + err2.toString()));
|
||||
return reject(
|
||||
new Error(
|
||||
'Error accessing ffmpeg-probe, cant find executable: ' +
|
||||
err2.toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
return resolve();
|
||||
});
|
||||
@ -101,7 +126,10 @@ export class ConfigDiagnostics {
|
||||
});
|
||||
}
|
||||
|
||||
static async testServerVideoConfig(videoConfig: ServerVideoConfig, config: IPrivateConfig): Promise<void> {
|
||||
static async testServerVideoConfig(
|
||||
videoConfig: ServerVideoConfig,
|
||||
config: IPrivateConfig
|
||||
): Promise<void> {
|
||||
if (config.Client.Media.Video.enabled === true) {
|
||||
if (videoConfig.transcoding.fps <= 0) {
|
||||
throw new Error('fps should be grater than 0');
|
||||
@ -114,7 +142,6 @@ export class ConfigDiagnostics {
|
||||
sharp();
|
||||
}
|
||||
|
||||
|
||||
static async testTempFolder(folder: string): Promise<void> {
|
||||
await this.checkReadWritePermission(folder);
|
||||
}
|
||||
@ -122,36 +149,43 @@ export class ConfigDiagnostics {
|
||||
static testImageFolder(folder: string): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!fs.existsSync(folder)) {
|
||||
reject('Images folder not exists: \'' + folder + '\'');
|
||||
reject("Images folder not exists: '" + folder + "'");
|
||||
}
|
||||
fs.access(folder, fs.constants.R_OK, (err) => {
|
||||
if (err) {
|
||||
reject({message: 'Error during getting read access to images folder', error: err.toString()});
|
||||
reject({
|
||||
message: 'Error during getting read access to images folder',
|
||||
error: err.toString(),
|
||||
});
|
||||
}
|
||||
});
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
static async testServerPhotoConfig(server: ServerPhotoConfig): Promise<void> {
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
static async testClientPhotoConfig(client: ClientPhotoConfig): Promise<void> {
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
public static async testServerThumbnailConfig(server: ServerThumbnailConfig): Promise<void> {
|
||||
public static async testServerThumbnailConfig(
|
||||
server: ServerThumbnailConfig
|
||||
): Promise<void> {
|
||||
if (server.personFaceMargin < 0 || server.personFaceMargin > 1) {
|
||||
throw new Error('personFaceMargin should be between 0 and 1');
|
||||
}
|
||||
}
|
||||
|
||||
static async testClientThumbnailConfig(thumbnailConfig: ClientThumbnailConfig): Promise<void> {
|
||||
static async testClientThumbnailConfig(
|
||||
thumbnailConfig: ClientThumbnailConfig
|
||||
): Promise<void> {
|
||||
if (isNaN(thumbnailConfig.iconSize) || thumbnailConfig.iconSize <= 0) {
|
||||
throw new Error('IconSize has to be >= 0 integer, got: ' + thumbnailConfig.iconSize);
|
||||
throw new Error(
|
||||
'IconSize has to be >= 0 integer, got: ' + thumbnailConfig.iconSize
|
||||
);
|
||||
}
|
||||
|
||||
if (!thumbnailConfig.thumbnailSizes.length) {
|
||||
@ -164,12 +198,17 @@ export class ConfigDiagnostics {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static async testTasksConfig(task: ServerJobConfig, config: IPrivateConfig): Promise<void> {
|
||||
|
||||
static async testTasksConfig(
|
||||
task: ServerJobConfig,
|
||||
config: IPrivateConfig
|
||||
): Promise<void> {
|
||||
return;
|
||||
}
|
||||
|
||||
static async testFacesConfig(faces: ClientFacesConfig, config: IPrivateConfig): Promise<void> {
|
||||
static async testFacesConfig(
|
||||
faces: ClientFacesConfig,
|
||||
config: IPrivateConfig
|
||||
): Promise<void> {
|
||||
if (faces.enabled === true) {
|
||||
if (config.Server.Database.type === DatabaseType.memory) {
|
||||
throw new Error('Memory Database do not support faces');
|
||||
@ -180,43 +219,62 @@ export class ConfigDiagnostics {
|
||||
}
|
||||
}
|
||||
|
||||
static async testSearchConfig(search: ClientSearchConfig, config: IPrivateConfig): Promise<void> {
|
||||
if (search.enabled === true &&
|
||||
config.Server.Database.type === DatabaseType.memory) {
|
||||
static async testSearchConfig(
|
||||
search: ClientSearchConfig,
|
||||
config: IPrivateConfig
|
||||
): Promise<void> {
|
||||
if (
|
||||
search.enabled === true &&
|
||||
config.Server.Database.type === DatabaseType.memory
|
||||
) {
|
||||
throw new Error('Memory Database do not support searching');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static async testSharingConfig(sharing: ClientSharingConfig, config: IPrivateConfig): Promise<void> {
|
||||
if (sharing.enabled === true &&
|
||||
config.Server.Database.type === DatabaseType.memory) {
|
||||
static async testSharingConfig(
|
||||
sharing: ClientSharingConfig,
|
||||
config: IPrivateConfig
|
||||
): Promise<void> {
|
||||
if (
|
||||
sharing.enabled === true &&
|
||||
config.Server.Database.type === DatabaseType.memory
|
||||
) {
|
||||
throw new Error('Memory Database do not support sharing');
|
||||
}
|
||||
if (sharing.enabled === true &&
|
||||
config.Client.authenticationRequired === false) {
|
||||
if (
|
||||
sharing.enabled === true &&
|
||||
config.Client.authenticationRequired === false
|
||||
) {
|
||||
throw new Error('In case of no authentication, sharing is not supported');
|
||||
}
|
||||
}
|
||||
|
||||
static async testRandomPhotoConfig(sharing: ClientRandomPhotoConfig, config: IPrivateConfig): Promise<void> {
|
||||
if (sharing.enabled === true &&
|
||||
config.Server.Database.type === DatabaseType.memory) {
|
||||
static async testRandomPhotoConfig(
|
||||
sharing: ClientRandomPhotoConfig,
|
||||
config: IPrivateConfig
|
||||
): Promise<void> {
|
||||
if (
|
||||
sharing.enabled === true &&
|
||||
config.Server.Database.type === DatabaseType.memory
|
||||
) {
|
||||
throw new Error('Memory Database do not support random photo');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static async testMapConfig(map: ClientMapConfig): Promise<void> {
|
||||
if (map.enabled === false) {
|
||||
return;
|
||||
}
|
||||
if (map.mapProvider === MapProviders.Mapbox &&
|
||||
(!map.mapboxAccessToken || map.mapboxAccessToken.length === 0)) {
|
||||
if (
|
||||
map.mapProvider === MapProviders.Mapbox &&
|
||||
(!map.mapboxAccessToken || map.mapboxAccessToken.length === 0)
|
||||
) {
|
||||
throw new Error('Mapbox needs a valid api key.');
|
||||
}
|
||||
if (map.mapProvider === MapProviders.Custom &&
|
||||
(!map.customLayers || map.customLayers.length === 0)) {
|
||||
if (
|
||||
map.mapProvider === MapProviders.Custom &&
|
||||
(!map.customLayers || map.customLayers.length === 0)
|
||||
) {
|
||||
throw new Error('Custom maps need at least one valid layer');
|
||||
}
|
||||
if (map.mapProvider === MapProviders.Custom) {
|
||||
@ -228,23 +286,29 @@ export class ConfigDiagnostics {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static async testPreviewConfig(settings: ServerPreviewConfig): Promise<void> {
|
||||
const sp = new SearchQueryParser();
|
||||
if (!Utils.equalsFilter(sp.parse(sp.stringify(settings.SearchQuery)), settings.SearchQuery)) {
|
||||
if (
|
||||
!Utils.equalsFilter(
|
||||
sp.parse(sp.stringify(settings.SearchQuery)),
|
||||
settings.SearchQuery
|
||||
)
|
||||
) {
|
||||
throw new Error('SearchQuery is not valid');
|
||||
}
|
||||
}
|
||||
|
||||
static async runDiagnostics(): Promise<void> {
|
||||
|
||||
if (Config.Server.Database.type !== DatabaseType.memory) {
|
||||
try {
|
||||
await ConfigDiagnostics.testDatabase(Config.Server.Database);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
Logger.warn(LOG_TAG, '[SQL error]', err.toString());
|
||||
Logger.error(LOG_TAG, 'Error during initializing SQL DB, check DB connection and settings');
|
||||
Logger.error(
|
||||
LOG_TAG,
|
||||
'Error during initializing SQL DB, check DB connection and settings'
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
@ -254,14 +318,20 @@ export class ConfigDiagnostics {
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
|
||||
Logger.warn(LOG_TAG, '[Thumbnail hardware acceleration] module error: ', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Thumbnail hardware acceleration is not possible.' +
|
||||
' \'sharp\' node module is not found.' +
|
||||
' Falling back temporally to JS based thumbnail generation');
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'[Thumbnail hardware acceleration] module error: ',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Thumbnail hardware acceleration is not possible.' +
|
||||
" 'sharp' node module is not found." +
|
||||
' Falling back temporally to JS based thumbnail generation'
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testTempFolder(Config.Server.Media.tempFolder);
|
||||
} catch (ex) {
|
||||
@ -270,23 +340,42 @@ export class ConfigDiagnostics {
|
||||
Logger.error(LOG_TAG, 'Thumbnail folder error', err.toString());
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testClientVideoConfig(Config.Client.Media.Video);
|
||||
await ConfigDiagnostics.testServerVideoConfig(Config.Server.Media.Video, Config);
|
||||
await ConfigDiagnostics.testServerVideoConfig(
|
||||
Config.Server.Media.Video,
|
||||
Config
|
||||
);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Video support error, switching off..', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Video support error, switching off..', err.toString());
|
||||
NotificationManager.warning(
|
||||
'Video support error, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Video support error, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Config.Client.Media.Video.enabled = false;
|
||||
}
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testMetaFileConfig(Config.Client.MetaFile, Config);
|
||||
await ConfigDiagnostics.testMetaFileConfig(
|
||||
Config.Client.MetaFile,
|
||||
Config
|
||||
);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Meta file support error, switching off gpx..', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Meta file support error, switching off..', err.toString());
|
||||
NotificationManager.warning(
|
||||
'Meta file support error, switching off gpx..',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Meta file support error, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Config.Client.MetaFile.gpx = false;
|
||||
}
|
||||
|
||||
@ -294,12 +383,18 @@ export class ConfigDiagnostics {
|
||||
await ConfigDiagnostics.testAlbumsConfig(Config.Client.Album, Config);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Albums support error, switching off..', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Meta file support error, switching off..', err.toString());
|
||||
NotificationManager.warning(
|
||||
'Albums support error, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Meta file support error, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Config.Client.Album.enabled = false;
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testImageFolder(Config.Server.Media.folder);
|
||||
} catch (ex) {
|
||||
@ -308,52 +403,82 @@ export class ConfigDiagnostics {
|
||||
Logger.error(LOG_TAG, 'Images folder error', err.toString());
|
||||
}
|
||||
try {
|
||||
await ConfigDiagnostics.testClientThumbnailConfig(Config.Client.Media.Thumbnail);
|
||||
await ConfigDiagnostics.testClientThumbnailConfig(
|
||||
Config.Client.Media.Thumbnail
|
||||
);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.error('Thumbnail settings error', err.toString());
|
||||
Logger.error(LOG_TAG, 'Thumbnail settings error', err.toString());
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testSearchConfig(Config.Client.Search, Config);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Search is not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Search is not supported with these settings, switching off..', err.toString());
|
||||
NotificationManager.warning(
|
||||
'Search is not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Search is not supported with these settings, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Config.Client.Search.enabled = false;
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testPreviewConfig(Config.Server.Preview);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Preview settings are not valid, resetting search query', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Preview settings are not valid, resetting search query', err.toString());
|
||||
Config.Server.Preview.SearchQuery = {type: SearchQueryTypes.any_text, text: ''} as TextSearch;
|
||||
NotificationManager.warning(
|
||||
'Preview settings are not valid, resetting search query',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Preview settings are not valid, resetting search query',
|
||||
err.toString()
|
||||
);
|
||||
Config.Server.Preview.SearchQuery = {
|
||||
type: SearchQueryTypes.any_text,
|
||||
text: '',
|
||||
} as TextSearch;
|
||||
}
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testFacesConfig(Config.Client.Faces, Config);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Faces are not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Faces are not supported with these settings, switching off..', err.toString());
|
||||
NotificationManager.warning(
|
||||
'Faces are not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Faces are not supported with these settings, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Config.Client.Faces.enabled = false;
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testTasksConfig(Config.Server.Jobs, Config);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Some Tasks are not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Some Tasks not supported with these settings, switching off..', err.toString());
|
||||
NotificationManager.warning(
|
||||
'Some Tasks are not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Some Tasks not supported with these settings, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Config.Client.Faces.enabled = false;
|
||||
}
|
||||
|
||||
@ -361,34 +486,55 @@ export class ConfigDiagnostics {
|
||||
await ConfigDiagnostics.testSharingConfig(Config.Client.Sharing, Config);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Sharing is not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Sharing is not supported with these settings, switching off..', err.toString());
|
||||
NotificationManager.warning(
|
||||
'Sharing is not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Sharing is not supported with these settings, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Config.Client.Sharing.enabled = false;
|
||||
}
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testRandomPhotoConfig(Config.Client.Sharing, Config);
|
||||
await ConfigDiagnostics.testRandomPhotoConfig(
|
||||
Config.Client.Sharing,
|
||||
Config
|
||||
);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Random Media is not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Random Media is not supported with these settings, switching off..', err.toString());
|
||||
NotificationManager.warning(
|
||||
'Random Media is not supported with these settings. Disabling temporally. ' +
|
||||
'Please adjust the config properly.',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Random Media is not supported with these settings, switching off..',
|
||||
err.toString()
|
||||
);
|
||||
Config.Client.Sharing.enabled = false;
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
await ConfigDiagnostics.testMapConfig(Config.Client.Map);
|
||||
} catch (ex) {
|
||||
const err: Error = ex;
|
||||
NotificationManager.warning('Maps is not supported with these settings. Using open street maps temporally. ' +
|
||||
'Please adjust the config properly.', err.toString());
|
||||
Logger.warn(LOG_TAG, 'Maps is not supported with these settings. Using open street maps temporally ' +
|
||||
'Please adjust the config properly.', err.toString());
|
||||
NotificationManager.warning(
|
||||
'Maps is not supported with these settings. Using open street maps temporally. ' +
|
||||
'Please adjust the config properly.',
|
||||
err.toString()
|
||||
);
|
||||
Logger.warn(
|
||||
LOG_TAG,
|
||||
'Maps is not supported with these settings. Using open street maps temporally ' +
|
||||
'Please adjust the config properly.',
|
||||
err.toString()
|
||||
);
|
||||
Config.Client.Map.mapProvider = MapProviders.OpenStreetMap;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
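The runDiagnostics body above repeats one pattern for every optional feature: probe it, and on failure warn the user, log the reason and disable just that feature instead of aborting startup (only the database, folder and thumbnail checks exit the process). A condensed sketch of that warn-and-disable pattern, with placeholder check/warn/log/disable callbacks standing in for the real NotificationManager, Logger and Config calls:

// Sketch (not project code) of the soft-failure pattern used by runDiagnostics.
async function softCheck(
  featureName: string,
  check: () => Promise<void>,
  disable: () => void,
  warnUser: (title: string, details: string) => void,
  logWarn: (msg: string, details: string) => void
): Promise<void> {
  try {
    await check();
  } catch (ex) {
    const err: Error = ex;
    warnUser(
      featureName + ' is not supported with these settings. Disabling temporarily.',
      err.toString()
    );
    logWarn(
      featureName + ' is not supported with these settings, switching off..',
      err.toString()
    );
    disable(); // e.g. flip the corresponding Config.Client.*.enabled flag
  }
}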
@ -1,18 +1,20 @@
|
||||
import * as path from 'path';
|
||||
import {constants as fsConstants, promises as fsp} from 'fs';
|
||||
import { constants as fsConstants, promises as fsp } from 'fs';
|
||||
import * as os from 'os';
|
||||
import * as crypto from 'crypto';
|
||||
import {ProjectPath} from '../../ProjectPath';
|
||||
import {Config} from '../../../common/config/private/Config';
|
||||
import {PhotoWorker, RendererInput, ThumbnailSourceType} from '../threading/PhotoWorker';
|
||||
import {ITaskExecuter, TaskExecuter} from '../threading/TaskExecuter';
|
||||
import {FaceRegion, PhotoDTO} from '../../../common/entities/PhotoDTO';
|
||||
import {SupportedFormats} from '../../../common/SupportedFormats';
|
||||
import {PersonWithSampleRegion} from '../../../common/entities/PersonDTO';
|
||||
|
||||
import { ProjectPath } from '../../ProjectPath';
|
||||
import { Config } from '../../../common/config/private/Config';
|
||||
import {
|
||||
PhotoWorker,
|
||||
RendererInput,
|
||||
ThumbnailSourceType,
|
||||
} from '../threading/PhotoWorker';
|
||||
import { ITaskExecuter, TaskExecuter } from '../threading/TaskExecuter';
|
||||
import { FaceRegion, PhotoDTO } from '../../../common/entities/PhotoDTO';
|
||||
import { SupportedFormats } from '../../../common/SupportedFormats';
|
||||
import { PersonWithSampleRegion } from '../../../common/entities/PersonDTO';
|
||||
|
||||
export class PhotoProcessing {
|
||||
|
||||
private static initDone = false;
|
||||
private static taskQue: ITaskExecuter<RendererInput, void> = null;
|
||||
|
||||
@ -21,49 +23,64 @@ export class PhotoProcessing {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
if (Config.Server.Threading.enabled === true) {
|
||||
if (Config.Server.Threading.thumbnailThreads > 0) {
|
||||
Config.Client.Media.Thumbnail.concurrentThumbnailGenerations = Config.Server.Threading.thumbnailThreads;
|
||||
Config.Client.Media.Thumbnail.concurrentThumbnailGenerations =
|
||||
Config.Server.Threading.thumbnailThreads;
|
||||
} else {
|
||||
Config.Client.Media.Thumbnail.concurrentThumbnailGenerations = Math.max(1, os.cpus().length - 1);
|
||||
Config.Client.Media.Thumbnail.concurrentThumbnailGenerations = Math.max(
|
||||
1,
|
||||
os.cpus().length - 1
|
||||
);
|
||||
}
|
||||
} else {
|
||||
Config.Client.Media.Thumbnail.concurrentThumbnailGenerations = 1;
|
||||
}
|
||||
|
||||
|
||||
this.taskQue = new TaskExecuter(Config.Client.Media.Thumbnail.concurrentThumbnailGenerations,
|
||||
((input): Promise<void> => PhotoWorker.render(input)));
|
||||
this.taskQue = new TaskExecuter(
|
||||
Config.Client.Media.Thumbnail.concurrentThumbnailGenerations,
|
||||
(input): Promise<void> => PhotoWorker.render(input)
|
||||
);
|
||||
|
||||
this.initDone = true;
|
||||
}
|
||||
|
||||
|
||||
public static async generatePersonThumbnail(person: PersonWithSampleRegion): Promise<string> {
|
||||
|
||||
public static async generatePersonThumbnail(
|
||||
person: PersonWithSampleRegion
|
||||
): Promise<string> {
|
||||
// load parameters
|
||||
const photo: PhotoDTO = person.sampleRegion.media;
|
||||
const mediaPath = path.join(ProjectPath.ImageFolder, photo.directory.path, photo.directory.name, photo.name);
|
||||
const mediaPath = path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
photo.directory.path,
|
||||
photo.directory.name,
|
||||
photo.name
|
||||
);
|
||||
const size: number = Config.Client.Media.Thumbnail.personThumbnailSize;
|
||||
// generate thumbnail path
|
||||
const thPath = PhotoProcessing.generatePersonThumbnailPath(mediaPath, person.sampleRegion, size);
|
||||
|
||||
const thPath = PhotoProcessing.generatePersonThumbnailPath(
|
||||
mediaPath,
|
||||
person.sampleRegion,
|
||||
size
|
||||
);
|
||||
|
||||
// check if thumbnail already exist
|
||||
try {
|
||||
await fsp.access(thPath, fsConstants.R_OK);
|
||||
return thPath;
|
||||
} catch (e) {
|
||||
}
|
||||
|
||||
} catch (e) {}
|
||||
|
||||
const margin = {
|
||||
x: Math.round(person.sampleRegion.box.width * (Config.Server.Media.Thumbnail.personFaceMargin)),
|
||||
y: Math.round(person.sampleRegion.box.height * (Config.Server.Media.Thumbnail.personFaceMargin))
|
||||
x: Math.round(
|
||||
person.sampleRegion.box.width *
|
||||
Config.Server.Media.Thumbnail.personFaceMargin
|
||||
),
|
||||
y: Math.round(
|
||||
person.sampleRegion.box.height *
|
||||
Config.Server.Media.Thumbnail.personFaceMargin
|
||||
),
|
||||
};
|
||||
|
||||
|
||||
// run on other thread
|
||||
const input = {
|
||||
type: ThumbnailSourceType.Photo,
|
||||
@ -72,49 +89,88 @@ export class PhotoProcessing {
|
||||
outPath: thPath,
|
||||
makeSquare: false,
|
||||
cut: {
|
||||
left: Math.round(Math.max(0, person.sampleRegion.box.left - margin.x / 2)),
|
||||
top: Math.round(Math.max(0, person.sampleRegion.box.top - margin.y / 2)),
|
||||
left: Math.round(
|
||||
Math.max(0, person.sampleRegion.box.left - margin.x / 2)
|
||||
),
|
||||
top: Math.round(
|
||||
Math.max(0, person.sampleRegion.box.top - margin.y / 2)
|
||||
),
|
||||
width: person.sampleRegion.box.width + margin.x,
|
||||
height: person.sampleRegion.box.height + margin.y
|
||||
height: person.sampleRegion.box.height + margin.y,
|
||||
},
|
||||
qualityPriority: Config.Server.Media.Thumbnail.qualityPriority
|
||||
qualityPriority: Config.Server.Media.Thumbnail.qualityPriority,
|
||||
} as RendererInput;
|
||||
input.cut.width = Math.min(input.cut.width, photo.metadata.size.width - input.cut.left);
|
||||
input.cut.height = Math.min(input.cut.height, photo.metadata.size.height - input.cut.top);
|
||||
input.cut.width = Math.min(
|
||||
input.cut.width,
|
||||
photo.metadata.size.width - input.cut.left
|
||||
);
|
||||
input.cut.height = Math.min(
|
||||
input.cut.height,
|
||||
photo.metadata.size.height - input.cut.top
|
||||
);
|
||||
|
||||
await fsp.mkdir(ProjectPath.FacesFolder, {recursive: true});
|
||||
await fsp.mkdir(ProjectPath.FacesFolder, { recursive: true });
|
||||
await PhotoProcessing.taskQue.execute(input);
|
||||
return thPath;
|
||||
}
|
||||
|
||||
|
||||
public static generateConvertedPath(mediaPath: string, size: number): string {
|
||||
const file = path.basename(mediaPath);
|
||||
return path.join(ProjectPath.TranscodedFolder,
|
||||
return path.join(
|
||||
ProjectPath.TranscodedFolder,
|
||||
ProjectPath.getRelativePathToImages(path.dirname(mediaPath)),
|
||||
file + '_' + size + '.jpg');
|
||||
file + '_' + size + '.jpg'
|
||||
);
|
||||
}
|
||||
|
||||
public static generatePersonThumbnailPath(mediaPath: string, faceRegion: FaceRegion, size: number): string {
|
||||
return path.join(ProjectPath.FacesFolder,
|
||||
crypto.createHash('md5').update(mediaPath + '_' + faceRegion.name + '_' + faceRegion.box.left + '_' + faceRegion.box.top)
|
||||
.digest('hex') + '_' + size + '.jpg');
|
||||
public static generatePersonThumbnailPath(
|
||||
mediaPath: string,
|
||||
faceRegion: FaceRegion,
|
||||
size: number
|
||||
): string {
|
||||
return path.join(
|
||||
ProjectPath.FacesFolder,
|
||||
crypto
|
||||
.createHash('md5')
|
||||
.update(
|
||||
mediaPath +
|
||||
'_' +
|
||||
faceRegion.name +
|
||||
'_' +
|
||||
faceRegion.box.left +
|
||||
'_' +
|
||||
faceRegion.box.top
|
||||
)
|
||||
.digest('hex') +
|
||||
'_' +
|
||||
size +
|
||||
'.jpg'
|
||||
);
|
||||
}
|
||||
|
||||
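generatePersonThumbnailPath above derives the cache file name from an md5 hash of the media path plus the face's name and box position, so every distinct crop maps to a stable, collision-unlikely file under the faces folder. A standalone sketch of the same naming scheme; the parameter names are illustrative:

import * as path from 'path';
import * as crypto from 'crypto';

// Sketch (not project code): stable cache name for a cropped face thumbnail.
function faceThumbnailPath(
  cacheDir: string,
  mediaPath: string,
  faceName: string,
  left: number,
  top: number,
  size: number
): string {
  const key = mediaPath + '_' + faceName + '_' + left + '_' + top;
  const hash = crypto.createHash('md5').update(key).digest('hex');
  return path.join(cacheDir, hash + '_' + size + '.jpg');
}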
public static async isValidConvertedPath(
|
||||
convertedPath: string
|
||||
): Promise<boolean> {
|
||||
const origFilePath = path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
path.relative(
|
||||
ProjectPath.TranscodedFolder,
|
||||
convertedPath.substring(0, convertedPath.lastIndexOf('_'))
|
||||
)
|
||||
);
|
||||
|
||||
public static async isValidConvertedPath(convertedPath: string): Promise<boolean> {
|
||||
const origFilePath = path.join(ProjectPath.ImageFolder,
|
||||
path.relative(ProjectPath.TranscodedFolder,
|
||||
convertedPath.substring(0, convertedPath.lastIndexOf('_'))));
|
||||
|
||||
const sizeStr = convertedPath.substring(convertedPath.lastIndexOf('_') + 1,
|
||||
convertedPath.length - path.extname(convertedPath).length);
|
||||
const sizeStr = convertedPath.substring(
|
||||
convertedPath.lastIndexOf('_') + 1,
|
||||
convertedPath.length - path.extname(convertedPath).length
|
||||
);
|
||||
|
||||
const size = parseInt(sizeStr, 10);
|
||||
|
||||
if ((size + '').length !== sizeStr.length ||
|
||||
if (
|
||||
(size + '').length !== sizeStr.length ||
|
||||
(Config.Client.Media.Thumbnail.thumbnailSizes.indexOf(size) === -1 &&
|
||||
Config.Server.Media.Photo.Converting.resolution !== size)) {
|
||||
Config.Server.Media.Photo.Converting.resolution !== size)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -124,49 +180,47 @@ export class PhotoProcessing {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
return true;
|
||||
}
|
||||
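isValidConvertedPath works because generateConvertedPath encodes the requested size into the output name as <original name>_<size>.jpg; the validator re-parses that suffix and only accepts sizes that match a configured thumbnail size or the converting resolution. A small sketch of just the suffix parsing, independent of the project's config objects:

import * as path from 'path';

// Sketch (not project code): recover the size from a `<name>_<size>.jpg` convention.
function parseEncodedSize(convertedPath: string): number | null {
  const sizeStr = convertedPath.substring(
    convertedPath.lastIndexOf('_') + 1,
    convertedPath.length - path.extname(convertedPath).length
  );
  const size = parseInt(sizeStr, 10);
  // Reject suffixes that are not purely numeric (e.g. 'photo_abc.jpg').
  return String(size).length === sizeStr.length ? size : null;
}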
|
||||
|
||||
public static async convertPhoto(mediaPath: string): Promise<string> {
|
||||
return this.generateThumbnail(mediaPath,
|
||||
return this.generateThumbnail(
|
||||
mediaPath,
|
||||
Config.Server.Media.Photo.Converting.resolution,
|
||||
ThumbnailSourceType.Photo,
|
||||
false);
|
||||
false
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
static async convertedPhotoExist(mediaPath: string, size: number): Promise<boolean> {
|
||||
|
||||
static async convertedPhotoExist(
|
||||
mediaPath: string,
|
||||
size: number
|
||||
): Promise<boolean> {
|
||||
// generate thumbnail path
|
||||
const outPath = PhotoProcessing.generateConvertedPath(mediaPath, size);
|
||||
|
||||
|
||||
// check if file already exist
|
||||
try {
|
||||
await fsp.access(outPath, fsConstants.R_OK);
|
||||
return true;
|
||||
} catch (e) {
|
||||
}
|
||||
} catch (e) {}
|
||||
return false;
|
||||
}
|
||||
|
||||
public static async generateThumbnail(mediaPath: string,
|
||||
size: number,
|
||||
sourceType: ThumbnailSourceType,
|
||||
makeSquare: boolean): Promise<string> {
|
||||
public static async generateThumbnail(
|
||||
mediaPath: string,
|
||||
size: number,
|
||||
sourceType: ThumbnailSourceType,
|
||||
makeSquare: boolean
|
||||
): Promise<string> {
|
||||
// generate thumbnail path
|
||||
const outPath = PhotoProcessing.generateConvertedPath(mediaPath, size);
|
||||
|
||||
|
||||
// check if file already exist
|
||||
try {
|
||||
await fsp.access(outPath, fsConstants.R_OK);
|
||||
return outPath;
|
||||
} catch (e) {
|
||||
}
|
||||
|
||||
} catch (e) {}
|
||||
|
||||
// run on other thread
|
||||
const input = {
|
||||
@ -175,12 +229,12 @@ export class PhotoProcessing {
|
||||
size,
|
||||
outPath,
|
||||
makeSquare,
|
||||
qualityPriority: Config.Server.Media.Thumbnail.qualityPriority
|
||||
qualityPriority: Config.Server.Media.Thumbnail.qualityPriority,
|
||||
} as RendererInput;
|
||||
|
||||
const outDir = path.dirname(input.outPath);
|
||||
|
||||
await fsp.mkdir(outDir, {recursive: true});
|
||||
await fsp.mkdir(outDir, { recursive: true });
|
||||
await this.taskQue.execute(input);
|
||||
return outPath;
|
||||
}
|
||||
@ -189,6 +243,5 @@ export class PhotoProcessing {
|
||||
const extension = path.extname(fullPath).toLowerCase();
|
||||
return SupportedFormats.WithDots.Photos.indexOf(extension) !== -1;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
@ -1,30 +1,45 @@
|
||||
import * as path from 'path';
|
||||
import {constants as fsConstants, promises as fsp} from 'fs';
|
||||
import {ITaskExecuter, TaskExecuter} from '../threading/TaskExecuter';
|
||||
import {VideoConverterInput, VideoConverterWorker} from '../threading/VideoConverterWorker';
|
||||
import {MetadataLoader} from '../threading/MetadataLoader';
|
||||
import {Config} from '../../../common/config/private/Config';
|
||||
import {ProjectPath} from '../../ProjectPath';
|
||||
import {SupportedFormats} from '../../../common/SupportedFormats';
|
||||
|
||||
import { constants as fsConstants, promises as fsp } from 'fs';
|
||||
import { ITaskExecuter, TaskExecuter } from '../threading/TaskExecuter';
|
||||
import {
|
||||
VideoConverterInput,
|
||||
VideoConverterWorker,
|
||||
} from '../threading/VideoConverterWorker';
|
||||
import { MetadataLoader } from '../threading/MetadataLoader';
|
||||
import { Config } from '../../../common/config/private/Config';
|
||||
import { ProjectPath } from '../../ProjectPath';
|
||||
import { SupportedFormats } from '../../../common/SupportedFormats';
|
||||
|
||||
export class VideoProcessing {
|
||||
private static taskQue: ITaskExecuter<VideoConverterInput, void> =
|
||||
new TaskExecuter(1, ((input): Promise<void> => VideoConverterWorker.convert(input)));
|
||||
new TaskExecuter(
|
||||
1,
|
||||
(input): Promise<void> => VideoConverterWorker.convert(input)
|
||||
);
|
||||
|
||||
public static generateConvertedFilePath(videoPath: string): string {
|
||||
return path.join(ProjectPath.TranscodedFolder,
|
||||
return path.join(
|
||||
ProjectPath.TranscodedFolder,
|
||||
ProjectPath.getRelativePathToImages(path.dirname(videoPath)),
|
||||
path.basename(videoPath) + '_' + this.getConvertedFilePostFix());
|
||||
path.basename(videoPath) + '_' + this.getConvertedFilePostFix()
|
||||
);
|
||||
}
|
||||
|
||||
public static async isValidConvertedPath(convertedPath: string): Promise<boolean> {
|
||||
public static async isValidConvertedPath(
|
||||
convertedPath: string
|
||||
): Promise<boolean> {
|
||||
const origFilePath = path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
path.relative(
|
||||
ProjectPath.TranscodedFolder,
|
||||
convertedPath.substring(0, convertedPath.lastIndexOf('_'))
|
||||
)
|
||||
);
|
||||
|
||||
const origFilePath = path.join(ProjectPath.ImageFolder,
|
||||
path.relative(ProjectPath.TranscodedFolder,
|
||||
convertedPath.substring(0, convertedPath.lastIndexOf('_'))));
|
||||
|
||||
const postfix = convertedPath.substring(convertedPath.lastIndexOf('_') + 1, convertedPath.length);
|
||||
const postfix = convertedPath.substring(
|
||||
convertedPath.lastIndexOf('_') + 1,
|
||||
convertedPath.length
|
||||
);
|
||||
|
||||
if (postfix !== this.getConvertedFilePostFix()) {
|
||||
return false;
|
||||
@ -36,33 +51,27 @@ export class VideoProcessing {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
static async convertedVideoExist(videoPath: string): Promise<boolean> {
|
||||
const outPath = this.generateConvertedFilePath(videoPath);
|
||||
|
||||
try {
|
||||
await fsp.access(outPath, fsConstants.R_OK);
|
||||
return true;
|
||||
} catch (e) {
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
public static async convertVideo(videoPath: string): Promise<void> {
|
||||
|
||||
|
||||
const outPath = this.generateConvertedFilePath(videoPath);
|
||||
|
||||
try {
|
||||
await fsp.access(outPath, fsConstants.R_OK);
|
||||
return;
|
||||
} catch (e) {
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
const metaData = await MetadataLoader.loadVideoMetadata(videoPath);
|
||||
|
||||
@ -75,25 +84,28 @@ export class VideoProcessing {
|
||||
crf: Config.Server.Media.Video.transcoding.crf,
|
||||
preset: Config.Server.Media.Video.transcoding.preset,
|
||||
customOptions: Config.Server.Media.Video.transcoding.customOptions,
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
if (metaData.bitRate > Config.Server.Media.Video.transcoding.bitRate) {
|
||||
renderInput.output.bitRate = Config.Server.Media.Video.transcoding.bitRate;
|
||||
renderInput.output.bitRate =
|
||||
Config.Server.Media.Video.transcoding.bitRate;
|
||||
}
|
||||
if (metaData.fps > Config.Server.Media.Video.transcoding.fps) {
|
||||
renderInput.output.fps = Config.Server.Media.Video.transcoding.fps;
|
||||
}
|
||||
|
||||
if (Config.Server.Media.Video.transcoding.resolution < metaData.size.height) {
|
||||
renderInput.output.resolution = Config.Server.Media.Video.transcoding.resolution;
|
||||
if (
|
||||
Config.Server.Media.Video.transcoding.resolution < metaData.size.height
|
||||
) {
|
||||
renderInput.output.resolution =
|
||||
Config.Server.Media.Video.transcoding.resolution;
|
||||
}
|
||||
|
||||
const outDir = path.dirname(renderInput.output.path);
|
||||
|
||||
await fsp.mkdir(outDir, {recursive: true});
|
||||
await fsp.mkdir(outDir, { recursive: true });
|
||||
await VideoProcessing.taskQue.execute(renderInput);
|
||||
|
||||
}
|
||||
|
||||
public static isVideo(fullPath: string): boolean {
|
||||
@ -102,11 +114,14 @@ export class VideoProcessing {
|
||||
}
|
||||
|
||||
protected static getConvertedFilePostFix(): string {
|
||||
return Math.round(Config.Server.Media.Video.transcoding.bitRate / 1024) + 'k' +
|
||||
return (
|
||||
Math.round(Config.Server.Media.Video.transcoding.bitRate / 1024) +
|
||||
'k' +
|
||||
Config.Server.Media.Video.transcoding.codec.toString().toLowerCase() +
|
||||
Config.Server.Media.Video.transcoding.resolution +
|
||||
'.' + Config.Server.Media.Video.transcoding.format.toLowerCase();
|
||||
'.' +
|
||||
Config.Server.Media.Video.transcoding.format.toLowerCase()
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
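getConvertedFilePostFix above packs the target bitrate, codec, resolution and container format into the transcoded file name, so changing any transcoding setting produces a different output path and naturally forces a re-transcode. A sketch of the same naming with the settings passed in explicitly (the TranscodeSettings shape is illustrative):

// Sketch (not project code): settings-derived postfix for transcoded videos.
interface TranscodeSettings {
  bitRate: number;    // bits per second
  codec: string;      // e.g. 'libx264'
  resolution: number; // target height in pixels
  format: string;     // container, e.g. 'mp4'
}

function convertedPostfix(s: TranscodeSettings): string {
  return (
    Math.round(s.bitRate / 1024) + 'k' +
    s.codec.toLowerCase() +
    s.resolution +
    '.' + s.format.toLowerCase()
  );
}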
|
||||
|
@ -1,20 +1,27 @@
|
||||
import {IJobManager} from '../database/interfaces/IJobManager';
|
||||
import {JobProgressDTO, JobProgressStates} from '../../../common/entities/job/JobProgressDTO';
|
||||
import {IJob} from './jobs/IJob';
|
||||
import {JobRepository} from './JobRepository';
|
||||
import {Config} from '../../../common/config/private/Config';
|
||||
import {AfterJobTrigger, JobScheduleDTO, JobScheduleDTOUtils, JobTriggerType} from '../../../common/entities/job/JobScheduleDTO';
|
||||
import {Logger} from '../../Logger';
|
||||
import {NotificationManager} from '../NotifocationManager';
|
||||
import {IJobListener} from './jobs/IJobListener';
|
||||
import {JobProgress} from './jobs/JobProgress';
|
||||
import {JobProgressManager} from './JobProgressManager';
|
||||
|
||||
import { IJobManager } from '../database/interfaces/IJobManager';
|
||||
import {
|
||||
JobProgressDTO,
|
||||
JobProgressStates,
|
||||
} from '../../../common/entities/job/JobProgressDTO';
|
||||
import { IJob } from './jobs/IJob';
|
||||
import { JobRepository } from './JobRepository';
|
||||
import { Config } from '../../../common/config/private/Config';
|
||||
import {
|
||||
AfterJobTrigger,
|
||||
JobScheduleDTO,
|
||||
JobScheduleDTOUtils,
|
||||
JobTriggerType,
|
||||
} from '../../../common/entities/job/JobScheduleDTO';
|
||||
import { Logger } from '../../Logger';
|
||||
import { NotificationManager } from '../NotifocationManager';
|
||||
import { IJobListener } from './jobs/IJobListener';
|
||||
import { JobProgress } from './jobs/JobProgress';
|
||||
import { JobProgressManager } from './JobProgressManager';
|
||||
|
||||
const LOG_TAG = '[JobManager]';
|
||||
|
||||
export class JobManager implements IJobManager, IJobListener {
|
||||
protected timers: { schedule: JobScheduleDTO, timer: NodeJS.Timeout }[] = [];
|
||||
protected timers: { schedule: JobScheduleDTO; timer: NodeJS.Timeout }[] = [];
|
||||
protected progressManager: JobProgressManager = null;
|
||||
|
||||
constructor() {
|
||||
@ -23,21 +30,36 @@ export class JobManager implements IJobManager, IJobListener {
|
||||
}
|
||||
|
||||
protected get JobRunning(): boolean {
|
||||
return JobRepository.Instance.getAvailableJobs().findIndex((j): boolean => j.InProgress === true) !== -1;
|
||||
return (
|
||||
JobRepository.Instance.getAvailableJobs().findIndex(
|
||||
(j): boolean => j.InProgress === true
|
||||
) !== -1
|
||||
);
|
||||
}
|
||||
|
||||
protected get JobNoParallelRunning(): boolean {
|
||||
return JobRepository.Instance.getAvailableJobs()
|
||||
.findIndex((j): boolean => j.InProgress === true && j.allowParallelRun) !== -1;
|
||||
return (
|
||||
JobRepository.Instance.getAvailableJobs().findIndex(
|
||||
(j): boolean => j.InProgress === true && j.allowParallelRun
|
||||
) !== -1
|
||||
);
|
||||
}
|
||||
|
||||
getProgresses(): { [id: string]: JobProgressDTO } {
|
||||
return this.progressManager.Progresses;
|
||||
}
|
||||
|
||||
async run<T>(jobName: string, config: T, soloRun: boolean, allowParallelRun: boolean): Promise<void> {
|
||||
if ((allowParallelRun === false && this.JobRunning === true) || this.JobNoParallelRunning === true) {
|
||||
throw new Error('Can\'t start this job while an other is running');
|
||||
async run<T>(
|
||||
jobName: string,
|
||||
config: T,
|
||||
soloRun: boolean,
|
||||
allowParallelRun: boolean
|
||||
): Promise<void> {
|
||||
if (
|
||||
(allowParallelRun === false && this.JobRunning === true) ||
|
||||
this.JobNoParallelRunning === true
|
||||
) {
|
||||
throw new Error("Can't start this job while an other is running");
|
||||
}
|
||||
|
||||
const t = this.findJob(jobName);
|
||||
@ -62,20 +84,37 @@ export class JobManager implements IJobManager, IJobListener {
|
||||
this.progressManager.onJobProgressUpdate(progress.toDTO());
|
||||
};
|
||||
|
||||
onJobFinished = async (job: IJob<any>, state: JobProgressStates, soloRun: boolean): Promise<void> => {
|
||||
onJobFinished = async (
|
||||
job: IJob<any>,
|
||||
state: JobProgressStates,
|
||||
soloRun: boolean
|
||||
): Promise<void> => {
|
||||
// if it was not finished peacefully or was a soloRun, do not start the next one
|
||||
if (state !== JobProgressStates.finished || soloRun === true) {
|
||||
return;
|
||||
}
|
||||
const sch = Config.Server.Jobs.scheduled.find((s): boolean => s.jobName === job.Name);
|
||||
const sch = Config.Server.Jobs.scheduled.find(
|
||||
(s): boolean => s.jobName === job.Name
|
||||
);
|
||||
if (sch) {
|
||||
const children = Config.Server.Jobs.scheduled.filter((s): boolean => s.trigger.type === JobTriggerType.after &&
|
||||
(s.trigger as AfterJobTrigger).afterScheduleName === sch.name);
|
||||
const children = Config.Server.Jobs.scheduled.filter(
|
||||
(s): boolean =>
|
||||
s.trigger.type === JobTriggerType.after &&
|
||||
(s.trigger as AfterJobTrigger).afterScheduleName === sch.name
|
||||
);
|
||||
for (const item of children) {
|
||||
try {
|
||||
await this.run(item.jobName, item.config, false, item.allowParallelRun);
|
||||
await this.run(
|
||||
item.jobName,
|
||||
item.config,
|
||||
false,
|
||||
item.allowParallelRun
|
||||
);
|
||||
} catch (e) {
|
||||
NotificationManager.warning('Job running error:' + item.name, e.toString());
|
||||
NotificationManager.warning(
|
||||
'Job running error:' + item.name,
|
||||
e.toString()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -107,21 +146,32 @@ export class JobManager implements IJobManager, IJobListener {
|
||||
* Schedules a single job to run
|
||||
*/
|
||||
private runSchedule(schedule: JobScheduleDTO): void {
|
||||
const nextDate = JobScheduleDTOUtils.getNextRunningDate(new Date(), schedule);
|
||||
const nextDate = JobScheduleDTOUtils.getNextRunningDate(
|
||||
new Date(),
|
||||
schedule
|
||||
);
|
||||
if (nextDate && nextDate.getTime() > Date.now()) {
|
||||
Logger.debug(LOG_TAG, 'running schedule: ' + schedule.jobName +
|
||||
' at ' + nextDate.toLocaleString(undefined, {hour12: false}));
|
||||
Logger.debug(
|
||||
LOG_TAG,
|
||||
'running schedule: ' +
|
||||
schedule.jobName +
|
||||
' at ' +
|
||||
nextDate.toLocaleString(undefined, { hour12: false })
|
||||
);
|
||||
|
||||
const timer: NodeJS.Timeout = setTimeout(async (): Promise<void> => {
|
||||
this.timers = this.timers.filter((t): boolean => t.timer !== timer);
|
||||
await this.run(schedule.jobName, schedule.config, false, schedule.allowParallelRun);
|
||||
await this.run(
|
||||
schedule.jobName,
|
||||
schedule.config,
|
||||
false,
|
||||
schedule.allowParallelRun
|
||||
);
|
||||
this.runSchedule(schedule);
|
||||
}, nextDate.getTime() - Date.now());
|
||||
this.timers.push({schedule, timer});
|
||||
|
||||
this.timers.push({ schedule, timer });
|
||||
} else {
|
||||
Logger.debug(LOG_TAG, 'skipping schedule:' + schedule.jobName);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
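runSchedule above re-arms itself: it computes the next run date, sets a single setTimeout for the remaining milliseconds, runs the job, and then schedules the following occurrence. A simplified sketch of that loop with a caller-supplied nextRun function instead of JobScheduleDTOUtils:

// Sketch (not project code): a self re-arming, single-timer scheduler.
function scheduleNext(
  nextRun: (from: Date) => Date | null,
  runJob: () => Promise<void>
): NodeJS.Timeout | null {
  const next = nextRun(new Date());
  if (!next || next.getTime() <= Date.now()) {
    return null; // nothing left to schedule
  }
  return setTimeout(async (): Promise<void> => {
    await runJob();
    scheduleNext(nextRun, runJob); // re-arm for the following occurrence
  }, next.getTime() - Date.now());
}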
|
@ -1,17 +1,22 @@
|
||||
import {promises as fsp} from 'fs';
|
||||
import { promises as fsp } from 'fs';
|
||||
import * as path from 'path';
|
||||
import {ProjectPath} from '../../ProjectPath';
|
||||
import {Config} from '../../../common/config/private/Config';
|
||||
import {JobProgressDTO, JobProgressStates} from '../../../common/entities/job/JobProgressDTO';
|
||||
import { ProjectPath } from '../../ProjectPath';
|
||||
import { Config } from '../../../common/config/private/Config';
|
||||
import {
|
||||
JobProgressDTO,
|
||||
JobProgressStates,
|
||||
} from '../../../common/entities/job/JobProgressDTO';
|
||||
|
||||
export class JobProgressManager {
|
||||
private static readonly VERSION = 3;
|
||||
private db: {
|
||||
version: number,
|
||||
progresses: { [key: string]: { progress: JobProgressDTO, timestamp: number } }
|
||||
version: number;
|
||||
progresses: {
|
||||
[key: string]: { progress: JobProgressDTO; timestamp: number };
|
||||
};
|
||||
} = {
|
||||
version: JobProgressManager.VERSION,
|
||||
progresses: {}
|
||||
progresses: {},
|
||||
};
|
||||
private readonly dbPath: string;
|
||||
private timer: NodeJS.Timeout = null;
|
||||
@ -25,16 +30,17 @@ export class JobProgressManager {
|
||||
const m: { [key: string]: JobProgressDTO } = {};
|
||||
for (const key of Object.keys(this.db.progresses)) {
|
||||
m[key] = this.db.progresses[key].progress;
|
||||
if (this.db.progresses[key].progress.state === JobProgressStates.running) {
|
||||
if (
|
||||
this.db.progresses[key].progress.state === JobProgressStates.running
|
||||
) {
|
||||
m[key].time.end = Date.now();
|
||||
}
|
||||
}
|
||||
return m;
|
||||
}
|
||||
|
||||
|
||||
onJobProgressUpdate(progress: JobProgressDTO): void {
|
||||
this.db.progresses[progress.HashName] = {progress, timestamp: Date.now()};
|
||||
this.db.progresses[progress.HashName] = { progress, timestamp: Date.now() };
|
||||
this.delayedSave();
|
||||
}
|
||||
|
||||
@ -51,10 +57,16 @@ export class JobProgressManager {
|
||||
}
|
||||
this.db = db;
|
||||
|
||||
while (Object.keys(this.db.progresses).length > Config.Server.Jobs.maxSavedProgress) {
|
||||
while (
|
||||
Object.keys(this.db.progresses).length >
|
||||
Config.Server.Jobs.maxSavedProgress
|
||||
) {
|
||||
let min: string = null;
|
||||
for (const key of Object.keys(this.db.progresses)) {
|
||||
if (min === null || this.db.progresses[min].timestamp > this.db.progresses[key].timestamp) {
|
||||
if (
|
||||
min === null ||
|
||||
this.db.progresses[min].timestamp > this.db.progresses[key].timestamp
|
||||
) {
|
||||
min = key;
|
||||
}
|
||||
}
|
||||
@ -62,8 +74,10 @@ export class JobProgressManager {
|
||||
}
|
||||
|
||||
for (const key of Object.keys(this.db.progresses)) {
|
||||
if (this.db.progresses[key].progress.state === JobProgressStates.running ||
|
||||
this.db.progresses[key].progress.state === JobProgressStates.cancelling) {
|
||||
if (
|
||||
this.db.progresses[key].progress.state === JobProgressStates.running ||
|
||||
this.db.progresses[key].progress.state === JobProgressStates.cancelling
|
||||
) {
|
||||
this.db.progresses[key].progress.state = JobProgressStates.interrupted;
|
||||
}
|
||||
}
|
||||
@ -82,5 +96,4 @@ export class JobProgressManager {
|
||||
this.timer = null;
|
||||
}, 5000);
|
||||
}
|
||||
|
||||
}
|
||||
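When the persisted progress store grows past Config.Server.Jobs.maxSavedProgress, the loader above evicts the entries with the oldest timestamps one by one. The same eviction written as a small standalone helper (a sketch; the entry shape mirrors the code above):

// Sketch (not project code): keep only the `max` most recent progress entries.
function trimOldest<T>(
  entries: { [key: string]: { progress: T; timestamp: number } },
  max: number
): void {
  while (Object.keys(entries).length > max) {
    let oldest: string | null = null;
    for (const key of Object.keys(entries)) {
      if (oldest === null || entries[oldest].timestamp > entries[key].timestamp) {
        oldest = key;
      }
    }
    if (oldest === null) {
      break;
    }
    delete entries[oldest];
  }
}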
|
@ -1,15 +1,14 @@
|
||||
import {IJob} from './jobs/IJob';
|
||||
import {IndexingJob} from './jobs/IndexingJob';
|
||||
import {DBRestJob} from './jobs/DBResetJob';
|
||||
import {VideoConvertingJob} from './jobs/VideoConvertingJob';
|
||||
import {PhotoConvertingJob} from './jobs/PhotoConvertingJob';
|
||||
import {ThumbnailGenerationJob} from './jobs/ThumbnailGenerationJob';
|
||||
import {TempFolderCleaningJob} from './jobs/TempFolderCleaningJob';
|
||||
import {PreviewFillingJob} from './jobs/PreviewFillingJob';
|
||||
import {PreviewRestJob} from './jobs/PreviewResetJob';
|
||||
import { IJob } from './jobs/IJob';
|
||||
import { IndexingJob } from './jobs/IndexingJob';
|
||||
import { DBRestJob } from './jobs/DBResetJob';
|
||||
import { VideoConvertingJob } from './jobs/VideoConvertingJob';
|
||||
import { PhotoConvertingJob } from './jobs/PhotoConvertingJob';
|
||||
import { ThumbnailGenerationJob } from './jobs/ThumbnailGenerationJob';
|
||||
import { TempFolderCleaningJob } from './jobs/TempFolderCleaningJob';
|
||||
import { PreviewFillingJob } from './jobs/PreviewFillingJob';
|
||||
import { PreviewRestJob } from './jobs/PreviewResetJob';
|
||||
|
||||
export class JobRepository {
|
||||
|
||||
private static instance: JobRepository = null;
|
||||
availableJobs: { [key: string]: IJob<any> } = {};
|
||||
|
||||
@ -21,7 +20,7 @@ export class JobRepository {
|
||||
}
|
||||
|
||||
getAvailableJobs(): IJob<any>[] {
|
||||
return Object.values(this.availableJobs).filter(t => t.Supported);
|
||||
return Object.values(this.availableJobs).filter((t) => t.Supported);
|
||||
}
|
||||
|
||||
register(job: IJob<any>): void {
|
||||
@ -32,7 +31,6 @@ export class JobRepository {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
JobRepository.Instance.register(new IndexingJob());
|
||||
JobRepository.Instance.register(new DBRestJob());
|
||||
JobRepository.Instance.register(new PreviewFillingJob());
|
||||
|
@ -1,9 +1,11 @@
|
||||
import {ObjectManagers} from '../../ObjectManagers';
|
||||
import {Config} from '../../../../common/config/private/Config';
|
||||
import {ConfigTemplateEntry, DefaultsJobs} from '../../../../common/entities/job/JobDTO';
|
||||
import {Job} from './Job';
|
||||
import {DatabaseType} from '../../../../common/config/private/PrivateConfig';
|
||||
|
||||
import { ObjectManagers } from '../../ObjectManagers';
|
||||
import { Config } from '../../../../common/config/private/Config';
|
||||
import {
|
||||
ConfigTemplateEntry,
|
||||
DefaultsJobs,
|
||||
} from '../../../../common/entities/job/JobDTO';
|
||||
import { Job } from './Job';
|
||||
import { DatabaseType } from '../../../../common/config/private/PrivateConfig';
|
||||
|
||||
export class DBRestJob extends Job {
|
||||
public readonly Name = DefaultsJobs[DefaultsJobs['Database Reset']];
|
||||
@ -14,8 +16,7 @@ export class DBRestJob extends Job {
|
||||
return Config.Server.Database.type !== DatabaseType.memory;
|
||||
}
|
||||
|
||||
protected async init(): Promise<void> {
|
||||
}
|
||||
protected async init(): Promise<void> {}
|
||||
|
||||
protected async step(): Promise<boolean> {
|
||||
this.Progress.Left = 1;
|
||||
@ -23,6 +24,4 @@ export class DBRestJob extends Job {
|
||||
await ObjectManagers.getInstance().IndexingManager.resetDB();
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -1,31 +1,31 @@
|
||||
import {ConfigTemplateEntry} from '../../../../common/entities/job/JobDTO';
|
||||
import {Job} from './Job';
|
||||
import { ConfigTemplateEntry } from '../../../../common/entities/job/JobDTO';
|
||||
import { Job } from './Job';
|
||||
import * as path from 'path';
|
||||
import {DiskManager} from '../../DiskManger';
|
||||
import {DirectoryScanSettings} from '../../threading/DiskMangerWorker';
|
||||
import {Logger} from '../../../Logger';
|
||||
import {Config} from '../../../../common/config/private/Config';
|
||||
import {FileDTO} from '../../../../common/entities/FileDTO';
|
||||
import {SQLConnection} from '../../database/sql/SQLConnection';
|
||||
import {MediaEntity} from '../../database/sql/enitites/MediaEntity';
|
||||
import {PhotoEntity} from '../../database/sql/enitites/PhotoEntity';
|
||||
import {VideoEntity} from '../../database/sql/enitites/VideoEntity';
|
||||
import {backendTexts} from '../../../../common/BackendTexts';
|
||||
import {ProjectPath} from '../../../ProjectPath';
|
||||
import {DatabaseType} from '../../../../common/config/private/PrivateConfig';
|
||||
|
||||
import { DiskManager } from '../../DiskManger';
|
||||
import { DirectoryScanSettings } from '../../threading/DiskMangerWorker';
|
||||
import { Logger } from '../../../Logger';
|
||||
import { Config } from '../../../../common/config/private/Config';
|
||||
import { FileDTO } from '../../../../common/entities/FileDTO';
|
||||
import { SQLConnection } from '../../database/sql/SQLConnection';
|
||||
import { MediaEntity } from '../../database/sql/enitites/MediaEntity';
|
||||
import { PhotoEntity } from '../../database/sql/enitites/PhotoEntity';
|
||||
import { VideoEntity } from '../../database/sql/enitites/VideoEntity';
|
||||
import { backendTexts } from '../../../../common/BackendTexts';
|
||||
import { ProjectPath } from '../../../ProjectPath';
|
||||
import { DatabaseType } from '../../../../common/config/private/PrivateConfig';
|
||||
|
||||
const LOG_TAG = '[FileJob]';
|
||||
|
||||
/**
|
||||
* Abstract class for thumbnail creation, file deleting etc.
|
||||
*/
|
||||
export abstract class FileJob<S extends { indexedOnly: boolean } = { indexedOnly: boolean }> extends Job<S> {
|
||||
export abstract class FileJob<
|
||||
S extends { indexedOnly: boolean } = { indexedOnly: boolean }
|
||||
> extends Job<S> {
|
||||
public readonly ConfigTemplate: ConfigTemplateEntry[] = [];
|
||||
directoryQueue: string[] = [];
|
||||
fileQueue: string[] = [];
|
||||
|
||||
|
||||
protected constructor(private scanFilter: DirectoryScanSettings) {
|
||||
super();
|
||||
this.scanFilter.noChildDirPhotos = true;
|
||||
@ -35,7 +35,7 @@ export abstract class FileJob<S extends { indexedOnly: boolean } = { indexedOnly
|
||||
type: 'boolean',
|
||||
name: backendTexts.indexedFilesOnly.name,
|
||||
description: backendTexts.indexedFilesOnly.description,
|
||||
defaultValue: true
|
||||
defaultValue: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -50,7 +50,6 @@ export abstract class FileJob<S extends { indexedOnly: boolean } = { indexedOnly
|
||||
return files;
|
||||
}
|
||||
|
||||
|
||||
protected async filterMetaFiles(files: FileDTO[]): Promise<FileDTO[]> {
|
||||
return files;
|
||||
}
|
||||
@ -65,9 +64,10 @@ export abstract class FileJob<S extends { indexedOnly: boolean } = { indexedOnly
|
||||
}
|
||||
|
||||
if (this.directoryQueue.length > 0) {
|
||||
|
||||
if (this.config.indexedOnly === true &&
|
||||
Config.Server.Database.type !== DatabaseType.memory) {
|
||||
if (
|
||||
this.config.indexedOnly === true &&
|
||||
Config.Server.Database.type !== DatabaseType.memory
|
||||
) {
|
||||
await this.loadAllMediaFilesFromDB();
|
||||
this.directoryQueue = [];
|
||||
} else {
|
||||
@ -87,8 +87,13 @@ export abstract class FileJob<S extends { indexedOnly: boolean } = { indexedOnly
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
Logger.error(LOG_TAG, 'Error during processing file:' + filePath + ', ' + e.toString());
|
||||
this.Progress.log('Error during processing file:' + filePath + ', ' + e.toString());
|
||||
Logger.error(
|
||||
LOG_TAG,
|
||||
'Error during processing file:' + filePath + ', ' + e.toString()
|
||||
);
|
||||
this.Progress.log(
|
||||
'Error during processing file:' + filePath + ', ' + e.toString()
|
||||
);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
@ -97,26 +102,42 @@ export abstract class FileJob<S extends { indexedOnly: boolean } = { indexedOnly
|
||||
private async loadADirectoryFromDisk(): Promise<void> {
|
||||
const directory = this.directoryQueue.shift();
|
||||
this.Progress.log('scanning directory: ' + directory);
|
||||
const scanned = await DiskManager.scanDirectoryNoMetadata(directory, this.scanFilter);
|
||||
const scanned = await DiskManager.scanDirectoryNoMetadata(
|
||||
directory,
|
||||
this.scanFilter
|
||||
);
|
||||
for (const item of scanned.directories) {
|
||||
this.directoryQueue.push(path.join(item.path, item.name));
|
||||
}
|
||||
if (this.scanFilter.noPhoto !== true || this.scanFilter.noVideo !== true) {
|
||||
const scannedAndFiltered = await this.filterMediaFiles(scanned.media);
|
||||
for (const item of scannedAndFiltered) {
|
||||
this.fileQueue.push(path.join(ProjectPath.ImageFolder, item.directory.path, item.directory.name, item.name));
|
||||
this.fileQueue.push(
|
||||
path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
item.directory.path,
|
||||
item.directory.name,
|
||||
item.name
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
if (this.scanFilter.noMetaFile !== true) {
|
||||
const scannedAndFiltered = await this.filterMetaFiles(scanned.metaFile);
|
||||
for (const item of scannedAndFiltered) {
|
||||
this.fileQueue.push(path.join(ProjectPath.ImageFolder, item.directory.path, item.directory.name, item.name));
|
||||
this.fileQueue.push(
|
||||
path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
item.directory.path,
|
||||
item.directory.name,
|
||||
item.name
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async loadAllMediaFilesFromDB(): Promise<void> {
|
||||
|
||||
if (this.scanFilter.noVideo === true && this.scanFilter.noPhoto === true) {
|
||||
return;
|
||||
}
|
||||
@ -141,7 +162,14 @@ export abstract class FileJob<S extends { indexedOnly: boolean } = { indexedOnly
|
||||
.getMany();
|
||||
|
||||
for (const item of result) {
|
||||
this.fileQueue.push(path.join(ProjectPath.ImageFolder, item.directory.path, item.directory.name, item.name));
|
||||
this.fileQueue.push(
|
||||
path.join(
|
||||
ProjectPath.ImageFolder,
|
||||
item.directory.path,
|
||||
item.directory.name,
|
||||
item.name
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
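Both the disk-scan and the DB branches of FileJob above end the same way: every matching media or meta file is pushed onto fileQueue as an absolute path assembled from the image root plus the item's directory path, directory name and file name. A sketch of that path assembly for a list of such items (the ScannedItem interface is illustrative):

import * as path from 'path';

// Sketch (not project code): turn scan results into absolute file paths.
interface ScannedItem {
  name: string;
  directory: { path: string; name: string };
}

function toAbsolutePaths(imageRoot: string, items: ScannedItem[]): string[] {
  return items.map((item) =>
    path.join(imageRoot, item.directory.path, item.directory.name, item.name)
  );
}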
|
@ -1,6 +1,6 @@
import {JobDTO} from '../../../../common/entities/job/JobDTO';
import {JobProgress} from './JobProgress';
import {IJobListener} from './IJobListener';
import { JobDTO } from '../../../../common/entities/job/JobDTO';
import { JobProgress } from './JobProgress';
import { IJobListener } from './IJobListener';

export interface IJob<T> extends JobDTO {
  Name: string;
@ -1,9 +1,13 @@
import {JobProgress} from './JobProgress';
import {IJob} from './IJob';
import {JobProgressStates} from '../../../../common/entities/job/JobProgressDTO';
import { JobProgress } from './JobProgress';
import { IJob } from './IJob';
import { JobProgressStates } from '../../../../common/entities/job/JobProgressDTO';

export interface IJobListener {
  onJobFinished(job: IJob<any>, state: JobProgressStates, soloRun: boolean): void;
  onJobFinished(
    job: IJob<any>,
    state: JobProgressStates,
    soloRun: boolean
  ): void;

  onProgressUpdate(progress: JobProgress): void;
}
|
@ -1,36 +1,41 @@
|
||||
import {ObjectManagers} from '../../ObjectManagers';
|
||||
import { ObjectManagers } from '../../ObjectManagers';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import {Config} from '../../../../common/config/private/Config';
|
||||
import {Job} from './Job';
|
||||
import {ConfigTemplateEntry, DefaultsJobs} from '../../../../common/entities/job/JobDTO';
|
||||
import {JobProgressStates} from '../../../../common/entities/job/JobProgressDTO';
|
||||
import {DatabaseType} from '../../../../common/config/private/PrivateConfig';
|
||||
import {DiskMangerWorker} from '../../threading/DiskMangerWorker';
|
||||
import {ProjectPath} from '../../../ProjectPath';
|
||||
import {backendTexts} from '../../../../common/BackendTexts';
|
||||
import {ParentDirectoryDTO} from '../../../../common/entities/DirectoryDTO';
|
||||
import {ISQLGalleryManager} from '../../database/sql/IGalleryManager';
|
||||
import {Logger} from '../../../Logger';
|
||||
import {FileDTO} from '../../../../common/entities/FileDTO';
|
||||
import { Config } from '../../../../common/config/private/Config';
|
||||
import { Job } from './Job';
|
||||
import {
|
||||
ConfigTemplateEntry,
|
||||
DefaultsJobs,
|
||||
} from '../../../../common/entities/job/JobDTO';
|
||||
import { JobProgressStates } from '../../../../common/entities/job/JobProgressDTO';
|
||||
import { DatabaseType } from '../../../../common/config/private/PrivateConfig';
|
||||
import { DiskMangerWorker } from '../../threading/DiskMangerWorker';
|
||||
import { ProjectPath } from '../../../ProjectPath';
|
||||
import { backendTexts } from '../../../../common/BackendTexts';
|
||||
import { ParentDirectoryDTO } from '../../../../common/entities/DirectoryDTO';
|
||||
import { ISQLGalleryManager } from '../../database/sql/IGalleryManager';
|
||||
import { Logger } from '../../../Logger';
|
||||
import { FileDTO } from '../../../../common/entities/FileDTO';
|
||||
|
||||
|
||||
export class IndexingJob<S extends { indexChangesOnly: boolean } = { indexChangesOnly: boolean }> extends Job<S> {
|
||||
export class IndexingJob<
|
||||
S extends { indexChangesOnly: boolean } = { indexChangesOnly: boolean }
|
||||
> extends Job<S> {
|
||||
public readonly Name = DefaultsJobs[DefaultsJobs.Indexing];
|
||||
directoriesToIndex: string[] = [];
|
||||
public readonly ConfigTemplate: ConfigTemplateEntry[] = [{
|
||||
id: 'indexChangesOnly',
|
||||
type: 'boolean',
|
||||
name: backendTexts.indexChangesOnly.name,
|
||||
description: backendTexts.indexChangesOnly.description,
|
||||
defaultValue: true
|
||||
}];
|
||||
public readonly ConfigTemplate: ConfigTemplateEntry[] = [
|
||||
{
|
||||
id: 'indexChangesOnly',
|
||||
type: 'boolean',
|
||||
name: backendTexts.indexChangesOnly.name,
|
||||
description: backendTexts.indexChangesOnly.description,
|
||||
defaultValue: true,
|
||||
},
|
||||
];
|
||||
|
||||
public get Supported(): boolean {
|
||||
return Config.Server.Database.type !== DatabaseType.memory;
|
||||
}
|
||||
|
||||
|
||||
protected async init(): Promise<void> {
|
||||
this.directoriesToIndex.push('/');
|
||||
}
|
||||
@ -53,9 +58,15 @@ export class IndexingJob<S extends { indexChangesOnly: boolean } = { indexChange
|
||||
if (this.config.indexChangesOnly) {
|
||||
const stat = fs.statSync(path.join(ProjectPath.ImageFolder, directory));
|
||||
const lastModified = DiskMangerWorker.calcLastModified(stat);
|
||||
scanned = await (ObjectManagers.getInstance().GalleryManager as ISQLGalleryManager).selectDirStructure(directory);
|
||||
scanned = await (
|
||||
ObjectManagers.getInstance().GalleryManager as ISQLGalleryManager
|
||||
).selectDirStructure(directory);
|
||||
// If not modified and it was scanned before, dir is up-to-date
|
||||
if (scanned && scanned.lastModified === lastModified && scanned.lastScanned != null) {
|
||||
if (
|
||||
scanned &&
|
||||
scanned.lastModified === lastModified &&
|
||||
scanned.lastScanned != null
|
||||
) {
|
||||
dirChanged = false;
|
||||
}
|
||||
}
|
||||
@ -64,7 +75,10 @@ export class IndexingJob<S extends { indexChangesOnly: boolean } = { indexChange
|
||||
if (dirChanged || !this.config.indexChangesOnly) {
|
||||
this.Progress.log('Indexing: ' + directory);
|
||||
this.Progress.Processed++;
|
||||
scanned = await ObjectManagers.getInstance().IndexingManager.indexDirectory(directory);
|
||||
scanned =
|
||||
await ObjectManagers.getInstance().IndexingManager.indexDirectory(
|
||||
directory
|
||||
);
|
||||
} else {
|
||||
this.Progress.log('Skipped: ' + directory);
|
||||
this.Progress.Skipped++;
|
||||
@ -78,6 +92,4 @@ export class IndexingJob<S extends { indexChangesOnly: boolean } = { indexChange
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -1,9 +1,13 @@
|
||||
import {Logger} from '../../../Logger';
|
||||
import {IJob} from './IJob';
import {ConfigTemplateEntry, JobDTO, JobDTOUtils} from '../../../../common/entities/job/JobDTO';
import {JobProgress} from './JobProgress';
import {IJobListener} from './IJobListener';
import {JobProgressStates} from '../../../../common/entities/job/JobProgressDTO';
import { Logger } from '../../../Logger';
import { IJob } from './IJob';
import {
ConfigTemplateEntry,
JobDTO,
JobDTOUtils,
} from '../../../../common/entities/job/JobDTO';
import { JobProgress } from './JobProgress';
import { IJobListener } from './IJobListener';
import { JobProgressStates } from '../../../../common/entities/job/JobProgressDTO';

declare const process: any;
declare const global: any;

@ -29,36 +33,54 @@ export abstract class Job<T = void> implements IJob<T> {

public abstract get ConfigTemplate(): ConfigTemplateEntry[];

public get Progress(): JobProgress {
return this.progress;
}

public get InProgress(): boolean {
return this.Progress !== null && (this.Progress.State === JobProgressStates.running ||
this.Progress.State === JobProgressStates.cancelling);
return (
this.Progress !== null &&
(this.Progress.State === JobProgressStates.running ||
this.Progress.State === JobProgressStates.cancelling)
);
}

public start(config: T, soloRun = false, allowParallelRun = false): Promise<void> {
public start(
config: T,
soloRun = false,
allowParallelRun = false
): Promise<void> {
if (this.InProgress === false && this.Supported === true) {
Logger.info(LOG_TAG, 'Running job ' + (soloRun === true ? 'solo' : '') + ': ' + this.Name);
Logger.info(
LOG_TAG,
'Running job ' + (soloRun === true ? 'solo' : '') + ': ' + this.Name
);
this.soloRun = soloRun;
this.allowParallelRun = allowParallelRun;
this.config = config;
this.progress = new JobProgress(this.Name, JobDTOUtils.getHashName(this.Name, this.config));
this.progress = new JobProgress(
this.Name,
JobDTOUtils.getHashName(this.Name, this.config)
);
this.progress.OnChange = this.jobListener.onProgressUpdate;
const pr = new Promise<void>((resolve): void => {
this.prResolve = resolve;
});
this.init().catch(console.error);
this.run();
if (!this.IsInstant) { // if instant, wait for execution, otherwise, return right away
if (!this.IsInstant) {
// if instant, wait for execution, otherwise, return right away
return Promise.resolve();
}
return pr;
} else {
Logger.info(LOG_TAG, 'Job already running or not supported: ' + this.Name);
return Promise.reject('Job already running or not supported: ' + this.Name);
Logger.info(
LOG_TAG,
'Job already running or not supported: ' + this.Name
);
return Promise.reject(
'Job already running or not supported: ' + this.Name
);
}
}

@ -73,7 +95,7 @@ export abstract class Job<T = void> implements IJob<T> {
public toJSON(): JobDTO {
return {
Name: this.Name,
ConfigTemplate: this.ConfigTemplate
ConfigTemplate: this.ConfigTemplate,
};
}

@ -110,11 +132,15 @@ export abstract class Job<T = void> implements IJob<T> {
private run(): void {
process.nextTick(async (): Promise<void> => {
try {
if (this.Progress == null || this.Progress.State !== JobProgressStates.running) {
if (
this.Progress == null ||
this.Progress.State !== JobProgressStates.running
) {
this.onFinish();
return;
}
if (await this.step() === false) { // finished
if ((await this.step()) === false) {
// finished
this.onFinish();
return;
}
@ -1,5 +1,8 @@
import {JobProgressDTO, JobProgressLogDTO, JobProgressStates} from '../../../../common/entities/job/JobProgressDTO';
import {
JobProgressDTO,
JobProgressLogDTO,
JobProgressStates,
} from '../../../../common/entities/job/JobProgressDTO';

export class JobProgress {
private steps = {
@ -13,11 +16,12 @@ export class JobProgress {
end: null as number,
};
private logCounter = 0;
private logs: { id: number, timestamp: string, comment: string }[] = [];
private logs: { id: number; timestamp: string; comment: string }[] = [];

constructor(public readonly jobName: string, public readonly HashName: string) {
}
constructor(
public readonly jobName: string,
public readonly HashName: string
) {}

set OnChange(val: (progress: JobProgress) => void) {
this.onChange = val;
@ -77,14 +81,17 @@ export class JobProgress {
return this.logs;
}

onChange = (progress: JobProgress): void => {
};
onChange = (progress: JobProgress): void => {};

log(log: string): void {
while (this.logs.length > 10) {
this.logs.shift();
}
this.logs.push({id: this.logCounter++, timestamp: (new Date()).toISOString(), comment: log});
this.logs.push({
id: this.logCounter++,
timestamp: new Date().toISOString(),
comment: log,
});
this.onChange(this);
}

@ -95,10 +102,10 @@ export class JobProgress {
state: this.state,
time: {
start: this.time.start,
end: this.time.end
end: this.time.end,
},
logs: this.logs,
steps: this.steps
steps: this.steps,
};
}
}
@ -1,29 +1,27 @@
import {Config} from '../../../../common/config/private/Config';
import {DefaultsJobs} from '../../../../common/entities/job/JobDTO';
import {FileJob} from './FileJob';
import {PhotoProcessing} from '../../fileprocessing/PhotoProcessing';

import { Config } from '../../../../common/config/private/Config';
import { DefaultsJobs } from '../../../../common/entities/job/JobDTO';
import { FileJob } from './FileJob';
import { PhotoProcessing } from '../../fileprocessing/PhotoProcessing';

export class PhotoConvertingJob extends FileJob {
public readonly Name = DefaultsJobs[DefaultsJobs['Photo Converting']];

constructor() {
super({noVideo: true, noMetaFile: true});
super({ noVideo: true, noMetaFile: true });
}

public get Supported(): boolean {
return Config.Client.Media.Photo.Converting.enabled === true;
}

protected async shouldProcess(mPath: string): Promise<boolean> {
return !(await PhotoProcessing.convertedPhotoExist(mPath, Config.Server.Media.Photo.Converting.resolution));
return !(await PhotoProcessing.convertedPhotoExist(
mPath,
Config.Server.Media.Photo.Converting.resolution
));
}

protected async processFile(mPath: string): Promise<void> {
await PhotoProcessing.convertPhoto(mPath);
}

}
@ -1,27 +1,29 @@
import {ObjectManagers} from '../../ObjectManagers';
import {ConfigTemplateEntry, DefaultsJobs} from '../../../../common/entities/job/JobDTO';
import {Job} from './Job';
import {Config} from '../../../../common/config/private/Config';
import {DatabaseType} from '../../../../common/config/private/PrivateConfig';

import { ObjectManagers } from '../../ObjectManagers';
import {
ConfigTemplateEntry,
DefaultsJobs,
} from '../../../../common/entities/job/JobDTO';
import { Job } from './Job';
import { Config } from '../../../../common/config/private/Config';
import { DatabaseType } from '../../../../common/config/private/PrivateConfig';

export class PreviewFillingJob extends Job {
public readonly Name = DefaultsJobs[DefaultsJobs['Preview Filling']];
public readonly ConfigTemplate: ConfigTemplateEntry[] = null;
directoryToSetPreview: { id: number, name: string, path: string }[] = null;
directoryToSetPreview: { id: number; name: string; path: string }[] = null;
status: 'Persons' | 'Albums' | 'Directory' = 'Persons';

public get Supported(): boolean {
return Config.Server.Database.type !== DatabaseType.memory;
}

protected async init(): Promise<void> {
}
protected async init(): Promise<void> {}

protected async step(): Promise<boolean> {
if (!this.directoryToSetPreview) {
this.Progress.log('Loading Directories to process');
this.directoryToSetPreview = await ObjectManagers.getInstance().PreviewManager.getPartialDirsWithoutPreviews();
this.directoryToSetPreview =
await ObjectManagers.getInstance().PreviewManager.getPartialDirsWithoutPreviews();
this.Progress.Left = this.directoryToSetPreview.length + 2;
return true;
}
@ -57,7 +59,8 @@ export class PreviewFillingJob extends Job {

private async stepDirectoryPreview(): Promise<boolean> {
if (this.directoryToSetPreview.length === 0) {
this.directoryToSetPreview = await ObjectManagers.getInstance().PreviewManager.getPartialDirsWithoutPreviews();
this.directoryToSetPreview =
await ObjectManagers.getInstance().PreviewManager.getPartialDirsWithoutPreviews();
// double check if there is really no more
if (this.directoryToSetPreview.length > 0) {
return true; // continue
@ -66,14 +69,13 @@ export class PreviewFillingJob extends Job {
return false;
}
const directory = this.directoryToSetPreview.shift();
this.Progress.log('Setting preview: ' + directory.path + directory.name);
this.Progress.log('Setting preview: ' + directory.path + directory.name);
this.Progress.Left = this.directoryToSetPreview.length;

await ObjectManagers.getInstance().PreviewManager.setAndGetPreviewForDirectory(directory);
await ObjectManagers.getInstance().PreviewManager.setAndGetPreviewForDirectory(
directory
);
this.Progress.Processed++;
return true;

}

}
@ -1,9 +1,11 @@
import {ObjectManagers} from '../../ObjectManagers';
import {Config} from '../../../../common/config/private/Config';
import {ConfigTemplateEntry, DefaultsJobs} from '../../../../common/entities/job/JobDTO';
import {Job} from './Job';
import {DatabaseType} from '../../../../common/config/private/PrivateConfig';

import { ObjectManagers } from '../../ObjectManagers';
import { Config } from '../../../../common/config/private/Config';
import {
ConfigTemplateEntry,
DefaultsJobs,
} from '../../../../common/entities/job/JobDTO';
import { Job } from './Job';
import { DatabaseType } from '../../../../common/config/private/PrivateConfig';

export class PreviewRestJob extends Job {
public readonly Name = DefaultsJobs[DefaultsJobs['Preview Reset']];
@ -14,8 +16,7 @@ export class PreviewRestJob extends Job {
return Config.Server.Database.type !== DatabaseType.memory;
}

protected async init(): Promise<void> {
}
protected async init(): Promise<void> {}

protected async step(): Promise<boolean> {
this.Progress.Left = 1;
@ -25,6 +26,4 @@ export class PreviewRestJob extends Job {
await ObjectManagers.getInstance().PersonManager.resetPreviews();
return false;
}

}
@ -1,11 +1,13 @@
import {ConfigTemplateEntry, DefaultsJobs} from '../../../../common/entities/job/JobDTO';
import {
ConfigTemplateEntry,
DefaultsJobs,
} from '../../../../common/entities/job/JobDTO';
import * as path from 'path';
import * as fs from 'fs';
import {Job} from './Job';
import {ProjectPath} from '../../../ProjectPath';
import {PhotoProcessing} from '../../fileprocessing/PhotoProcessing';
import {VideoProcessing} from '../../fileprocessing/VideoProcessing';

import { Job } from './Job';
import { ProjectPath } from '../../../ProjectPath';
import { PhotoProcessing } from '../../fileprocessing/PhotoProcessing';
import { VideoProcessing } from '../../fileprocessing/VideoProcessing';

export class TempFolderCleaningJob extends Job {
public readonly Name = DefaultsJobs[DefaultsJobs['Temp Folder Cleaning']];
@ -14,14 +16,12 @@ export class TempFolderCleaningJob extends Job {
directoryQueue: string[] = [];
private tempRootCleaned = false;

protected async init(): Promise<void> {
this.tempRootCleaned = false;
this.directoryQueue = [];
this.directoryQueue.push(ProjectPath.TranscodedFolder);
}

protected async isValidFile(filePath: string): Promise<boolean> {
if (PhotoProcessing.isPhoto(filePath)) {
return PhotoProcessing.isValidConvertedPath(filePath);
@ -35,18 +35,21 @@ export class TempFolderCleaningJob extends Job {
}

protected async isValidDirectory(filePath: string): Promise<boolean> {
const originalPath = path.join(ProjectPath.ImageFolder,
path.relative(ProjectPath.TranscodedFolder, filePath));
const originalPath = path.join(
ProjectPath.ImageFolder,
path.relative(ProjectPath.TranscodedFolder, filePath)
);
try {
await fs.promises.access(originalPath);
return true;
} catch (e) {
}
} catch (e) {}
return false;
}

protected async readDir(dirPath: string): Promise<string[]> {
return (await fs.promises.readdir(dirPath)).map(f => path.normalize(path.join(dirPath, f)));
return (await fs.promises.readdir(dirPath)).map((f) =>
path.normalize(path.join(dirPath, f))
);
}

protected async stepTempDirectory(): Promise<boolean> {
@ -57,7 +60,7 @@ export class TempFolderCleaningJob extends Job {
this.Progress.log('processing: ' + file);
this.Progress.Processed++;
if ((await fs.promises.stat(file)).isDirectory()) {
await fs.promises.rm(file, {recursive: true});
await fs.promises.rm(file, { recursive: true });
} else {
await fs.promises.unlink(file);
}
@ -67,30 +70,28 @@ export class TempFolderCleaningJob extends Job {
}
}

return true;

}

protected async stepConvertedDirectory(): Promise<boolean> {

const filePath = this.directoryQueue.shift();
const stat = await fs.promises.stat(filePath);

this.Progress.Left = this.directoryQueue.length;
if (stat.isDirectory()) {
if (await this.isValidDirectory(filePath) === false) {
if ((await this.isValidDirectory(filePath)) === false) {
this.Progress.log('processing: ' + filePath);
this.Progress.Processed++;
await fs.promises.rm(filePath, {recursive: true});
await fs.promises.rm(filePath, { recursive: true });
} else {
this.Progress.log('skipping: ' + filePath);
this.Progress.Skipped++;
this.directoryQueue = this.directoryQueue.concat(await this.readDir(filePath));
this.directoryQueue = this.directoryQueue.concat(
await this.readDir(filePath)
);
}
} else {
if (await this.isValidFile(filePath) === false) {
if ((await this.isValidFile(filePath)) === false) {
this.Progress.log('processing: ' + filePath);
this.Progress.Processed++;
await fs.promises.unlink(filePath);
@ -98,7 +99,6 @@ export class TempFolderCleaningJob extends Job {
this.Progress.log('skipping: ' + filePath);
this.Progress.Skipped++;
}

}
return true;
}
@ -114,5 +114,4 @@ export class TempFolderCleaningJob extends Job {
}
return this.stepConvertedDirectory();
}

}
@ -1,25 +1,26 @@
import {Config} from '../../../../common/config/private/Config';
import {DefaultsJobs} from '../../../../common/entities/job/JobDTO';
import {FileJob} from './FileJob';
import {PhotoProcessing} from '../../fileprocessing/PhotoProcessing';
import {ThumbnailSourceType} from '../../threading/PhotoWorker';
import {MediaDTO, MediaDTOUtils} from '../../../../common/entities/MediaDTO';
import {FileDTO} from '../../../../common/entities/FileDTO';
import {backendTexts} from '../../../../common/BackendTexts';

export class ThumbnailGenerationJob extends FileJob<{ sizes: number[], indexedOnly: boolean }> {
import { Config } from '../../../../common/config/private/Config';
import { DefaultsJobs } from '../../../../common/entities/job/JobDTO';
import { FileJob } from './FileJob';
import { PhotoProcessing } from '../../fileprocessing/PhotoProcessing';
import { ThumbnailSourceType } from '../../threading/PhotoWorker';
import { MediaDTOUtils } from '../../../../common/entities/MediaDTO';
import { FileDTO } from '../../../../common/entities/FileDTO';
import { backendTexts } from '../../../../common/BackendTexts';

export class ThumbnailGenerationJob extends FileJob<{
sizes: number[];
indexedOnly: boolean;
}> {
public readonly Name = DefaultsJobs[DefaultsJobs['Thumbnail Generation']];

constructor() {
super({noMetaFile: true});
super({ noMetaFile: true });
this.ConfigTemplate.push({
id: 'sizes',
type: 'number-array',
name: backendTexts.sizeToGenerate.name,
description: backendTexts.sizeToGenerate.description,
defaultValue: [Config.Client.Media.Thumbnail.thumbnailSizes[0]]
defaultValue: [Config.Client.Media.Thumbnail.thumbnailSizes[0]],
});
}

@ -27,10 +28,18 @@ export class ThumbnailGenerationJob extends FileJob<{ sizes: number[], indexedOn
return true;
}

start(config: { sizes: number[], indexedOnly: boolean }, soloRun = false, allowParallelRun = false): Promise<void> {
start(
config: { sizes: number[]; indexedOnly: boolean },
soloRun = false,
allowParallelRun = false
): Promise<void> {
for (const item of config.sizes) {
if (Config.Client.Media.Thumbnail.thumbnailSizes.indexOf(item) === -1) {
throw new Error('unknown thumbnails size: ' + item + '. Add it to the possible thumbnail sizes.');
throw new Error(
'unknown thumbnails size: ' +
item +
'. Add it to the possible thumbnail sizes.'
);
}
}

@ -55,13 +64,14 @@ export class ThumbnailGenerationJob extends FileJob<{ sizes: number[], indexedOn

protected async processFile(mPath: string): Promise<void> {
for (const item of this.config.sizes) {
await PhotoProcessing.generateThumbnail(mPath,
await PhotoProcessing.generateThumbnail(
mPath,
item,
MediaDTOUtils.isVideoPath(mPath) ? ThumbnailSourceType.Video : ThumbnailSourceType.Photo,
false);
MediaDTOUtils.isVideoPath(mPath)
? ThumbnailSourceType.Video
: ThumbnailSourceType.Photo,
false
);
}
}

}
@ -1,16 +1,15 @@
import {Config} from '../../../../common/config/private/Config';
import {DefaultsJobs} from '../../../../common/entities/job/JobDTO';
import {FileJob} from './FileJob';
import {VideoProcessing} from '../../fileprocessing/VideoProcessing';
import { Config } from '../../../../common/config/private/Config';
import { DefaultsJobs } from '../../../../common/entities/job/JobDTO';
import { FileJob } from './FileJob';
import { VideoProcessing } from '../../fileprocessing/VideoProcessing';

declare const global: any;

export class VideoConvertingJob extends FileJob {
public readonly Name = DefaultsJobs[DefaultsJobs['Video Converting']];

constructor() {
super({noPhoto: true, noMetaFile: true});
super({ noPhoto: true, noMetaFile: true });
}

public get Supported(): boolean {
@ -27,6 +26,4 @@ export class VideoConvertingJob extends FileJob {
global.gc();
}
}

}
@ -1,22 +1,22 @@
import {promises as fsp, Stats} from 'fs';
import { promises as fsp, Stats } from 'fs';
import * as path from 'path';
import {ParentDirectoryDTO, SubDirectoryDTO} from '../../../common/entities/DirectoryDTO';
import {PhotoDTO} from '../../../common/entities/PhotoDTO';
import {ProjectPath} from '../../ProjectPath';
import {Config} from '../../../common/config/private/Config';
import {VideoDTO} from '../../../common/entities/VideoDTO';
import {FileDTO} from '../../../common/entities/FileDTO';
import {MetadataLoader} from './MetadataLoader';
import {Logger} from '../../Logger';
import {SupportedFormats} from '../../../common/SupportedFormats';
import {VideoProcessing} from '../fileprocessing/VideoProcessing';
import {PhotoProcessing} from '../fileprocessing/PhotoProcessing';
import {Utils} from '../../../common/Utils';

import {
ParentDirectoryDTO,
SubDirectoryDTO,
} from '../../../common/entities/DirectoryDTO';
import { PhotoDTO } from '../../../common/entities/PhotoDTO';
import { ProjectPath } from '../../ProjectPath';
import { Config } from '../../../common/config/private/Config';
import { VideoDTO } from '../../../common/entities/VideoDTO';
import { FileDTO } from '../../../common/entities/FileDTO';
import { MetadataLoader } from './MetadataLoader';
import { Logger } from '../../Logger';
import { SupportedFormats } from '../../../common/SupportedFormats';
import { VideoProcessing } from '../fileprocessing/VideoProcessing';
import { PhotoProcessing } from '../fileprocessing/PhotoProcessing';
import { Utils } from '../../../common/Utils';

export class DiskMangerWorker {

public static calcLastModified(stat: Stats): number {
return Math.max(stat.ctime.getTime(), stat.mtime.getTime());
}
@ -26,12 +26,17 @@ export class DiskMangerWorker {
}

public static pathFromRelativeDirName(relativeDirectoryName: string): string {
return path.join(path.dirname(this.normalizeDirPath(relativeDirectoryName)), path.sep);
return path.join(
path.dirname(this.normalizeDirPath(relativeDirectoryName)),
path.sep
);
}

public static pathFromParent(parent: { path: string, name: string }): string {
return path.join(this.normalizeDirPath(path.join(parent.path, parent.name)), path.sep);
public static pathFromParent(parent: { path: string; name: string }): string {
return path.join(
this.normalizeDirPath(path.join(parent.path, parent.name)),
path.sep
);
}

public static dirName(dirPath: string): string {
@ -41,9 +46,15 @@ export class DiskMangerWorker {
return path.basename(dirPath);
}

public static async excludeDir(name: string, relativeDirectoryName: string, absoluteDirectoryName: string): Promise<boolean> {
if (Config.Server.Indexing.excludeFolderList.length === 0 &&
Config.Server.Indexing.excludeFileList.length === 0) {
public static async excludeDir(
name: string,
relativeDirectoryName: string,
absoluteDirectoryName: string
): Promise<boolean> {
if (
Config.Server.Indexing.excludeFolderList.length === 0 &&
Config.Server.Indexing.excludeFileList.length === 0
) {
return false;
}
const absoluteName = path.normalize(path.join(absoluteDirectoryName, name));
@ -69,28 +80,38 @@ export class DiskMangerWorker {
try {
await fsp.access(path.join(absoluteName, exclude));
return true;
} catch (e) {
}
} catch (e) {}
}

return false;
}

public static async scanDirectoryNoMetadata(relativeDirectoryName: string,
settings: DirectoryScanSettings = {}): Promise<ParentDirectoryDTO<FileDTO>> {
public static async scanDirectoryNoMetadata(
relativeDirectoryName: string,
settings: DirectoryScanSettings = {}
): Promise<ParentDirectoryDTO<FileDTO>> {
settings.noMetadata = true;
return (await this.scanDirectory(relativeDirectoryName, settings)) as ParentDirectoryDTO<FileDTO>;
return (await this.scanDirectory(
relativeDirectoryName,
settings
)) as ParentDirectoryDTO<FileDTO>;
}

public static async scanDirectory(relativeDirectoryName: string,
settings: DirectoryScanSettings = {}): Promise<ParentDirectoryDTO> {

public static async scanDirectory(
relativeDirectoryName: string,
settings: DirectoryScanSettings = {}
): Promise<ParentDirectoryDTO> {
relativeDirectoryName = this.normalizeDirPath(relativeDirectoryName);
const directoryName = DiskMangerWorker.dirName(relativeDirectoryName);
const directoryParent = this.pathFromRelativeDirName(relativeDirectoryName);
const absoluteDirectoryName = path.join(ProjectPath.ImageFolder, relativeDirectoryName);
const absoluteDirectoryName = path.join(
ProjectPath.ImageFolder,
relativeDirectoryName
);

const stat = await fsp.stat(path.join(ProjectPath.ImageFolder, relativeDirectoryName));
const stat = await fsp.stat(
path.join(ProjectPath.ImageFolder, relativeDirectoryName)
);
const directory: ParentDirectoryDTO = {
id: null,
parent: null,
@ -104,34 +125,47 @@ export class DiskMangerWorker {
preview: null,
validPreview: false,
media: [],
metaFile: []
metaFile: [],
};

// nothing to scan, we are here for the empty dir
if (settings.noPhoto === true && settings.noMetadata === true && settings.noVideo === true) {
if (
settings.noPhoto === true &&
settings.noMetadata === true &&
settings.noVideo === true
) {
return directory;
}
const list = await fsp.readdir(absoluteDirectoryName);
for (const file of list) {
const fullFilePath = path.normalize(path.join(absoluteDirectoryName, file));
const fullFilePath = path.normalize(
path.join(absoluteDirectoryName, file)
);
if ((await fsp.stat(fullFilePath)).isDirectory()) {
if (settings.noDirectory === true || settings.previewOnly === true ||
await DiskMangerWorker.excludeDir(file, relativeDirectoryName, absoluteDirectoryName)) {
if (
settings.noDirectory === true ||
settings.previewOnly === true ||
(await DiskMangerWorker.excludeDir(
file,
relativeDirectoryName,
absoluteDirectoryName
))
) {
continue;
}

// create preview directory
const d = await DiskMangerWorker.scanDirectory(path.join(relativeDirectoryName, file),
const d = (await DiskMangerWorker.scanDirectory(
path.join(relativeDirectoryName, file),
{
previewOnly: true
previewOnly: true,
}
) as SubDirectoryDTO;
)) as SubDirectoryDTO;

d.lastScanned = 0; // it was not a fully scan
d.isPartial = true;

directory.directories.push(d);

} else if (PhotoProcessing.isPhoto(fullFilePath)) {
if (settings.noPhoto === true) {
continue;
@ -140,7 +174,10 @@ export class DiskMangerWorker {
const photo = {
name: file,
directory: null,
metadata: settings.noMetadata === true ? null : await MetadataLoader.loadPhotoMetadata(fullFilePath)
metadata:
settings.noMetadata === true
? null
: await MetadataLoader.loadPhotoMetadata(fullFilePath),
} as PhotoDTO;

if (!directory.preview) {
@ -148,7 +185,7 @@ export class DiskMangerWorker {

directory.preview.directory = {
path: directory.path,
name: directory.name
name: directory.name,
};
}
// add the preview photo to the list of media, so it will be saved to the DB
@ -159,32 +196,43 @@ export class DiskMangerWorker {
if (settings.previewOnly === true) {
break;
}

} else if (VideoProcessing.isVideo(fullFilePath)) {
if (Config.Client.Media.Video.enabled === false || settings.noVideo === true || settings.previewOnly === true) {
if (
Config.Client.Media.Video.enabled === false ||
settings.noVideo === true ||
settings.previewOnly === true
) {
continue;
}
try {
directory.media.push({
name: file,
directory: null,
metadata: settings.noMetadata === true ? null : await MetadataLoader.loadVideoMetadata(fullFilePath)
metadata:
settings.noMetadata === true
? null
: await MetadataLoader.loadVideoMetadata(fullFilePath),
} as VideoDTO);
} catch (e) {
Logger.warn('Media loading error, skipping: ' + file + ', reason: ' + e.toString());
Logger.warn(
'Media loading error, skipping: ' +
file +
', reason: ' +
e.toString()
);
}

} else if (DiskMangerWorker.isMetaFile(fullFilePath)) {
if (!DiskMangerWorker.isEnabledMetaFile(fullFilePath) ||
if (
!DiskMangerWorker.isEnabledMetaFile(fullFilePath) ||
settings.noMetaFile === true ||
settings.previewOnly === true) {
settings.previewOnly === true
) {
continue;
}
directory.metaFile.push({
name: file,
directory: null,
} as FileDTO);

}
}

@ -193,7 +241,6 @@ export class DiskMangerWorker {
return directory;
}

private static isMetaFile(fullPath: string): boolean {
const extension = path.extname(fullPath).toLowerCase();
return SupportedFormats.WithDots.MetaFiles.indexOf(extension) !== -1;
@ -213,8 +260,6 @@ export class DiskMangerWorker {

return false;
}

}

export interface DirectoryScanSettings {
@ -1,65 +1,68 @@
import {VideoMetadata} from '../../../common/entities/VideoDTO';
import {FaceRegion, PhotoMetadata} from '../../../common/entities/PhotoDTO';
import {Config} from '../../../common/config/private/Config';
import {Logger} from '../../Logger';
import { VideoMetadata } from '../../../common/entities/VideoDTO';
import { FaceRegion, PhotoMetadata } from '../../../common/entities/PhotoDTO';
import { Config } from '../../../common/config/private/Config';
import { Logger } from '../../Logger';
import * as fs from 'fs';
import {imageSize} from 'image-size';
import { imageSize } from 'image-size';
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
import * as ExifReader from 'exifreader';
import {ExifParserFactory, OrientationTypes} from 'ts-exif-parser';
import {IptcParser} from 'ts-node-iptc';
import {FFmpegFactory} from '../FFmpegFactory';
import {FfprobeData} from 'fluent-ffmpeg';
import {Utils} from '../../../common/Utils';

import { ExifParserFactory, OrientationTypes } from 'ts-exif-parser';
import { IptcParser } from 'ts-node-iptc';
import { FFmpegFactory } from '../FFmpegFactory';
import { FfprobeData } from 'fluent-ffmpeg';
import { Utils } from '../../../common/Utils';

const LOG_TAG = '[MetadataLoader]';
const ffmpeg = FFmpegFactory.get();

export class MetadataLoader {

public static loadVideoMetadata(fullPath: string): Promise<VideoMetadata> {
return new Promise<VideoMetadata>((resolve) => {
const metadata: VideoMetadata = {
size: {
width: 1,
height: 1
height: 1,
},
bitRate: 0,
duration: 0,
creationDate: 0,
fileSize: 0,
fps: 0
fps: 0,
};
try {
const stat = fs.statSync(fullPath);
metadata.fileSize = stat.size;
metadata.creationDate = stat.mtime.getTime();
} catch (err) {
}
} catch (err) {}
try {
ffmpeg(fullPath).ffprobe((err: any, data: FfprobeData) => {
if (!!err || data === null || !data.streams[0]) {
return resolve(metadata);
}

try {
for (const stream of data.streams) {
if (stream.width) {
metadata.size.width = stream.width;
metadata.size.height = stream.height;

if (Utils.isInt32(parseInt('' + stream.rotation, 10)) &&
(Math.abs(parseInt('' + stream.rotation, 10)) / 90) % 2 === 1) {
if (
Utils.isInt32(parseInt('' + stream.rotation, 10)) &&
(Math.abs(parseInt('' + stream.rotation, 10)) / 90) % 2 === 1
) {
// noinspection JSSuspiciousNameCombination
metadata.size.width = stream.height;
// noinspection JSSuspiciousNameCombination
metadata.size.height = stream.width;
}

if (Utils.isInt32(Math.floor(parseFloat(stream.duration) * 1000))) {
metadata.duration = Math.floor(parseFloat(stream.duration) * 1000);
if (
Utils.isInt32(Math.floor(parseFloat(stream.duration) * 1000))
) {
metadata.duration = Math.floor(
parseFloat(stream.duration) * 1000
);
}

if (Utils.isInt32(parseInt(stream.bit_rate, 10))) {
@ -68,13 +71,14 @@ export class MetadataLoader {
if (Utils.isInt32(parseInt(stream.avg_frame_rate, 10))) {
metadata.fps = parseInt(stream.avg_frame_rate, 10) || null;
}
metadata.creationDate = Date.parse(stream.tags.creation_time) || metadata.creationDate;
metadata.creationDate =
Date.parse(stream.tags.creation_time) ||
metadata.creationDate;
break;
}
}

} catch (err) {
}
// eslint-disable-next-line no-empty
} catch (err) {}
metadata.creationDate = metadata.creationDate || 0;

return resolve(metadata);
@ -87,237 +91,296 @@ export class MetadataLoader {

public static loadPhotoMetadata(fullPath: string): Promise<PhotoMetadata> {
return new Promise<PhotoMetadata>((resolve, reject) => {
const fd = fs.openSync(fullPath, 'r');
const fd = fs.openSync(fullPath, 'r');

const data = Buffer.allocUnsafe(Config.Server.photoMetadataSize);
fs.read(fd, data, 0, Config.Server.photoMetadataSize, 0, (err) => {
fs.closeSync(fd);
if (err) {
return reject({file: fullPath, error: err});
}
const metadata: PhotoMetadata = {
size: {width: 1, height: 1},
creationDate: 0,
fileSize: 0
};
const data = Buffer.allocUnsafe(Config.Server.photoMetadataSize);
fs.read(fd, data, 0, Config.Server.photoMetadataSize, 0, (err) => {
fs.closeSync(fd);
if (err) {
return reject({ file: fullPath, error: err });
}
const metadata: PhotoMetadata = {
size: { width: 1, height: 1 },
creationDate: 0,
fileSize: 0,
};
try {
try {
const stat = fs.statSync(fullPath);
metadata.fileSize = stat.size;
metadata.creationDate = stat.mtime.getTime();
} catch (err) {}

try {
const stat = fs.statSync(fullPath);
metadata.fileSize = stat.size;
metadata.creationDate = stat.mtime.getTime();
} catch (err) {
}

try {
const exif = ExifParserFactory.create(data).parse();
if (exif.tags.ISO || exif.tags.Model ||
exif.tags.Make || exif.tags.FNumber ||
exif.tags.ExposureTime || exif.tags.FocalLength ||
exif.tags.LensModel) {
if (exif.tags.Model && exif.tags.Model !== '') {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.model = '' + exif.tags.Model;
}
if (exif.tags.Make && exif.tags.Make !== '') {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.make = '' + exif.tags.Make;
}
if (exif.tags.LensModel && exif.tags.LensModel !== '') {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.lens = '' + exif.tags.LensModel;
}
if (Utils.isUInt32(exif.tags.ISO)) {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.ISO = parseInt('' + exif.tags.ISO, 10);
}
if (Utils.isFloat32(exif.tags.FocalLength)) {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.focalLength = parseFloat('' + exif.tags.FocalLength);
}
if (Utils.isFloat32(exif.tags.ExposureTime)) {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.exposure = parseFloat(parseFloat('' + exif.tags.ExposureTime).toFixed(4));
}
if (Utils.isFloat32(exif.tags.FNumber)) {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.fStop = parseFloat(parseFloat('' + exif.tags.FNumber).toFixed(2));
}
try {
const exif = ExifParserFactory.create(data).parse();
if (
exif.tags.ISO ||
exif.tags.Model ||
exif.tags.Make ||
exif.tags.FNumber ||
exif.tags.ExposureTime ||
exif.tags.FocalLength ||
exif.tags.LensModel
) {
if (exif.tags.Model && exif.tags.Model !== '') {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.model = '' + exif.tags.Model;
}
if (!isNaN(exif.tags.GPSLatitude) || exif.tags.GPSLongitude || exif.tags.GPSAltitude) {
metadata.positionData = metadata.positionData || {};
metadata.positionData.GPSData = {};

if (Utils.isFloat32(exif.tags.GPSLongitude)) {
metadata.positionData.GPSData.longitude = parseFloat(exif.tags.GPSLongitude.toFixed(6));
}
if (Utils.isFloat32(exif.tags.GPSLatitude)) {
metadata.positionData.GPSData.latitude = parseFloat(exif.tags.GPSLatitude.toFixed(6));
}
if (exif.tags.Make && exif.tags.Make !== '') {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.make = '' + exif.tags.Make;
}
if (exif.tags.CreateDate || exif.tags.DateTimeOriginal || exif.tags.ModifyDate) {
metadata.creationDate = (exif.tags.DateTimeOriginal || exif.tags.CreateDate || exif.tags.ModifyDate) * 1000;
if (exif.tags.LensModel && exif.tags.LensModel !== '') {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.lens = '' + exif.tags.LensModel;
}

if (exif.imageSize) {
metadata.size = {width: exif.imageSize.width, height: exif.imageSize.height};
} else if (exif.tags.RelatedImageWidth && exif.tags.RelatedImageHeight) {
metadata.size = {width: exif.tags.RelatedImageWidth, height: exif.tags.RelatedImageHeight};
} else {
const info = imageSize(fullPath);
metadata.size = {width: info.width, height: info.height};
if (Utils.isUInt32(exif.tags.ISO)) {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.ISO = parseInt('' + exif.tags.ISO, 10);
}
} catch (err) {
Logger.debug(LOG_TAG, 'Error parsing exif', fullPath, err);
try {
const info = imageSize(fullPath);
metadata.size = {width: info.width, height: info.height};
} catch (e) {
metadata.size = {width: 1, height: 1};
if (Utils.isFloat32(exif.tags.FocalLength)) {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.focalLength = parseFloat(
'' + exif.tags.FocalLength
);
}
if (Utils.isFloat32(exif.tags.ExposureTime)) {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.exposure = parseFloat(
parseFloat('' + exif.tags.ExposureTime).toFixed(4)
);
}
if (Utils.isFloat32(exif.tags.FNumber)) {
metadata.cameraData = metadata.cameraData || {};
metadata.cameraData.fStop = parseFloat(
parseFloat('' + exif.tags.FNumber).toFixed(2)
);
}
}
if (
!isNaN(exif.tags.GPSLatitude) ||
exif.tags.GPSLongitude ||
exif.tags.GPSAltitude
) {
metadata.positionData = metadata.positionData || {};
metadata.positionData.GPSData = {};

try {
const iptcData = IptcParser.parse(data);
if (iptcData.country_or_primary_location_name) {
metadata.positionData = metadata.positionData || {};
metadata.positionData.country = iptcData.country_or_primary_location_name.replace(/\0/g, '').trim();
if (Utils.isFloat32(exif.tags.GPSLongitude)) {
metadata.positionData.GPSData.longitude = parseFloat(
exif.tags.GPSLongitude.toFixed(6)
);
}
if (iptcData.province_or_state) {
metadata.positionData = metadata.positionData || {};
metadata.positionData.state = iptcData.province_or_state.replace(/\0/g, '').trim();
if (Utils.isFloat32(exif.tags.GPSLatitude)) {
metadata.positionData.GPSData.latitude = parseFloat(
exif.tags.GPSLatitude.toFixed(6)
);
}
if (iptcData.city) {
metadata.positionData = metadata.positionData || {};
metadata.positionData.city = iptcData.city.replace(/\0/g, '').trim();
}
if (iptcData.caption) {
metadata.caption = iptcData.caption.replace(/\0/g, '').trim();
}
if (Array.isArray(iptcData.keywords)) {
metadata.keywords = iptcData.keywords;
}

if (iptcData.date_time) {
metadata.creationDate = iptcData.date_time.getTime();
}

} catch (err) {
// Logger.debug(LOG_TAG, 'Error parsing iptc data', fullPath, err);
}
if (
exif.tags.CreateDate ||
exif.tags.DateTimeOriginal ||
exif.tags.ModifyDate
) {
metadata.creationDate =
(exif.tags.DateTimeOriginal ||
exif.tags.CreateDate ||
exif.tags.ModifyDate) * 1000;
}

if (!metadata.creationDate) { // creationDate can be negative, when it was created before epoch (1970)
metadata.creationDate = 0;
if (exif.imageSize) {
metadata.size = {
width: exif.imageSize.width,
height: exif.imageSize.height,
};
} else if (
exif.tags.RelatedImageWidth &&
exif.tags.RelatedImageHeight
) {
metadata.size = {
width: exif.tags.RelatedImageWidth,
height: exif.tags.RelatedImageHeight,
};
} else {
const info = imageSize(fullPath);
metadata.size = { width: info.width, height: info.height };
}

try {
// TODO: clean up the three different exif readers,
// and keep the minimum amount only
const exif = ExifReader.load(data);
if (exif.Rating) {
metadata.rating = (parseInt(exif.Rating.value, 10) as any);
if (metadata.rating < 0) {
metadata.rating = 0;
}
}
if (exif.subject && exif.subject.value && exif.subject.value.length > 0) {
if (metadata.keywords === undefined) {
metadata.keywords = [];
}
for (const kw of exif.subject.value) {
if (metadata.keywords.indexOf(kw.description) === -1) {
metadata.keywords.push(kw.description);
}
}
}
if (exif.Orientation) {
const orientation = (parseInt(exif.Orientation.value as any, 10) as any);
if (OrientationTypes.BOTTOM_LEFT < orientation) {
// noinspection JSSuspiciousNameCombination
const height = metadata.size.width;
// noinspection JSSuspiciousNameCombination
metadata.size.width = metadata.size.height;
metadata.size.height = height;
}
}
if (Config.Client.Faces.enabled) {
const faces: FaceRegion[] = [];
if (exif.Regions && exif.Regions.value.RegionList && exif.Regions.value.RegionList.value) {
for (const regionRoot of exif.Regions.value.RegionList.value as any[]) {

let type;
let name;
let box;
const createFaceBox = (w: string, h: string, x: string, y: string) => {
return {
width: Math.round(parseFloat(w) * metadata.size.width),
height: Math.round(parseFloat(h) * metadata.size.height),
left: Math.round(parseFloat(x) * metadata.size.width),
top: Math.round(parseFloat(y) * metadata.size.height)
};
};

/* Adobe Lightroom based face region structure */
if (regionRoot.value &&
regionRoot.value['rdf:Description'] &&
regionRoot.value['rdf:Description'].value &&
regionRoot.value['rdf:Description'].value['mwg-rs:Area']) {

const region = regionRoot.value['rdf:Description'];
const regionBox = region.value['mwg-rs:Area'].attributes;

name = region.attributes['mwg-rs:Name'];
type = region.attributes['mwg-rs:Type'];
box = createFaceBox(regionBox['stArea:w'],
regionBox['stArea:h'],
regionBox['stArea:x'],
regionBox['stArea:y']);
/* Load exiftool edited face region structure, see github issue #191 */
} else if (regionRoot.Area && regionRoot.Name && regionRoot.Type) {

const regionBox = regionRoot.Area.value;
name = regionRoot.Name.value;
type = regionRoot.Type.value;
box = createFaceBox(regionBox.w.value,
regionBox.h.value,
regionBox.x.value,
regionBox.y.value);
}

if (type !== 'Face' || !name) {
continue;
}
// convert center base box to corner based box
box.left = Math.round(Math.max(0, box.left - box.width / 2));
box.top = Math.round(Math.max(0, box.top - box.height / 2));
faces.push({name, box});
}
}
if (faces.length > 0) {
metadata.faces = faces; // save faces
if (Config.Client.Faces.keywordsToPersons) {
// remove faces from keywords
metadata.faces.forEach(f => {
const index = metadata.keywords.indexOf(f.name);
if (index !== -1) {
metadata.keywords.splice(index, 1);
}
});
}
}
}
} catch (err) {
}

return resolve(metadata);
} catch (err) {
return reject({file: fullPath, error: err});
Logger.debug(LOG_TAG, 'Error parsing exif', fullPath, err);
try {
const info = imageSize(fullPath);
metadata.size = { width: info.width, height: info.height };
} catch (e) {
metadata.size = { width: 1, height: 1 };
}
}
});
}
);
}

try {
const iptcData = IptcParser.parse(data);
if (iptcData.country_or_primary_location_name) {
metadata.positionData = metadata.positionData || {};
metadata.positionData.country =
iptcData.country_or_primary_location_name
.replace(/\0/g, '')
.trim();
}
if (iptcData.province_or_state) {
metadata.positionData = metadata.positionData || {};
metadata.positionData.state = iptcData.province_or_state
.replace(/\0/g, '')
.trim();
}
if (iptcData.city) {
metadata.positionData = metadata.positionData || {};
metadata.positionData.city = iptcData.city
.replace(/\0/g, '')
.trim();
}
if (iptcData.caption) {
metadata.caption = iptcData.caption.replace(/\0/g, '').trim();
}
if (Array.isArray(iptcData.keywords)) {
metadata.keywords = iptcData.keywords;
}

if (iptcData.date_time) {
metadata.creationDate = iptcData.date_time.getTime();
}
} catch (err) {
// Logger.debug(LOG_TAG, 'Error parsing iptc data', fullPath, err);
}

if (!metadata.creationDate) {
// creationDate can be negative, when it was created before epoch (1970)
metadata.creationDate = 0;
}

try {
// TODO: clean up the three different exif readers,
// and keep the minimum amount only
const exif = ExifReader.load(data);
if (exif.Rating) {
metadata.rating = parseInt(exif.Rating.value, 10) as any;
if (metadata.rating < 0) {
metadata.rating = 0;
}
}
if (
exif.subject &&
exif.subject.value &&
exif.subject.value.length > 0
) {
if (metadata.keywords === undefined) {
metadata.keywords = [];
}
for (const kw of exif.subject.value) {
if (metadata.keywords.indexOf(kw.description) === -1) {
metadata.keywords.push(kw.description);
}
}
}
if (exif.Orientation) {
const orientation = parseInt(
exif.Orientation.value as any,
10
) as any;
if (OrientationTypes.BOTTOM_LEFT < orientation) {
// noinspection JSSuspiciousNameCombination
const height = metadata.size.width;
// noinspection JSSuspiciousNameCombination
metadata.size.width = metadata.size.height;
metadata.size.height = height;
}
}
if (Config.Client.Faces.enabled) {
const faces: FaceRegion[] = [];
if (
exif.Regions &&
exif.Regions.value.RegionList &&
exif.Regions.value.RegionList.value
) {
for (const regionRoot of exif.Regions.value.RegionList
.value as any[]) {
let type;
let name;
let box;
const createFaceBox = (
w: string,
h: string,
x: string,
y: string
) => {
return {
width: Math.round(parseFloat(w) * metadata.size.width),
height: Math.round(parseFloat(h) * metadata.size.height),
left: Math.round(parseFloat(x) * metadata.size.width),
top: Math.round(parseFloat(y) * metadata.size.height),
};
};

/* Adobe Lightroom based face region structure */
if (
regionRoot.value &&
regionRoot.value['rdf:Description'] &&
regionRoot.value['rdf:Description'].value &&
regionRoot.value['rdf:Description'].value['mwg-rs:Area']
) {
const region = regionRoot.value['rdf:Description'];
const regionBox = region.value['mwg-rs:Area'].attributes;

name = region.attributes['mwg-rs:Name'];
type = region.attributes['mwg-rs:Type'];
box = createFaceBox(
regionBox['stArea:w'],
regionBox['stArea:h'],
regionBox['stArea:x'],
regionBox['stArea:y']
);
/* Load exiftool edited face region structure, see github issue #191 */
} else if (
regionRoot.Area &&
regionRoot.Name &&
regionRoot.Type
) {
const regionBox = regionRoot.Area.value;
name = regionRoot.Name.value;
type = regionRoot.Type.value;
box = createFaceBox(
regionBox.w.value,
regionBox.h.value,
regionBox.x.value,
regionBox.y.value
);
}

if (type !== 'Face' || !name) {
continue;
}
// convert center base box to corner based box
box.left = Math.round(Math.max(0, box.left - box.width / 2));
box.top = Math.round(Math.max(0, box.top - box.height / 2));
faces.push({ name, box });
}
}
if (faces.length > 0) {
metadata.faces = faces; // save faces
if (Config.Client.Faces.keywordsToPersons) {
// remove faces from keywords
metadata.faces.forEach((f) => {
const index = metadata.keywords.indexOf(f.name);
if (index !== -1) {
metadata.keywords.splice(index, 1);
}
});
}
}
}
} catch (err) {}

return resolve(metadata);
} catch (err) {
return reject({ file: fullPath, error: err });
}
});
});
}
}