You've already forked immich
mirror of
https://github.com/immich-app/immich.git
synced 2025-06-24 04:46:50 +02:00
feat: built-in automatic database backups (#13773)
This commit is contained in:
@ -21,6 +21,17 @@ The recommended way to backup and restore the Immich database is to use the `pg_
|
|||||||
It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
|
It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
|
### Automatic Database Backups
|
||||||
|
|
||||||
|
Immich will automatically create database backups by default. The backups are stored in `UPLOAD_LOCATION/backups`.
|
||||||
|
You can adjust the schedule and amount of kept backups in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
|
||||||
|
By default, Immich will keep the last 14 backups and create a new backup every day at 2:00 AM.
|
||||||
|
|
||||||
|
#### Restoring
|
||||||
|
|
||||||
|
We hope to make restoring simpler in future versions, for now you can find the backups in the `UPLOAD_LOCATION/backups` folder on your host.
|
||||||
|
Then please follow the steps in the following section for restoring the database.
|
||||||
|
|
||||||
### Manual Backup and Restore
|
### Manual Backup and Restore
|
||||||
|
|
||||||
<Tabs>
|
<Tabs>
|
||||||
|
11
i18n/en.json
11
i18n/en.json
@ -34,6 +34,11 @@
|
|||||||
"authentication_settings_disable_all": "Are you sure you want to disable all login methods? Login will be completely disabled.",
|
"authentication_settings_disable_all": "Are you sure you want to disable all login methods? Login will be completely disabled.",
|
||||||
"authentication_settings_reenable": "To re-enable, use a <link>Server Command</link>.",
|
"authentication_settings_reenable": "To re-enable, use a <link>Server Command</link>.",
|
||||||
"background_task_job": "Background Tasks",
|
"background_task_job": "Background Tasks",
|
||||||
|
"backup_database": "Backup Database",
|
||||||
|
"backup_database_enable_description": "Enable database backups",
|
||||||
|
"backup_keep_last_amount": "Amount of previous backups to keep",
|
||||||
|
"backup_settings": "Backup Settings",
|
||||||
|
"backup_settings_description": "Manage database backup settings",
|
||||||
"check_all": "Check All",
|
"check_all": "Check All",
|
||||||
"cleared_jobs": "Cleared jobs for: {job}",
|
"cleared_jobs": "Cleared jobs for: {job}",
|
||||||
"config_set_by_file": "Config is currently set by a config file",
|
"config_set_by_file": "Config is currently set by a config file",
|
||||||
@ -43,6 +48,9 @@
|
|||||||
"confirm_reprocess_all_faces": "Are you sure you want to reprocess all faces? This will also clear named people.",
|
"confirm_reprocess_all_faces": "Are you sure you want to reprocess all faces? This will also clear named people.",
|
||||||
"confirm_user_password_reset": "Are you sure you want to reset {user}'s password?",
|
"confirm_user_password_reset": "Are you sure you want to reset {user}'s password?",
|
||||||
"create_job": "Create job",
|
"create_job": "Create job",
|
||||||
|
"cron_expression": "Cron expression",
|
||||||
|
"cron_expression_description": "Set the scanning interval using the cron format. For more information please refer to e.g. <link>Crontab Guru</link>",
|
||||||
|
"cron_expression_presets": "Cron expression presets",
|
||||||
"disable_login": "Disable login",
|
"disable_login": "Disable login",
|
||||||
"duplicate_detection_job_description": "Run machine learning on assets to detect similar images. Relies on Smart Search",
|
"duplicate_detection_job_description": "Run machine learning on assets to detect similar images. Relies on Smart Search",
|
||||||
"exclusion_pattern_description": "Exclusion patterns lets you ignore files and folders when scanning your library. This is useful if you have folders that contain files you don't want to import, such as RAW files.",
|
"exclusion_pattern_description": "Exclusion patterns lets you ignore files and folders when scanning your library. This is useful if you have folders that contain files you don't want to import, such as RAW files.",
|
||||||
@ -80,9 +88,6 @@
|
|||||||
"jobs_delayed": "{jobCount, plural, other {# delayed}}",
|
"jobs_delayed": "{jobCount, plural, other {# delayed}}",
|
||||||
"jobs_failed": "{jobCount, plural, other {# failed}}",
|
"jobs_failed": "{jobCount, plural, other {# failed}}",
|
||||||
"library_created": "Created library: {library}",
|
"library_created": "Created library: {library}",
|
||||||
"library_cron_expression": "Cron expression",
|
|
||||||
"library_cron_expression_description": "Set the scanning interval using the cron format. For more information please refer to e.g. <link>Crontab Guru</link>",
|
|
||||||
"library_cron_expression_presets": "Cron expression presets",
|
|
||||||
"library_deleted": "Library deleted",
|
"library_deleted": "Library deleted",
|
||||||
"library_import_path_description": "Specify a folder to import. This folder, including subfolders, will be scanned for images and videos.",
|
"library_import_path_description": "Specify a folder to import. This folder, including subfolders, will be scanned for images and videos.",
|
||||||
"library_scanning": "Periodic Scanning",
|
"library_scanning": "Periodic Scanning",
|
||||||
|
2
mobile/openapi/README.md
generated
2
mobile/openapi/README.md
generated
@ -306,6 +306,7 @@ Class | Method | HTTP request | Description
|
|||||||
- [CreateAlbumDto](doc//CreateAlbumDto.md)
|
- [CreateAlbumDto](doc//CreateAlbumDto.md)
|
||||||
- [CreateLibraryDto](doc//CreateLibraryDto.md)
|
- [CreateLibraryDto](doc//CreateLibraryDto.md)
|
||||||
- [CreateProfileImageResponseDto](doc//CreateProfileImageResponseDto.md)
|
- [CreateProfileImageResponseDto](doc//CreateProfileImageResponseDto.md)
|
||||||
|
- [DatabaseBackupConfig](doc//DatabaseBackupConfig.md)
|
||||||
- [DownloadArchiveInfo](doc//DownloadArchiveInfo.md)
|
- [DownloadArchiveInfo](doc//DownloadArchiveInfo.md)
|
||||||
- [DownloadInfoDto](doc//DownloadInfoDto.md)
|
- [DownloadInfoDto](doc//DownloadInfoDto.md)
|
||||||
- [DownloadResponse](doc//DownloadResponse.md)
|
- [DownloadResponse](doc//DownloadResponse.md)
|
||||||
@ -413,6 +414,7 @@ Class | Method | HTTP request | Description
|
|||||||
- [StackCreateDto](doc//StackCreateDto.md)
|
- [StackCreateDto](doc//StackCreateDto.md)
|
||||||
- [StackResponseDto](doc//StackResponseDto.md)
|
- [StackResponseDto](doc//StackResponseDto.md)
|
||||||
- [StackUpdateDto](doc//StackUpdateDto.md)
|
- [StackUpdateDto](doc//StackUpdateDto.md)
|
||||||
|
- [SystemConfigBackupsDto](doc//SystemConfigBackupsDto.md)
|
||||||
- [SystemConfigDto](doc//SystemConfigDto.md)
|
- [SystemConfigDto](doc//SystemConfigDto.md)
|
||||||
- [SystemConfigFFmpegDto](doc//SystemConfigFFmpegDto.md)
|
- [SystemConfigFFmpegDto](doc//SystemConfigFFmpegDto.md)
|
||||||
- [SystemConfigFacesDto](doc//SystemConfigFacesDto.md)
|
- [SystemConfigFacesDto](doc//SystemConfigFacesDto.md)
|
||||||
|
2
mobile/openapi/lib/api.dart
generated
2
mobile/openapi/lib/api.dart
generated
@ -120,6 +120,7 @@ part 'model/colorspace.dart';
|
|||||||
part 'model/create_album_dto.dart';
|
part 'model/create_album_dto.dart';
|
||||||
part 'model/create_library_dto.dart';
|
part 'model/create_library_dto.dart';
|
||||||
part 'model/create_profile_image_response_dto.dart';
|
part 'model/create_profile_image_response_dto.dart';
|
||||||
|
part 'model/database_backup_config.dart';
|
||||||
part 'model/download_archive_info.dart';
|
part 'model/download_archive_info.dart';
|
||||||
part 'model/download_info_dto.dart';
|
part 'model/download_info_dto.dart';
|
||||||
part 'model/download_response.dart';
|
part 'model/download_response.dart';
|
||||||
@ -227,6 +228,7 @@ part 'model/source_type.dart';
|
|||||||
part 'model/stack_create_dto.dart';
|
part 'model/stack_create_dto.dart';
|
||||||
part 'model/stack_response_dto.dart';
|
part 'model/stack_response_dto.dart';
|
||||||
part 'model/stack_update_dto.dart';
|
part 'model/stack_update_dto.dart';
|
||||||
|
part 'model/system_config_backups_dto.dart';
|
||||||
part 'model/system_config_dto.dart';
|
part 'model/system_config_dto.dart';
|
||||||
part 'model/system_config_f_fmpeg_dto.dart';
|
part 'model/system_config_f_fmpeg_dto.dart';
|
||||||
part 'model/system_config_faces_dto.dart';
|
part 'model/system_config_faces_dto.dart';
|
||||||
|
4
mobile/openapi/lib/api_client.dart
generated
4
mobile/openapi/lib/api_client.dart
generated
@ -294,6 +294,8 @@ class ApiClient {
|
|||||||
return CreateLibraryDto.fromJson(value);
|
return CreateLibraryDto.fromJson(value);
|
||||||
case 'CreateProfileImageResponseDto':
|
case 'CreateProfileImageResponseDto':
|
||||||
return CreateProfileImageResponseDto.fromJson(value);
|
return CreateProfileImageResponseDto.fromJson(value);
|
||||||
|
case 'DatabaseBackupConfig':
|
||||||
|
return DatabaseBackupConfig.fromJson(value);
|
||||||
case 'DownloadArchiveInfo':
|
case 'DownloadArchiveInfo':
|
||||||
return DownloadArchiveInfo.fromJson(value);
|
return DownloadArchiveInfo.fromJson(value);
|
||||||
case 'DownloadInfoDto':
|
case 'DownloadInfoDto':
|
||||||
@ -508,6 +510,8 @@ class ApiClient {
|
|||||||
return StackResponseDto.fromJson(value);
|
return StackResponseDto.fromJson(value);
|
||||||
case 'StackUpdateDto':
|
case 'StackUpdateDto':
|
||||||
return StackUpdateDto.fromJson(value);
|
return StackUpdateDto.fromJson(value);
|
||||||
|
case 'SystemConfigBackupsDto':
|
||||||
|
return SystemConfigBackupsDto.fromJson(value);
|
||||||
case 'SystemConfigDto':
|
case 'SystemConfigDto':
|
||||||
return SystemConfigDto.fromJson(value);
|
return SystemConfigDto.fromJson(value);
|
||||||
case 'SystemConfigFFmpegDto':
|
case 'SystemConfigFFmpegDto':
|
||||||
|
@ -14,6 +14,7 @@ class AllJobStatusResponseDto {
|
|||||||
/// Returns a new [AllJobStatusResponseDto] instance.
|
/// Returns a new [AllJobStatusResponseDto] instance.
|
||||||
AllJobStatusResponseDto({
|
AllJobStatusResponseDto({
|
||||||
required this.backgroundTask,
|
required this.backgroundTask,
|
||||||
|
required this.backupDatabase,
|
||||||
required this.duplicateDetection,
|
required this.duplicateDetection,
|
||||||
required this.faceDetection,
|
required this.faceDetection,
|
||||||
required this.facialRecognition,
|
required this.facialRecognition,
|
||||||
@ -31,6 +32,8 @@ class AllJobStatusResponseDto {
|
|||||||
|
|
||||||
JobStatusDto backgroundTask;
|
JobStatusDto backgroundTask;
|
||||||
|
|
||||||
|
JobStatusDto backupDatabase;
|
||||||
|
|
||||||
JobStatusDto duplicateDetection;
|
JobStatusDto duplicateDetection;
|
||||||
|
|
||||||
JobStatusDto faceDetection;
|
JobStatusDto faceDetection;
|
||||||
@ -60,6 +63,7 @@ class AllJobStatusResponseDto {
|
|||||||
@override
|
@override
|
||||||
bool operator ==(Object other) => identical(this, other) || other is AllJobStatusResponseDto &&
|
bool operator ==(Object other) => identical(this, other) || other is AllJobStatusResponseDto &&
|
||||||
other.backgroundTask == backgroundTask &&
|
other.backgroundTask == backgroundTask &&
|
||||||
|
other.backupDatabase == backupDatabase &&
|
||||||
other.duplicateDetection == duplicateDetection &&
|
other.duplicateDetection == duplicateDetection &&
|
||||||
other.faceDetection == faceDetection &&
|
other.faceDetection == faceDetection &&
|
||||||
other.facialRecognition == facialRecognition &&
|
other.facialRecognition == facialRecognition &&
|
||||||
@ -78,6 +82,7 @@ class AllJobStatusResponseDto {
|
|||||||
int get hashCode =>
|
int get hashCode =>
|
||||||
// ignore: unnecessary_parenthesis
|
// ignore: unnecessary_parenthesis
|
||||||
(backgroundTask.hashCode) +
|
(backgroundTask.hashCode) +
|
||||||
|
(backupDatabase.hashCode) +
|
||||||
(duplicateDetection.hashCode) +
|
(duplicateDetection.hashCode) +
|
||||||
(faceDetection.hashCode) +
|
(faceDetection.hashCode) +
|
||||||
(facialRecognition.hashCode) +
|
(facialRecognition.hashCode) +
|
||||||
@ -93,11 +98,12 @@ class AllJobStatusResponseDto {
|
|||||||
(videoConversion.hashCode);
|
(videoConversion.hashCode);
|
||||||
|
|
||||||
@override
|
@override
|
||||||
String toString() => 'AllJobStatusResponseDto[backgroundTask=$backgroundTask, duplicateDetection=$duplicateDetection, faceDetection=$faceDetection, facialRecognition=$facialRecognition, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';
|
String toString() => 'AllJobStatusResponseDto[backgroundTask=$backgroundTask, backupDatabase=$backupDatabase, duplicateDetection=$duplicateDetection, faceDetection=$faceDetection, facialRecognition=$facialRecognition, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';
|
||||||
|
|
||||||
Map<String, dynamic> toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
final json = <String, dynamic>{};
|
final json = <String, dynamic>{};
|
||||||
json[r'backgroundTask'] = this.backgroundTask;
|
json[r'backgroundTask'] = this.backgroundTask;
|
||||||
|
json[r'backupDatabase'] = this.backupDatabase;
|
||||||
json[r'duplicateDetection'] = this.duplicateDetection;
|
json[r'duplicateDetection'] = this.duplicateDetection;
|
||||||
json[r'faceDetection'] = this.faceDetection;
|
json[r'faceDetection'] = this.faceDetection;
|
||||||
json[r'facialRecognition'] = this.facialRecognition;
|
json[r'facialRecognition'] = this.facialRecognition;
|
||||||
@ -124,6 +130,7 @@ class AllJobStatusResponseDto {
|
|||||||
|
|
||||||
return AllJobStatusResponseDto(
|
return AllJobStatusResponseDto(
|
||||||
backgroundTask: JobStatusDto.fromJson(json[r'backgroundTask'])!,
|
backgroundTask: JobStatusDto.fromJson(json[r'backgroundTask'])!,
|
||||||
|
backupDatabase: JobStatusDto.fromJson(json[r'backupDatabase'])!,
|
||||||
duplicateDetection: JobStatusDto.fromJson(json[r'duplicateDetection'])!,
|
duplicateDetection: JobStatusDto.fromJson(json[r'duplicateDetection'])!,
|
||||||
faceDetection: JobStatusDto.fromJson(json[r'faceDetection'])!,
|
faceDetection: JobStatusDto.fromJson(json[r'faceDetection'])!,
|
||||||
facialRecognition: JobStatusDto.fromJson(json[r'facialRecognition'])!,
|
facialRecognition: JobStatusDto.fromJson(json[r'facialRecognition'])!,
|
||||||
@ -185,6 +192,7 @@ class AllJobStatusResponseDto {
|
|||||||
/// The list of required keys that must be present in a JSON.
|
/// The list of required keys that must be present in a JSON.
|
||||||
static const requiredKeys = <String>{
|
static const requiredKeys = <String>{
|
||||||
'backgroundTask',
|
'backgroundTask',
|
||||||
|
'backupDatabase',
|
||||||
'duplicateDetection',
|
'duplicateDetection',
|
||||||
'faceDetection',
|
'faceDetection',
|
||||||
'facialRecognition',
|
'facialRecognition',
|
||||||
|
116
mobile/openapi/lib/model/database_backup_config.dart
generated
Normal file
116
mobile/openapi/lib/model/database_backup_config.dart
generated
Normal file
@ -0,0 +1,116 @@
|
|||||||
|
//
|
||||||
|
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||||
|
//
|
||||||
|
// @dart=2.18
|
||||||
|
|
||||||
|
// ignore_for_file: unused_element, unused_import
|
||||||
|
// ignore_for_file: always_put_required_named_parameters_first
|
||||||
|
// ignore_for_file: constant_identifier_names
|
||||||
|
// ignore_for_file: lines_longer_than_80_chars
|
||||||
|
|
||||||
|
part of openapi.api;
|
||||||
|
|
||||||
|
class DatabaseBackupConfig {
|
||||||
|
/// Returns a new [DatabaseBackupConfig] instance.
|
||||||
|
DatabaseBackupConfig({
|
||||||
|
required this.cronExpression,
|
||||||
|
required this.enabled,
|
||||||
|
required this.keepLastAmount,
|
||||||
|
});
|
||||||
|
|
||||||
|
String cronExpression;
|
||||||
|
|
||||||
|
bool enabled;
|
||||||
|
|
||||||
|
/// Minimum value: 1
|
||||||
|
num keepLastAmount;
|
||||||
|
|
||||||
|
@override
|
||||||
|
bool operator ==(Object other) => identical(this, other) || other is DatabaseBackupConfig &&
|
||||||
|
other.cronExpression == cronExpression &&
|
||||||
|
other.enabled == enabled &&
|
||||||
|
other.keepLastAmount == keepLastAmount;
|
||||||
|
|
||||||
|
@override
|
||||||
|
int get hashCode =>
|
||||||
|
// ignore: unnecessary_parenthesis
|
||||||
|
(cronExpression.hashCode) +
|
||||||
|
(enabled.hashCode) +
|
||||||
|
(keepLastAmount.hashCode);
|
||||||
|
|
||||||
|
@override
|
||||||
|
String toString() => 'DatabaseBackupConfig[cronExpression=$cronExpression, enabled=$enabled, keepLastAmount=$keepLastAmount]';
|
||||||
|
|
||||||
|
Map<String, dynamic> toJson() {
|
||||||
|
final json = <String, dynamic>{};
|
||||||
|
json[r'cronExpression'] = this.cronExpression;
|
||||||
|
json[r'enabled'] = this.enabled;
|
||||||
|
json[r'keepLastAmount'] = this.keepLastAmount;
|
||||||
|
return json;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a new [DatabaseBackupConfig] instance and imports its values from
|
||||||
|
/// [value] if it's a [Map], null otherwise.
|
||||||
|
// ignore: prefer_constructors_over_static_methods
|
||||||
|
static DatabaseBackupConfig? fromJson(dynamic value) {
|
||||||
|
upgradeDto(value, "DatabaseBackupConfig");
|
||||||
|
if (value is Map) {
|
||||||
|
final json = value.cast<String, dynamic>();
|
||||||
|
|
||||||
|
return DatabaseBackupConfig(
|
||||||
|
cronExpression: mapValueOfType<String>(json, r'cronExpression')!,
|
||||||
|
enabled: mapValueOfType<bool>(json, r'enabled')!,
|
||||||
|
keepLastAmount: num.parse('${json[r'keepLastAmount']}'),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
static List<DatabaseBackupConfig> listFromJson(dynamic json, {bool growable = false,}) {
|
||||||
|
final result = <DatabaseBackupConfig>[];
|
||||||
|
if (json is List && json.isNotEmpty) {
|
||||||
|
for (final row in json) {
|
||||||
|
final value = DatabaseBackupConfig.fromJson(row);
|
||||||
|
if (value != null) {
|
||||||
|
result.add(value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result.toList(growable: growable);
|
||||||
|
}
|
||||||
|
|
||||||
|
static Map<String, DatabaseBackupConfig> mapFromJson(dynamic json) {
|
||||||
|
final map = <String, DatabaseBackupConfig>{};
|
||||||
|
if (json is Map && json.isNotEmpty) {
|
||||||
|
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||||
|
for (final entry in json.entries) {
|
||||||
|
final value = DatabaseBackupConfig.fromJson(entry.value);
|
||||||
|
if (value != null) {
|
||||||
|
map[entry.key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
|
||||||
|
// maps a json object with a list of DatabaseBackupConfig-objects as value to a dart map
|
||||||
|
static Map<String, List<DatabaseBackupConfig>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||||
|
final map = <String, List<DatabaseBackupConfig>>{};
|
||||||
|
if (json is Map && json.isNotEmpty) {
|
||||||
|
// ignore: parameter_assignments
|
||||||
|
json = json.cast<String, dynamic>();
|
||||||
|
for (final entry in json.entries) {
|
||||||
|
map[entry.key] = DatabaseBackupConfig.listFromJson(entry.value, growable: growable,);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The list of required keys that must be present in a JSON.
|
||||||
|
static const requiredKeys = <String>{
|
||||||
|
'cronExpression',
|
||||||
|
'enabled',
|
||||||
|
'keepLastAmount',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
3
mobile/openapi/lib/model/job_name.dart
generated
3
mobile/openapi/lib/model/job_name.dart
generated
@ -37,6 +37,7 @@ class JobName {
|
|||||||
static const sidecar = JobName._(r'sidecar');
|
static const sidecar = JobName._(r'sidecar');
|
||||||
static const library_ = JobName._(r'library');
|
static const library_ = JobName._(r'library');
|
||||||
static const notifications = JobName._(r'notifications');
|
static const notifications = JobName._(r'notifications');
|
||||||
|
static const backupDatabase = JobName._(r'backupDatabase');
|
||||||
|
|
||||||
/// List of all possible values in this [enum][JobName].
|
/// List of all possible values in this [enum][JobName].
|
||||||
static const values = <JobName>[
|
static const values = <JobName>[
|
||||||
@ -54,6 +55,7 @@ class JobName {
|
|||||||
sidecar,
|
sidecar,
|
||||||
library_,
|
library_,
|
||||||
notifications,
|
notifications,
|
||||||
|
backupDatabase,
|
||||||
];
|
];
|
||||||
|
|
||||||
static JobName? fromJson(dynamic value) => JobNameTypeTransformer().decode(value);
|
static JobName? fromJson(dynamic value) => JobNameTypeTransformer().decode(value);
|
||||||
@ -106,6 +108,7 @@ class JobNameTypeTransformer {
|
|||||||
case r'sidecar': return JobName.sidecar;
|
case r'sidecar': return JobName.sidecar;
|
||||||
case r'library': return JobName.library_;
|
case r'library': return JobName.library_;
|
||||||
case r'notifications': return JobName.notifications;
|
case r'notifications': return JobName.notifications;
|
||||||
|
case r'backupDatabase': return JobName.backupDatabase;
|
||||||
default:
|
default:
|
||||||
if (!allowNull) {
|
if (!allowNull) {
|
||||||
throw ArgumentError('Unknown enum value to decode: $data');
|
throw ArgumentError('Unknown enum value to decode: $data');
|
||||||
|
99
mobile/openapi/lib/model/system_config_backups_dto.dart
generated
Normal file
99
mobile/openapi/lib/model/system_config_backups_dto.dart
generated
Normal file
@ -0,0 +1,99 @@
|
|||||||
|
//
|
||||||
|
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||||
|
//
|
||||||
|
// @dart=2.18
|
||||||
|
|
||||||
|
// ignore_for_file: unused_element, unused_import
|
||||||
|
// ignore_for_file: always_put_required_named_parameters_first
|
||||||
|
// ignore_for_file: constant_identifier_names
|
||||||
|
// ignore_for_file: lines_longer_than_80_chars
|
||||||
|
|
||||||
|
part of openapi.api;
|
||||||
|
|
||||||
|
class SystemConfigBackupsDto {
|
||||||
|
/// Returns a new [SystemConfigBackupsDto] instance.
|
||||||
|
SystemConfigBackupsDto({
|
||||||
|
required this.database,
|
||||||
|
});
|
||||||
|
|
||||||
|
DatabaseBackupConfig database;
|
||||||
|
|
||||||
|
@override
|
||||||
|
bool operator ==(Object other) => identical(this, other) || other is SystemConfigBackupsDto &&
|
||||||
|
other.database == database;
|
||||||
|
|
||||||
|
@override
|
||||||
|
int get hashCode =>
|
||||||
|
// ignore: unnecessary_parenthesis
|
||||||
|
(database.hashCode);
|
||||||
|
|
||||||
|
@override
|
||||||
|
String toString() => 'SystemConfigBackupsDto[database=$database]';
|
||||||
|
|
||||||
|
Map<String, dynamic> toJson() {
|
||||||
|
final json = <String, dynamic>{};
|
||||||
|
json[r'database'] = this.database;
|
||||||
|
return json;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a new [SystemConfigBackupsDto] instance and imports its values from
|
||||||
|
/// [value] if it's a [Map], null otherwise.
|
||||||
|
// ignore: prefer_constructors_over_static_methods
|
||||||
|
static SystemConfigBackupsDto? fromJson(dynamic value) {
|
||||||
|
upgradeDto(value, "SystemConfigBackupsDto");
|
||||||
|
if (value is Map) {
|
||||||
|
final json = value.cast<String, dynamic>();
|
||||||
|
|
||||||
|
return SystemConfigBackupsDto(
|
||||||
|
database: DatabaseBackupConfig.fromJson(json[r'database'])!,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
static List<SystemConfigBackupsDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||||
|
final result = <SystemConfigBackupsDto>[];
|
||||||
|
if (json is List && json.isNotEmpty) {
|
||||||
|
for (final row in json) {
|
||||||
|
final value = SystemConfigBackupsDto.fromJson(row);
|
||||||
|
if (value != null) {
|
||||||
|
result.add(value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result.toList(growable: growable);
|
||||||
|
}
|
||||||
|
|
||||||
|
static Map<String, SystemConfigBackupsDto> mapFromJson(dynamic json) {
|
||||||
|
final map = <String, SystemConfigBackupsDto>{};
|
||||||
|
if (json is Map && json.isNotEmpty) {
|
||||||
|
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||||
|
for (final entry in json.entries) {
|
||||||
|
final value = SystemConfigBackupsDto.fromJson(entry.value);
|
||||||
|
if (value != null) {
|
||||||
|
map[entry.key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
|
||||||
|
// maps a json object with a list of SystemConfigBackupsDto-objects as value to a dart map
|
||||||
|
static Map<String, List<SystemConfigBackupsDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||||
|
final map = <String, List<SystemConfigBackupsDto>>{};
|
||||||
|
if (json is Map && json.isNotEmpty) {
|
||||||
|
// ignore: parameter_assignments
|
||||||
|
json = json.cast<String, dynamic>();
|
||||||
|
for (final entry in json.entries) {
|
||||||
|
map[entry.key] = SystemConfigBackupsDto.listFromJson(entry.value, growable: growable,);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The list of required keys that must be present in a JSON.
|
||||||
|
static const requiredKeys = <String>{
|
||||||
|
'database',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
10
mobile/openapi/lib/model/system_config_dto.dart
generated
10
mobile/openapi/lib/model/system_config_dto.dart
generated
@ -13,6 +13,7 @@ part of openapi.api;
|
|||||||
class SystemConfigDto {
|
class SystemConfigDto {
|
||||||
/// Returns a new [SystemConfigDto] instance.
|
/// Returns a new [SystemConfigDto] instance.
|
||||||
SystemConfigDto({
|
SystemConfigDto({
|
||||||
|
required this.backup,
|
||||||
required this.ffmpeg,
|
required this.ffmpeg,
|
||||||
required this.image,
|
required this.image,
|
||||||
required this.job,
|
required this.job,
|
||||||
@ -33,6 +34,8 @@ class SystemConfigDto {
|
|||||||
required this.user,
|
required this.user,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
SystemConfigBackupsDto backup;
|
||||||
|
|
||||||
SystemConfigFFmpegDto ffmpeg;
|
SystemConfigFFmpegDto ffmpeg;
|
||||||
|
|
||||||
SystemConfigImageDto image;
|
SystemConfigImageDto image;
|
||||||
@ -71,6 +74,7 @@ class SystemConfigDto {
|
|||||||
|
|
||||||
@override
|
@override
|
||||||
bool operator ==(Object other) => identical(this, other) || other is SystemConfigDto &&
|
bool operator ==(Object other) => identical(this, other) || other is SystemConfigDto &&
|
||||||
|
other.backup == backup &&
|
||||||
other.ffmpeg == ffmpeg &&
|
other.ffmpeg == ffmpeg &&
|
||||||
other.image == image &&
|
other.image == image &&
|
||||||
other.job == job &&
|
other.job == job &&
|
||||||
@ -93,6 +97,7 @@ class SystemConfigDto {
|
|||||||
@override
|
@override
|
||||||
int get hashCode =>
|
int get hashCode =>
|
||||||
// ignore: unnecessary_parenthesis
|
// ignore: unnecessary_parenthesis
|
||||||
|
(backup.hashCode) +
|
||||||
(ffmpeg.hashCode) +
|
(ffmpeg.hashCode) +
|
||||||
(image.hashCode) +
|
(image.hashCode) +
|
||||||
(job.hashCode) +
|
(job.hashCode) +
|
||||||
@ -113,10 +118,11 @@ class SystemConfigDto {
|
|||||||
(user.hashCode);
|
(user.hashCode);
|
||||||
|
|
||||||
@override
|
@override
|
||||||
String toString() => 'SystemConfigDto[ffmpeg=$ffmpeg, image=$image, job=$job, library_=$library_, logging=$logging, machineLearning=$machineLearning, map=$map, metadata=$metadata, newVersionCheck=$newVersionCheck, notifications=$notifications, oauth=$oauth, passwordLogin=$passwordLogin, reverseGeocoding=$reverseGeocoding, server=$server, storageTemplate=$storageTemplate, theme=$theme, trash=$trash, user=$user]';
|
String toString() => 'SystemConfigDto[backup=$backup, ffmpeg=$ffmpeg, image=$image, job=$job, library_=$library_, logging=$logging, machineLearning=$machineLearning, map=$map, metadata=$metadata, newVersionCheck=$newVersionCheck, notifications=$notifications, oauth=$oauth, passwordLogin=$passwordLogin, reverseGeocoding=$reverseGeocoding, server=$server, storageTemplate=$storageTemplate, theme=$theme, trash=$trash, user=$user]';
|
||||||
|
|
||||||
Map<String, dynamic> toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
final json = <String, dynamic>{};
|
final json = <String, dynamic>{};
|
||||||
|
json[r'backup'] = this.backup;
|
||||||
json[r'ffmpeg'] = this.ffmpeg;
|
json[r'ffmpeg'] = this.ffmpeg;
|
||||||
json[r'image'] = this.image;
|
json[r'image'] = this.image;
|
||||||
json[r'job'] = this.job;
|
json[r'job'] = this.job;
|
||||||
@ -147,6 +153,7 @@ class SystemConfigDto {
|
|||||||
final json = value.cast<String, dynamic>();
|
final json = value.cast<String, dynamic>();
|
||||||
|
|
||||||
return SystemConfigDto(
|
return SystemConfigDto(
|
||||||
|
backup: SystemConfigBackupsDto.fromJson(json[r'backup'])!,
|
||||||
ffmpeg: SystemConfigFFmpegDto.fromJson(json[r'ffmpeg'])!,
|
ffmpeg: SystemConfigFFmpegDto.fromJson(json[r'ffmpeg'])!,
|
||||||
image: SystemConfigImageDto.fromJson(json[r'image'])!,
|
image: SystemConfigImageDto.fromJson(json[r'image'])!,
|
||||||
job: SystemConfigJobDto.fromJson(json[r'job'])!,
|
job: SystemConfigJobDto.fromJson(json[r'job'])!,
|
||||||
@ -212,6 +219,7 @@ class SystemConfigDto {
|
|||||||
|
|
||||||
/// The list of required keys that must be present in a JSON.
|
/// The list of required keys that must be present in a JSON.
|
||||||
static const requiredKeys = <String>{
|
static const requiredKeys = <String>{
|
||||||
|
'backup',
|
||||||
'ffmpeg',
|
'ffmpeg',
|
||||||
'image',
|
'image',
|
||||||
'job',
|
'job',
|
||||||
|
@ -7745,6 +7745,9 @@
|
|||||||
"backgroundTask": {
|
"backgroundTask": {
|
||||||
"$ref": "#/components/schemas/JobStatusDto"
|
"$ref": "#/components/schemas/JobStatusDto"
|
||||||
},
|
},
|
||||||
|
"backupDatabase": {
|
||||||
|
"$ref": "#/components/schemas/JobStatusDto"
|
||||||
|
},
|
||||||
"duplicateDetection": {
|
"duplicateDetection": {
|
||||||
"$ref": "#/components/schemas/JobStatusDto"
|
"$ref": "#/components/schemas/JobStatusDto"
|
||||||
},
|
},
|
||||||
@ -7787,6 +7790,7 @@
|
|||||||
},
|
},
|
||||||
"required": [
|
"required": [
|
||||||
"backgroundTask",
|
"backgroundTask",
|
||||||
|
"backupDatabase",
|
||||||
"duplicateDetection",
|
"duplicateDetection",
|
||||||
"faceDetection",
|
"faceDetection",
|
||||||
"facialRecognition",
|
"facialRecognition",
|
||||||
@ -8754,6 +8758,26 @@
|
|||||||
],
|
],
|
||||||
"type": "object"
|
"type": "object"
|
||||||
},
|
},
|
||||||
|
"DatabaseBackupConfig": {
|
||||||
|
"properties": {
|
||||||
|
"cronExpression": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"enabled": {
|
||||||
|
"type": "boolean"
|
||||||
|
},
|
||||||
|
"keepLastAmount": {
|
||||||
|
"minimum": 1,
|
||||||
|
"type": "number"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": [
|
||||||
|
"cronExpression",
|
||||||
|
"enabled",
|
||||||
|
"keepLastAmount"
|
||||||
|
],
|
||||||
|
"type": "object"
|
||||||
|
},
|
||||||
"DownloadArchiveInfo": {
|
"DownloadArchiveInfo": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"assetIds": {
|
"assetIds": {
|
||||||
@ -9289,7 +9313,8 @@
|
|||||||
"search",
|
"search",
|
||||||
"sidecar",
|
"sidecar",
|
||||||
"library",
|
"library",
|
||||||
"notifications"
|
"notifications",
|
||||||
|
"backupDatabase"
|
||||||
],
|
],
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
@ -11456,8 +11481,22 @@
|
|||||||
},
|
},
|
||||||
"type": "object"
|
"type": "object"
|
||||||
},
|
},
|
||||||
|
"SystemConfigBackupsDto": {
|
||||||
|
"properties": {
|
||||||
|
"database": {
|
||||||
|
"$ref": "#/components/schemas/DatabaseBackupConfig"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": [
|
||||||
|
"database"
|
||||||
|
],
|
||||||
|
"type": "object"
|
||||||
|
},
|
||||||
"SystemConfigDto": {
|
"SystemConfigDto": {
|
||||||
"properties": {
|
"properties": {
|
||||||
|
"backup": {
|
||||||
|
"$ref": "#/components/schemas/SystemConfigBackupsDto"
|
||||||
|
},
|
||||||
"ffmpeg": {
|
"ffmpeg": {
|
||||||
"$ref": "#/components/schemas/SystemConfigFFmpegDto"
|
"$ref": "#/components/schemas/SystemConfigFFmpegDto"
|
||||||
},
|
},
|
||||||
@ -11514,6 +11553,7 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"required": [
|
"required": [
|
||||||
|
"backup",
|
||||||
"ffmpeg",
|
"ffmpeg",
|
||||||
"image",
|
"image",
|
||||||
"job",
|
"job",
|
||||||
|
@ -535,6 +535,7 @@ export type JobStatusDto = {
|
|||||||
};
|
};
|
||||||
export type AllJobStatusResponseDto = {
|
export type AllJobStatusResponseDto = {
|
||||||
backgroundTask: JobStatusDto;
|
backgroundTask: JobStatusDto;
|
||||||
|
backupDatabase: JobStatusDto;
|
||||||
duplicateDetection: JobStatusDto;
|
duplicateDetection: JobStatusDto;
|
||||||
faceDetection: JobStatusDto;
|
faceDetection: JobStatusDto;
|
||||||
facialRecognition: JobStatusDto;
|
facialRecognition: JobStatusDto;
|
||||||
@ -1084,6 +1085,14 @@ export type AssetFullSyncDto = {
|
|||||||
updatedUntil: string;
|
updatedUntil: string;
|
||||||
userId?: string;
|
userId?: string;
|
||||||
};
|
};
|
||||||
|
export type DatabaseBackupConfig = {
|
||||||
|
cronExpression: string;
|
||||||
|
enabled: boolean;
|
||||||
|
keepLastAmount: number;
|
||||||
|
};
|
||||||
|
export type SystemConfigBackupsDto = {
|
||||||
|
database: DatabaseBackupConfig;
|
||||||
|
};
|
||||||
export type SystemConfigFFmpegDto = {
|
export type SystemConfigFFmpegDto = {
|
||||||
accel: TranscodeHWAccel;
|
accel: TranscodeHWAccel;
|
||||||
accelDecode: boolean;
|
accelDecode: boolean;
|
||||||
@ -1232,6 +1241,7 @@ export type SystemConfigUserDto = {
|
|||||||
deleteDelay: number;
|
deleteDelay: number;
|
||||||
};
|
};
|
||||||
export type SystemConfigDto = {
|
export type SystemConfigDto = {
|
||||||
|
backup: SystemConfigBackupsDto;
|
||||||
ffmpeg: SystemConfigFFmpegDto;
|
ffmpeg: SystemConfigFFmpegDto;
|
||||||
image: SystemConfigImageDto;
|
image: SystemConfigImageDto;
|
||||||
job: SystemConfigJobDto;
|
job: SystemConfigJobDto;
|
||||||
@ -3445,7 +3455,8 @@ export enum JobName {
|
|||||||
Search = "search",
|
Search = "search",
|
||||||
Sidecar = "sidecar",
|
Sidecar = "sidecar",
|
||||||
Library = "library",
|
Library = "library",
|
||||||
Notifications = "notifications"
|
Notifications = "notifications",
|
||||||
|
BackupDatabase = "backupDatabase"
|
||||||
}
|
}
|
||||||
export enum JobCommand {
|
export enum JobCommand {
|
||||||
Start = "start",
|
Start = "start",
|
||||||
|
@ -15,6 +15,13 @@ import { ConcurrentQueueName, QueueName } from 'src/interfaces/job.interface';
|
|||||||
import { ImageOptions } from 'src/interfaces/media.interface';
|
import { ImageOptions } from 'src/interfaces/media.interface';
|
||||||
|
|
||||||
export interface SystemConfig {
|
export interface SystemConfig {
|
||||||
|
backup: {
|
||||||
|
database: {
|
||||||
|
enabled: boolean;
|
||||||
|
cronExpression: string;
|
||||||
|
keepLastAmount: number;
|
||||||
|
};
|
||||||
|
};
|
||||||
ffmpeg: {
|
ffmpeg: {
|
||||||
crf: number;
|
crf: number;
|
||||||
threads: number;
|
threads: number;
|
||||||
@ -150,6 +157,13 @@ export interface SystemConfig {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const defaults = Object.freeze<SystemConfig>({
|
export const defaults = Object.freeze<SystemConfig>({
|
||||||
|
backup: {
|
||||||
|
database: {
|
||||||
|
enabled: true,
|
||||||
|
cronExpression: CronExpression.EVERY_DAY_AT_2AM,
|
||||||
|
keepLastAmount: 14,
|
||||||
|
},
|
||||||
|
},
|
||||||
ffmpeg: {
|
ffmpeg: {
|
||||||
crf: 23,
|
crf: 23,
|
||||||
threads: 0,
|
threads: 0,
|
||||||
|
@ -97,4 +97,7 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>
|
|||||||
|
|
||||||
@ApiProperty({ type: JobStatusDto })
|
@ApiProperty({ type: JobStatusDto })
|
||||||
[QueueName.NOTIFICATION]!: JobStatusDto;
|
[QueueName.NOTIFICATION]!: JobStatusDto;
|
||||||
|
|
||||||
|
@ApiProperty({ type: JobStatusDto })
|
||||||
|
[QueueName.BACKUP_DATABASE]!: JobStatusDto;
|
||||||
}
|
}
|
||||||
|
@ -46,6 +46,30 @@ const isLibraryScanEnabled = (config: SystemConfigLibraryScanDto) => config.enab
|
|||||||
const isOAuthEnabled = (config: SystemConfigOAuthDto) => config.enabled;
|
const isOAuthEnabled = (config: SystemConfigOAuthDto) => config.enabled;
|
||||||
const isOAuthOverrideEnabled = (config: SystemConfigOAuthDto) => config.mobileOverrideEnabled;
|
const isOAuthOverrideEnabled = (config: SystemConfigOAuthDto) => config.mobileOverrideEnabled;
|
||||||
const isEmailNotificationEnabled = (config: SystemConfigSmtpDto) => config.enabled;
|
const isEmailNotificationEnabled = (config: SystemConfigSmtpDto) => config.enabled;
|
||||||
|
const isDatabaseBackupEnabled = (config: DatabaseBackupConfig) => config.enabled;
|
||||||
|
|
||||||
|
export class DatabaseBackupConfig {
|
||||||
|
@ValidateBoolean()
|
||||||
|
enabled!: boolean;
|
||||||
|
|
||||||
|
@ValidateIf(isDatabaseBackupEnabled)
|
||||||
|
@IsNotEmpty()
|
||||||
|
@Validate(CronValidator, { message: 'Invalid cron expression' })
|
||||||
|
@IsString()
|
||||||
|
cronExpression!: string;
|
||||||
|
|
||||||
|
@IsInt()
|
||||||
|
@IsPositive()
|
||||||
|
@IsNotEmpty()
|
||||||
|
keepLastAmount!: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class SystemConfigBackupsDto {
|
||||||
|
@Type(() => DatabaseBackupConfig)
|
||||||
|
@ValidateNested()
|
||||||
|
@IsObject()
|
||||||
|
database!: DatabaseBackupConfig;
|
||||||
|
}
|
||||||
|
|
||||||
export class SystemConfigFFmpegDto {
|
export class SystemConfigFFmpegDto {
|
||||||
@IsInt()
|
@IsInt()
|
||||||
@ -531,6 +555,11 @@ class SystemConfigUserDto {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export class SystemConfigDto implements SystemConfig {
|
export class SystemConfigDto implements SystemConfig {
|
||||||
|
@Type(() => SystemConfigBackupsDto)
|
||||||
|
@ValidateNested()
|
||||||
|
@IsObject()
|
||||||
|
backup!: SystemConfigBackupsDto;
|
||||||
|
|
||||||
@Type(() => SystemConfigFFmpegDto)
|
@Type(() => SystemConfigFFmpegDto)
|
||||||
@ValidateNested()
|
@ValidateNested()
|
||||||
@IsObject()
|
@IsObject()
|
||||||
|
@ -181,6 +181,7 @@ export enum StorageFolder {
|
|||||||
UPLOAD = 'upload',
|
UPLOAD = 'upload',
|
||||||
PROFILE = 'profile',
|
PROFILE = 'profile',
|
||||||
THUMBNAILS = 'thumbs',
|
THUMBNAILS = 'thumbs',
|
||||||
|
BACKUPS = 'backups',
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum SystemMetadataKey {
|
export enum SystemMetadataKey {
|
||||||
|
@ -37,6 +37,7 @@ export enum DatabaseLock {
|
|||||||
CLIPDimSize = 512,
|
CLIPDimSize = 512,
|
||||||
Library = 1337,
|
Library = 1337,
|
||||||
GetSystemConfig = 69,
|
GetSystemConfig = 69,
|
||||||
|
BackupDatabase = 42,
|
||||||
}
|
}
|
||||||
|
|
||||||
export const EXTENSION_NAMES: Record<DatabaseExtension, string> = {
|
export const EXTENSION_NAMES: Record<DatabaseExtension, string> = {
|
||||||
|
@ -15,11 +15,15 @@ export enum QueueName {
|
|||||||
SIDECAR = 'sidecar',
|
SIDECAR = 'sidecar',
|
||||||
LIBRARY = 'library',
|
LIBRARY = 'library',
|
||||||
NOTIFICATION = 'notifications',
|
NOTIFICATION = 'notifications',
|
||||||
|
BACKUP_DATABASE = 'backupDatabase',
|
||||||
}
|
}
|
||||||
|
|
||||||
export type ConcurrentQueueName = Exclude<
|
export type ConcurrentQueueName = Exclude<
|
||||||
QueueName,
|
QueueName,
|
||||||
QueueName.STORAGE_TEMPLATE_MIGRATION | QueueName.FACIAL_RECOGNITION | QueueName.DUPLICATE_DETECTION
|
| QueueName.STORAGE_TEMPLATE_MIGRATION
|
||||||
|
| QueueName.FACIAL_RECOGNITION
|
||||||
|
| QueueName.DUPLICATE_DETECTION
|
||||||
|
| QueueName.BACKUP_DATABASE
|
||||||
>;
|
>;
|
||||||
|
|
||||||
export enum JobCommand {
|
export enum JobCommand {
|
||||||
@ -31,6 +35,9 @@ export enum JobCommand {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export enum JobName {
|
export enum JobName {
|
||||||
|
//backups
|
||||||
|
BACKUP_DATABASE = 'database-backup',
|
||||||
|
|
||||||
// conversion
|
// conversion
|
||||||
QUEUE_VIDEO_CONVERSION = 'queue-video-conversion',
|
QUEUE_VIDEO_CONVERSION = 'queue-video-conversion',
|
||||||
VIDEO_CONVERSION = 'video-conversion',
|
VIDEO_CONVERSION = 'video-conversion',
|
||||||
@ -209,6 +216,9 @@ export enum QueueCleanType {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export type JobItem =
|
export type JobItem =
|
||||||
|
// Backups
|
||||||
|
| { name: JobName.BACKUP_DATABASE; data?: IBaseJob }
|
||||||
|
|
||||||
// Transcoding
|
// Transcoding
|
||||||
| { name: JobName.QUEUE_VIDEO_CONVERSION; data: IBaseJob }
|
| { name: JobName.QUEUE_VIDEO_CONVERSION; data: IBaseJob }
|
||||||
| { name: JobName.VIDEO_CONVERSION; data: IEntityJob }
|
| { name: JobName.VIDEO_CONVERSION; data: IEntityJob }
|
||||||
|
25
server/src/interfaces/process.interface.ts
Normal file
25
server/src/interfaces/process.interface.ts
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
import { ChildProcessWithoutNullStreams, SpawnOptionsWithoutStdio } from 'node:child_process';
|
||||||
|
import { Readable } from 'node:stream';
|
||||||
|
|
||||||
|
export interface ImmichReadStream {
|
||||||
|
stream: Readable;
|
||||||
|
type?: string;
|
||||||
|
length?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ImmichZipStream extends ImmichReadStream {
|
||||||
|
addFile: (inputPath: string, filename: string) => void;
|
||||||
|
finalize: () => Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DiskUsage {
|
||||||
|
available: number;
|
||||||
|
free: number;
|
||||||
|
total: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const IProcessRepository = 'IProcessRepository';
|
||||||
|
|
||||||
|
export interface IProcessRepository {
|
||||||
|
spawn(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams;
|
||||||
|
}
|
@ -1,7 +1,7 @@
|
|||||||
import { WatchOptions } from 'chokidar';
|
import { WatchOptions } from 'chokidar';
|
||||||
import { Stats } from 'node:fs';
|
import { Stats } from 'node:fs';
|
||||||
import { FileReadOptions } from 'node:fs/promises';
|
import { FileReadOptions } from 'node:fs/promises';
|
||||||
import { Readable } from 'node:stream';
|
import { Readable, Writable } from 'node:stream';
|
||||||
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
|
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
|
||||||
|
|
||||||
export interface ImmichReadStream {
|
export interface ImmichReadStream {
|
||||||
@ -36,6 +36,7 @@ export interface IStorageRepository {
|
|||||||
createReadStream(filepath: string, mimeType?: string | null): Promise<ImmichReadStream>;
|
createReadStream(filepath: string, mimeType?: string | null): Promise<ImmichReadStream>;
|
||||||
readFile(filepath: string, options?: FileReadOptions<Buffer>): Promise<Buffer>;
|
readFile(filepath: string, options?: FileReadOptions<Buffer>): Promise<Buffer>;
|
||||||
createFile(filepath: string, buffer: Buffer): Promise<void>;
|
createFile(filepath: string, buffer: Buffer): Promise<void>;
|
||||||
|
createWriteStream(filepath: string): Writable;
|
||||||
createOrOverwriteFile(filepath: string, buffer: Buffer): Promise<void>;
|
createOrOverwriteFile(filepath: string, buffer: Buffer): Promise<void>;
|
||||||
overwriteFile(filepath: string, buffer: Buffer): Promise<void>;
|
overwriteFile(filepath: string, buffer: Buffer): Promise<void>;
|
||||||
realpath(filepath: string): Promise<string>;
|
realpath(filepath: string): Promise<string>;
|
||||||
|
@ -22,6 +22,7 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
|
|||||||
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
|
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
|
||||||
import { IPartnerRepository } from 'src/interfaces/partner.interface';
|
import { IPartnerRepository } from 'src/interfaces/partner.interface';
|
||||||
import { IPersonRepository } from 'src/interfaces/person.interface';
|
import { IPersonRepository } from 'src/interfaces/person.interface';
|
||||||
|
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||||
import { ISearchRepository } from 'src/interfaces/search.interface';
|
import { ISearchRepository } from 'src/interfaces/search.interface';
|
||||||
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
|
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
|
||||||
import { ISessionRepository } from 'src/interfaces/session.interface';
|
import { ISessionRepository } from 'src/interfaces/session.interface';
|
||||||
@ -59,6 +60,7 @@ import { NotificationRepository } from 'src/repositories/notification.repository
|
|||||||
import { OAuthRepository } from 'src/repositories/oauth.repository';
|
import { OAuthRepository } from 'src/repositories/oauth.repository';
|
||||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||||
import { PersonRepository } from 'src/repositories/person.repository';
|
import { PersonRepository } from 'src/repositories/person.repository';
|
||||||
|
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||||
import { SearchRepository } from 'src/repositories/search.repository';
|
import { SearchRepository } from 'src/repositories/search.repository';
|
||||||
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
|
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
|
||||||
import { SessionRepository } from 'src/repositories/session.repository';
|
import { SessionRepository } from 'src/repositories/session.repository';
|
||||||
@ -98,6 +100,7 @@ export const repositories = [
|
|||||||
{ provide: IOAuthRepository, useClass: OAuthRepository },
|
{ provide: IOAuthRepository, useClass: OAuthRepository },
|
||||||
{ provide: IPartnerRepository, useClass: PartnerRepository },
|
{ provide: IPartnerRepository, useClass: PartnerRepository },
|
||||||
{ provide: IPersonRepository, useClass: PersonRepository },
|
{ provide: IPersonRepository, useClass: PersonRepository },
|
||||||
|
{ provide: IProcessRepository, useClass: ProcessRepository },
|
||||||
{ provide: ISearchRepository, useClass: SearchRepository },
|
{ provide: ISearchRepository, useClass: SearchRepository },
|
||||||
{ provide: IServerInfoRepository, useClass: ServerInfoRepository },
|
{ provide: IServerInfoRepository, useClass: ServerInfoRepository },
|
||||||
{ provide: ISessionRepository, useClass: SessionRepository },
|
{ provide: ISessionRepository, useClass: SessionRepository },
|
||||||
|
@ -30,6 +30,9 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
|
|||||||
[JobName.PERSON_CLEANUP]: QueueName.BACKGROUND_TASK,
|
[JobName.PERSON_CLEANUP]: QueueName.BACKGROUND_TASK,
|
||||||
[JobName.USER_SYNC_USAGE]: QueueName.BACKGROUND_TASK,
|
[JobName.USER_SYNC_USAGE]: QueueName.BACKGROUND_TASK,
|
||||||
|
|
||||||
|
// backups
|
||||||
|
[JobName.BACKUP_DATABASE]: QueueName.BACKUP_DATABASE,
|
||||||
|
|
||||||
// conversion
|
// conversion
|
||||||
[JobName.QUEUE_VIDEO_CONVERSION]: QueueName.VIDEO_CONVERSION,
|
[JobName.QUEUE_VIDEO_CONVERSION]: QueueName.VIDEO_CONVERSION,
|
||||||
[JobName.VIDEO_CONVERSION]: QueueName.VIDEO_CONVERSION,
|
[JobName.VIDEO_CONVERSION]: QueueName.VIDEO_CONVERSION,
|
||||||
|
16
server/src/repositories/process.repository.ts
Normal file
16
server/src/repositories/process.repository.ts
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
import { Inject, Injectable } from '@nestjs/common';
|
||||||
|
import { ChildProcessWithoutNullStreams, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
|
||||||
|
import { ILoggerRepository } from 'src/interfaces/logger.interface';
|
||||||
|
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||||
|
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class ProcessRepository implements IProcessRepository {
|
||||||
|
constructor(@Inject(ILoggerRepository) private logger: ILoggerRepository) {
|
||||||
|
this.logger.setContext(StorageRepository.name);
|
||||||
|
}
|
||||||
|
|
||||||
|
spawn(command: string, args: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
|
||||||
|
return spawn(command, args, options);
|
||||||
|
}
|
||||||
|
}
|
@ -2,9 +2,10 @@ import { Inject, Injectable } from '@nestjs/common';
|
|||||||
import archiver from 'archiver';
|
import archiver from 'archiver';
|
||||||
import chokidar, { WatchOptions } from 'chokidar';
|
import chokidar, { WatchOptions } from 'chokidar';
|
||||||
import { escapePath, glob, globStream } from 'fast-glob';
|
import { escapePath, glob, globStream } from 'fast-glob';
|
||||||
import { constants, createReadStream, existsSync, mkdirSync } from 'node:fs';
|
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync } from 'node:fs';
|
||||||
import fs from 'node:fs/promises';
|
import fs from 'node:fs/promises';
|
||||||
import path from 'node:path';
|
import path from 'node:path';
|
||||||
|
import { Writable } from 'node:stream';
|
||||||
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
|
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
|
||||||
import { ILoggerRepository } from 'src/interfaces/logger.interface';
|
import { ILoggerRepository } from 'src/interfaces/logger.interface';
|
||||||
import {
|
import {
|
||||||
@ -42,6 +43,10 @@ export class StorageRepository implements IStorageRepository {
|
|||||||
return fs.writeFile(filepath, buffer, { flag: 'wx' });
|
return fs.writeFile(filepath, buffer, { flag: 'wx' });
|
||||||
}
|
}
|
||||||
|
|
||||||
|
createWriteStream(filepath: string): Writable {
|
||||||
|
return createWriteStream(filepath, { flags: 'w' });
|
||||||
|
}
|
||||||
|
|
||||||
createOrOverwriteFile(filepath: string, buffer: Buffer) {
|
createOrOverwriteFile(filepath: string, buffer: Buffer) {
|
||||||
return fs.writeFile(filepath, buffer, { flag: 'w' });
|
return fs.writeFile(filepath, buffer, { flag: 'w' });
|
||||||
}
|
}
|
||||||
|
217
server/src/services/backup.service.spec.ts
Normal file
217
server/src/services/backup.service.spec.ts
Normal file
@ -0,0 +1,217 @@
|
|||||||
|
import { PassThrough } from 'node:stream';
|
||||||
|
import { defaults, SystemConfig } from 'src/config';
|
||||||
|
import { StorageCore } from 'src/cores/storage.core';
|
||||||
|
import { ImmichWorker, StorageFolder } from 'src/enum';
|
||||||
|
import { IDatabaseRepository } from 'src/interfaces/database.interface';
|
||||||
|
import { IJobRepository, JobStatus } from 'src/interfaces/job.interface';
|
||||||
|
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||||
|
import { IStorageRepository } from 'src/interfaces/storage.interface';
|
||||||
|
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
|
||||||
|
import { BackupService } from 'src/services/backup.service';
|
||||||
|
import { systemConfigStub } from 'test/fixtures/system-config.stub';
|
||||||
|
import { mockSpawn, newTestService } from 'test/utils';
|
||||||
|
import { describe, Mocked } from 'vitest';
|
||||||
|
|
||||||
|
describe(BackupService.name, () => {
|
||||||
|
let sut: BackupService;
|
||||||
|
|
||||||
|
let databaseMock: Mocked<IDatabaseRepository>;
|
||||||
|
let jobMock: Mocked<IJobRepository>;
|
||||||
|
let processMock: Mocked<IProcessRepository>;
|
||||||
|
let storageMock: Mocked<IStorageRepository>;
|
||||||
|
let systemMock: Mocked<ISystemMetadataRepository>;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
({ sut, databaseMock, jobMock, processMock, storageMock, systemMock } = newTestService(BackupService));
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should work', () => {
|
||||||
|
expect(sut).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('onBootstrapEvent', () => {
|
||||||
|
it('should init cron job and handle config changes', async () => {
|
||||||
|
databaseMock.tryLock.mockResolvedValue(true);
|
||||||
|
systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
|
||||||
|
|
||||||
|
await sut.onBootstrap(ImmichWorker.API);
|
||||||
|
|
||||||
|
expect(jobMock.addCronJob).toHaveBeenCalled();
|
||||||
|
expect(systemMock.get).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not initialize backup database cron job when lock is taken', async () => {
|
||||||
|
systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
|
||||||
|
databaseMock.tryLock.mockResolvedValue(false);
|
||||||
|
|
||||||
|
await sut.onBootstrap(ImmichWorker.API);
|
||||||
|
|
||||||
|
expect(jobMock.addCronJob).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not initialise backup database job when running on microservices', async () => {
|
||||||
|
await sut.onBootstrap(ImmichWorker.MICROSERVICES);
|
||||||
|
|
||||||
|
expect(jobMock.addCronJob).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('onConfigUpdateEvent', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
systemMock.get.mockResolvedValue(defaults);
|
||||||
|
databaseMock.tryLock.mockResolvedValue(true);
|
||||||
|
await sut.onBootstrap(ImmichWorker.API);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should update cron job if backup is enabled', () => {
|
||||||
|
sut.onConfigUpdate({
|
||||||
|
oldConfig: defaults,
|
||||||
|
newConfig: {
|
||||||
|
backup: {
|
||||||
|
database: {
|
||||||
|
enabled: true,
|
||||||
|
cronExpression: '0 1 * * *',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
} as SystemConfig,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(jobMock.updateCronJob).toHaveBeenCalledWith('backupDatabase', '0 1 * * *', true);
|
||||||
|
expect(jobMock.updateCronJob).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should do nothing if oldConfig is not provided', () => {
|
||||||
|
sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
|
||||||
|
expect(jobMock.updateCronJob).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should do nothing if instance does not have the backup database lock', async () => {
|
||||||
|
databaseMock.tryLock.mockResolvedValue(false);
|
||||||
|
await sut.onBootstrap(ImmichWorker.API);
|
||||||
|
sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
|
||||||
|
expect(jobMock.updateCronJob).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('onConfigValidateEvent', () => {
|
||||||
|
it('should allow a valid cron expression', () => {
|
||||||
|
expect(() =>
|
||||||
|
sut.onConfigValidate({
|
||||||
|
newConfig: { backup: { database: { cronExpression: '0 0 * * *' } } } as SystemConfig,
|
||||||
|
oldConfig: {} as SystemConfig,
|
||||||
|
}),
|
||||||
|
).not.toThrow(expect.stringContaining('Invalid cron expression'));
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fail for an invalid cron expression', () => {
|
||||||
|
expect(() =>
|
||||||
|
sut.onConfigValidate({
|
||||||
|
newConfig: { backup: { database: { cronExpression: 'foo' } } } as SystemConfig,
|
||||||
|
oldConfig: {} as SystemConfig,
|
||||||
|
}),
|
||||||
|
).toThrow(/Invalid cron expression.*/);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('cleanupDatabaseBackups', () => {
|
||||||
|
it('should do nothing if not reached keepLastAmount', async () => {
|
||||||
|
systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
|
||||||
|
storageMock.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz']);
|
||||||
|
await sut.cleanupDatabaseBackups();
|
||||||
|
expect(storageMock.unlink).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should remove failed backup files', async () => {
|
||||||
|
systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
|
||||||
|
storageMock.readdir.mockResolvedValue([
|
||||||
|
'immich-db-backup-123.sql.gz.tmp',
|
||||||
|
'immich-db-backup-234.sql.gz',
|
||||||
|
'immich-db-backup-345.sql.gz.tmp',
|
||||||
|
]);
|
||||||
|
await sut.cleanupDatabaseBackups();
|
||||||
|
expect(storageMock.unlink).toHaveBeenCalledTimes(2);
|
||||||
|
expect(storageMock.unlink).toHaveBeenCalledWith(
|
||||||
|
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-123.sql.gz.tmp`,
|
||||||
|
);
|
||||||
|
expect(storageMock.unlink).toHaveBeenCalledWith(
|
||||||
|
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-345.sql.gz.tmp`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should remove old backup files over keepLastAmount', async () => {
|
||||||
|
systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
|
||||||
|
storageMock.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz', 'immich-db-backup-2.sql.gz']);
|
||||||
|
await sut.cleanupDatabaseBackups();
|
||||||
|
expect(storageMock.unlink).toHaveBeenCalledTimes(1);
|
||||||
|
expect(storageMock.unlink).toHaveBeenCalledWith(
|
||||||
|
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should remove old backup files over keepLastAmount and failed backups', async () => {
|
||||||
|
systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
|
||||||
|
storageMock.readdir.mockResolvedValue([
|
||||||
|
'immich-db-backup-1.sql.gz.tmp',
|
||||||
|
'immich-db-backup-2.sql.gz',
|
||||||
|
'immich-db-backup-3.sql.gz',
|
||||||
|
]);
|
||||||
|
await sut.cleanupDatabaseBackups();
|
||||||
|
expect(storageMock.unlink).toHaveBeenCalledTimes(2);
|
||||||
|
expect(storageMock.unlink).toHaveBeenCalledWith(
|
||||||
|
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz.tmp`,
|
||||||
|
);
|
||||||
|
expect(storageMock.unlink).toHaveBeenCalledWith(
|
||||||
|
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-2.sql.gz`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('handleBackupDatabase', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
storageMock.readdir.mockResolvedValue([]);
|
||||||
|
processMock.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
|
||||||
|
storageMock.rename.mockResolvedValue();
|
||||||
|
systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
|
||||||
|
storageMock.createWriteStream.mockReturnValue(new PassThrough());
|
||||||
|
});
|
||||||
|
it('should run a database backup successfully', async () => {
|
||||||
|
const result = await sut.handleBackupDatabase();
|
||||||
|
expect(result).toBe(JobStatus.SUCCESS);
|
||||||
|
expect(storageMock.createWriteStream).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
it('should rename file on success', async () => {
|
||||||
|
const result = await sut.handleBackupDatabase();
|
||||||
|
expect(result).toBe(JobStatus.SUCCESS);
|
||||||
|
expect(storageMock.rename).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
it('should fail if pg_dumpall fails', async () => {
|
||||||
|
processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
|
||||||
|
const result = await sut.handleBackupDatabase();
|
||||||
|
expect(result).toBe(JobStatus.FAILED);
|
||||||
|
});
|
||||||
|
it('should not rename file if pgdump fails and gzip succeeds', async () => {
|
||||||
|
processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
|
||||||
|
const result = await sut.handleBackupDatabase();
|
||||||
|
expect(result).toBe(JobStatus.FAILED);
|
||||||
|
expect(storageMock.rename).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
it('should fail if gzip fails', async () => {
|
||||||
|
processMock.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
|
||||||
|
processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
|
||||||
|
const result = await sut.handleBackupDatabase();
|
||||||
|
expect(result).toBe(JobStatus.FAILED);
|
||||||
|
});
|
||||||
|
it('should fail if write stream fails', async () => {
|
||||||
|
storageMock.createWriteStream.mockImplementation(() => {
|
||||||
|
throw new Error('error');
|
||||||
|
});
|
||||||
|
const result = await sut.handleBackupDatabase();
|
||||||
|
expect(result).toBe(JobStatus.FAILED);
|
||||||
|
});
|
||||||
|
it('should fail if rename fails', async () => {
|
||||||
|
storageMock.rename.mockRejectedValue(new Error('error'));
|
||||||
|
const result = await sut.handleBackupDatabase();
|
||||||
|
expect(result).toBe(JobStatus.FAILED);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
157
server/src/services/backup.service.ts
Normal file
157
server/src/services/backup.service.ts
Normal file
@ -0,0 +1,157 @@
|
|||||||
|
import { Injectable } from '@nestjs/common';
|
||||||
|
import { default as path } from 'node:path';
|
||||||
|
import { StorageCore } from 'src/cores/storage.core';
|
||||||
|
import { OnEvent } from 'src/decorators';
|
||||||
|
import { ImmichWorker, StorageFolder } from 'src/enum';
|
||||||
|
import { DatabaseLock } from 'src/interfaces/database.interface';
|
||||||
|
import { ArgOf } from 'src/interfaces/event.interface';
|
||||||
|
import { JobName, JobStatus } from 'src/interfaces/job.interface';
|
||||||
|
import { BaseService } from 'src/services/base.service';
|
||||||
|
import { handlePromiseError } from 'src/utils/misc';
|
||||||
|
import { validateCronExpression } from 'src/validation';
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class BackupService extends BaseService {
|
||||||
|
private backupLock = false;
|
||||||
|
|
||||||
|
@OnEvent({ name: 'app.bootstrap' })
|
||||||
|
async onBootstrap(workerType: ImmichWorker) {
|
||||||
|
if (workerType !== ImmichWorker.API) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const {
|
||||||
|
backup: { database },
|
||||||
|
} = await this.getConfig({ withCache: true });
|
||||||
|
|
||||||
|
this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);
|
||||||
|
|
||||||
|
if (this.backupLock) {
|
||||||
|
this.jobRepository.addCronJob(
|
||||||
|
'backupDatabase',
|
||||||
|
database.cronExpression,
|
||||||
|
() => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger),
|
||||||
|
database.enabled,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@OnEvent({ name: 'config.update', server: true })
|
||||||
|
onConfigUpdate({ newConfig: { backup }, oldConfig }: ArgOf<'config.update'>) {
|
||||||
|
if (!oldConfig || !this.backupLock) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.jobRepository.updateCronJob('backupDatabase', backup.database.cronExpression, backup.database.enabled);
|
||||||
|
}
|
||||||
|
|
||||||
|
@OnEvent({ name: 'config.validate' })
|
||||||
|
onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
|
||||||
|
const { database } = newConfig.backup;
|
||||||
|
if (!validateCronExpression(database.cronExpression)) {
|
||||||
|
throw new Error(`Invalid cron expression ${database.cronExpression}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async cleanupDatabaseBackups() {
|
||||||
|
this.logger.debug(`Database Backup Cleanup Started`);
|
||||||
|
const {
|
||||||
|
backup: { database: config },
|
||||||
|
} = await this.getConfig({ withCache: false });
|
||||||
|
|
||||||
|
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.BACKUPS);
|
||||||
|
const files = await this.storageRepository.readdir(backupsFolder);
|
||||||
|
const failedBackups = files.filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz\.tmp$/));
|
||||||
|
const backups = files
|
||||||
|
.filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz$/))
|
||||||
|
.sort()
|
||||||
|
.reverse();
|
||||||
|
|
||||||
|
const toDelete = backups.slice(config.keepLastAmount);
|
||||||
|
toDelete.push(...failedBackups);
|
||||||
|
|
||||||
|
for (const file of toDelete) {
|
||||||
|
await this.storageRepository.unlink(path.join(backupsFolder, file));
|
||||||
|
}
|
||||||
|
this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async handleBackupDatabase(): Promise<JobStatus> {
|
||||||
|
this.logger.debug(`Database Backup Started`);
|
||||||
|
|
||||||
|
const {
|
||||||
|
database: { config },
|
||||||
|
} = this.configRepository.getEnv();
|
||||||
|
|
||||||
|
const isUrlConnection = config.connectionType === 'url';
|
||||||
|
const databaseParams = isUrlConnection ? [config.url] : ['-U', config.username, '-h', config.host];
|
||||||
|
const backupFilePath = path.join(
|
||||||
|
StorageCore.getBaseFolder(StorageFolder.BACKUPS),
|
||||||
|
`immich-db-backup-${Date.now()}.sql.gz.tmp`,
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
const pgdump = this.processRepository.spawn(`pg_dumpall`, [...databaseParams, '--clean', '--if-exists'], {
|
||||||
|
env: { PATH: process.env.PATH, PGPASSWORD: isUrlConnection ? undefined : config.password },
|
||||||
|
});
|
||||||
|
|
||||||
|
const gzip = this.processRepository.spawn(`gzip`, []);
|
||||||
|
pgdump.stdout.pipe(gzip.stdin);
|
||||||
|
|
||||||
|
const fileStream = this.storageRepository.createWriteStream(backupFilePath);
|
||||||
|
|
||||||
|
gzip.stdout.pipe(fileStream);
|
||||||
|
|
||||||
|
pgdump.on('error', (err) => {
|
||||||
|
this.logger.error('Backup failed with error', err);
|
||||||
|
reject(err);
|
||||||
|
});
|
||||||
|
|
||||||
|
gzip.on('error', (err) => {
|
||||||
|
this.logger.error('Gzip failed with error', err);
|
||||||
|
reject(err);
|
||||||
|
});
|
||||||
|
|
||||||
|
let pgdumpLogs = '';
|
||||||
|
let gzipLogs = '';
|
||||||
|
|
||||||
|
pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
|
||||||
|
gzip.stderr.on('data', (data) => (gzipLogs += data));
|
||||||
|
|
||||||
|
pgdump.on('exit', (code) => {
|
||||||
|
if (code !== 0) {
|
||||||
|
this.logger.error(`Backup failed with code ${code}`);
|
||||||
|
reject(`Backup failed with code ${code}`);
|
||||||
|
this.logger.error(pgdumpLogs);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (pgdumpLogs) {
|
||||||
|
this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
gzip.on('exit', (code) => {
|
||||||
|
if (code !== 0) {
|
||||||
|
this.logger.error(`Gzip failed with code ${code}`);
|
||||||
|
reject(`Gzip failed with code ${code}`);
|
||||||
|
this.logger.error(gzipLogs);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (pgdump.exitCode !== 0) {
|
||||||
|
this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
|
||||||
|
} catch (error) {
|
||||||
|
this.logger.error('Database Backup Failure', error);
|
||||||
|
return JobStatus.FAILED;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.debug(`Database Backup Success`);
|
||||||
|
await this.cleanupDatabaseBackups();
|
||||||
|
return JobStatus.SUCCESS;
|
||||||
|
}
|
||||||
|
}
|
@ -28,6 +28,7 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
|
|||||||
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
|
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
|
||||||
import { IPartnerRepository } from 'src/interfaces/partner.interface';
|
import { IPartnerRepository } from 'src/interfaces/partner.interface';
|
||||||
import { IPersonRepository } from 'src/interfaces/person.interface';
|
import { IPersonRepository } from 'src/interfaces/person.interface';
|
||||||
|
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||||
import { ISearchRepository } from 'src/interfaces/search.interface';
|
import { ISearchRepository } from 'src/interfaces/search.interface';
|
||||||
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
|
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
|
||||||
import { ISessionRepository } from 'src/interfaces/session.interface';
|
import { ISessionRepository } from 'src/interfaces/session.interface';
|
||||||
@ -72,6 +73,7 @@ export class BaseService {
|
|||||||
@Inject(IOAuthRepository) protected oauthRepository: IOAuthRepository,
|
@Inject(IOAuthRepository) protected oauthRepository: IOAuthRepository,
|
||||||
@Inject(IPartnerRepository) protected partnerRepository: IPartnerRepository,
|
@Inject(IPartnerRepository) protected partnerRepository: IPartnerRepository,
|
||||||
@Inject(IPersonRepository) protected personRepository: IPersonRepository,
|
@Inject(IPersonRepository) protected personRepository: IPersonRepository,
|
||||||
|
@Inject(IProcessRepository) protected processRepository: IProcessRepository,
|
||||||
@Inject(ISearchRepository) protected searchRepository: ISearchRepository,
|
@Inject(ISearchRepository) protected searchRepository: ISearchRepository,
|
||||||
@Inject(IServerInfoRepository) protected serverInfoRepository: IServerInfoRepository,
|
@Inject(IServerInfoRepository) protected serverInfoRepository: IServerInfoRepository,
|
||||||
@Inject(ISessionRepository) protected sessionRepository: ISessionRepository,
|
@Inject(ISessionRepository) protected sessionRepository: ISessionRepository,
|
||||||
|
@ -6,6 +6,7 @@ import { AssetMediaService } from 'src/services/asset-media.service';
|
|||||||
import { AssetService } from 'src/services/asset.service';
|
import { AssetService } from 'src/services/asset.service';
|
||||||
import { AuditService } from 'src/services/audit.service';
|
import { AuditService } from 'src/services/audit.service';
|
||||||
import { AuthService } from 'src/services/auth.service';
|
import { AuthService } from 'src/services/auth.service';
|
||||||
|
import { BackupService } from 'src/services/backup.service';
|
||||||
import { CliService } from 'src/services/cli.service';
|
import { CliService } from 'src/services/cli.service';
|
||||||
import { DatabaseService } from 'src/services/database.service';
|
import { DatabaseService } from 'src/services/database.service';
|
||||||
import { DownloadService } from 'src/services/download.service';
|
import { DownloadService } from 'src/services/download.service';
|
||||||
@ -48,6 +49,7 @@ export const services = [
|
|||||||
AssetService,
|
AssetService,
|
||||||
AuditService,
|
AuditService,
|
||||||
AuthService,
|
AuthService,
|
||||||
|
BackupService,
|
||||||
CliService,
|
CliService,
|
||||||
DatabaseService,
|
DatabaseService,
|
||||||
DownloadService,
|
DownloadService,
|
||||||
|
@ -44,7 +44,7 @@ describe(JobService.name, () => {
|
|||||||
sut.onBootstrap(ImmichWorker.MICROSERVICES);
|
sut.onBootstrap(ImmichWorker.MICROSERVICES);
|
||||||
sut.onConfigUpdate({ oldConfig: defaults, newConfig: defaults });
|
sut.onConfigUpdate({ oldConfig: defaults, newConfig: defaults });
|
||||||
|
|
||||||
expect(jobMock.setConcurrency).toHaveBeenCalledTimes(14);
|
expect(jobMock.setConcurrency).toHaveBeenCalledTimes(15);
|
||||||
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
|
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
|
||||||
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
|
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
|
||||||
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
|
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
|
||||||
@ -114,6 +114,7 @@ describe(JobService.name, () => {
|
|||||||
[QueueName.SIDECAR]: expectedJobStatus,
|
[QueueName.SIDECAR]: expectedJobStatus,
|
||||||
[QueueName.LIBRARY]: expectedJobStatus,
|
[QueueName.LIBRARY]: expectedJobStatus,
|
||||||
[QueueName.NOTIFICATION]: expectedJobStatus,
|
[QueueName.NOTIFICATION]: expectedJobStatus,
|
||||||
|
[QueueName.BACKUP_DATABASE]: expectedJobStatus,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -220,6 +220,7 @@ export class JobService extends BaseService {
|
|||||||
QueueName.FACIAL_RECOGNITION,
|
QueueName.FACIAL_RECOGNITION,
|
||||||
QueueName.STORAGE_TEMPLATE_MIGRATION,
|
QueueName.STORAGE_TEMPLATE_MIGRATION,
|
||||||
QueueName.DUPLICATE_DETECTION,
|
QueueName.DUPLICATE_DETECTION,
|
||||||
|
QueueName.BACKUP_DATABASE,
|
||||||
].includes(name);
|
].includes(name);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -5,6 +5,7 @@ import { ArgOf } from 'src/interfaces/event.interface';
|
|||||||
import { IDeleteFilesJob, JobName } from 'src/interfaces/job.interface';
|
import { IDeleteFilesJob, JobName } from 'src/interfaces/job.interface';
|
||||||
import { AssetService } from 'src/services/asset.service';
|
import { AssetService } from 'src/services/asset.service';
|
||||||
import { AuditService } from 'src/services/audit.service';
|
import { AuditService } from 'src/services/audit.service';
|
||||||
|
import { BackupService } from 'src/services/backup.service';
|
||||||
import { DuplicateService } from 'src/services/duplicate.service';
|
import { DuplicateService } from 'src/services/duplicate.service';
|
||||||
import { JobService } from 'src/services/job.service';
|
import { JobService } from 'src/services/job.service';
|
||||||
import { LibraryService } from 'src/services/library.service';
|
import { LibraryService } from 'src/services/library.service';
|
||||||
@ -26,6 +27,7 @@ export class MicroservicesService {
|
|||||||
constructor(
|
constructor(
|
||||||
private auditService: AuditService,
|
private auditService: AuditService,
|
||||||
private assetService: AssetService,
|
private assetService: AssetService,
|
||||||
|
private backupService: BackupService,
|
||||||
private jobService: JobService,
|
private jobService: JobService,
|
||||||
private libraryService: LibraryService,
|
private libraryService: LibraryService,
|
||||||
private mediaService: MediaService,
|
private mediaService: MediaService,
|
||||||
@ -52,6 +54,7 @@ export class MicroservicesService {
|
|||||||
await this.jobService.init({
|
await this.jobService.init({
|
||||||
[JobName.ASSET_DELETION]: (data) => this.assetService.handleAssetDeletion(data),
|
[JobName.ASSET_DELETION]: (data) => this.assetService.handleAssetDeletion(data),
|
||||||
[JobName.ASSET_DELETION_CHECK]: () => this.assetService.handleAssetDeletionCheck(),
|
[JobName.ASSET_DELETION_CHECK]: () => this.assetService.handleAssetDeletionCheck(),
|
||||||
|
[JobName.BACKUP_DATABASE]: () => this.backupService.handleBackupDatabase(),
|
||||||
[JobName.DELETE_FILES]: (data: IDeleteFilesJob) => this.storageService.handleDeleteFiles(data),
|
[JobName.DELETE_FILES]: (data: IDeleteFilesJob) => this.storageService.handleDeleteFiles(data),
|
||||||
[JobName.CLEAN_OLD_AUDIT_LOGS]: () => this.auditService.handleCleanup(),
|
[JobName.CLEAN_OLD_AUDIT_LOGS]: () => this.auditService.handleCleanup(),
|
||||||
[JobName.CLEAN_OLD_SESSION_TOKENS]: () => this.sessionService.handleCleanup(),
|
[JobName.CLEAN_OLD_SESSION_TOKENS]: () => this.sessionService.handleCleanup(),
|
||||||
|
@ -32,6 +32,7 @@ describe(StorageService.name, () => {
|
|||||||
|
|
||||||
expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
|
expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
|
||||||
mountChecks: {
|
mountChecks: {
|
||||||
|
backups: true,
|
||||||
'encoded-video': true,
|
'encoded-video': true,
|
||||||
library: true,
|
library: true,
|
||||||
profile: true,
|
profile: true,
|
||||||
@ -44,16 +45,19 @@ describe(StorageService.name, () => {
|
|||||||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/profile');
|
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/profile');
|
||||||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs');
|
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs');
|
||||||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/upload');
|
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/upload');
|
||||||
|
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/backups');
|
||||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/encoded-video/.immich', expect.any(Buffer));
|
expect(storageMock.createFile).toHaveBeenCalledWith('upload/encoded-video/.immich', expect.any(Buffer));
|
||||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
|
expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
|
||||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/profile/.immich', expect.any(Buffer));
|
expect(storageMock.createFile).toHaveBeenCalledWith('upload/profile/.immich', expect.any(Buffer));
|
||||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/thumbs/.immich', expect.any(Buffer));
|
expect(storageMock.createFile).toHaveBeenCalledWith('upload/thumbs/.immich', expect.any(Buffer));
|
||||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/upload/.immich', expect.any(Buffer));
|
expect(storageMock.createFile).toHaveBeenCalledWith('upload/upload/.immich', expect.any(Buffer));
|
||||||
|
expect(storageMock.createFile).toHaveBeenCalledWith('upload/backups/.immich', expect.any(Buffer));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should enable mount folder checking for a new folder type', async () => {
|
it('should enable mount folder checking for a new folder type', async () => {
|
||||||
systemMock.get.mockResolvedValue({
|
systemMock.get.mockResolvedValue({
|
||||||
mountChecks: {
|
mountChecks: {
|
||||||
|
backups: false,
|
||||||
'encoded-video': true,
|
'encoded-video': true,
|
||||||
library: false,
|
library: false,
|
||||||
profile: true,
|
profile: true,
|
||||||
@ -66,6 +70,7 @@ describe(StorageService.name, () => {
|
|||||||
|
|
||||||
expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
|
expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
|
||||||
mountChecks: {
|
mountChecks: {
|
||||||
|
backups: true,
|
||||||
'encoded-video': true,
|
'encoded-video': true,
|
||||||
library: true,
|
library: true,
|
||||||
profile: true,
|
profile: true,
|
||||||
@ -73,10 +78,12 @@ describe(StorageService.name, () => {
|
|||||||
upload: true,
|
upload: true,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
expect(storageMock.mkdirSync).toHaveBeenCalledTimes(1);
|
expect(storageMock.mkdirSync).toHaveBeenCalledTimes(2);
|
||||||
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/library');
|
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/library');
|
||||||
expect(storageMock.createFile).toHaveBeenCalledTimes(1);
|
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/backups');
|
||||||
|
expect(storageMock.createFile).toHaveBeenCalledTimes(2);
|
||||||
expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
|
expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
|
||||||
|
expect(storageMock.createFile).toHaveBeenCalledWith('upload/backups/.immich', expect.any(Buffer));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw an error if .immich is missing', async () => {
|
it('should throw an error if .immich is missing', async () => {
|
||||||
|
@ -44,6 +44,13 @@ const updatedConfig = Object.freeze<SystemConfig>({
|
|||||||
[QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
|
[QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
|
||||||
[QueueName.NOTIFICATION]: { concurrency: 5 },
|
[QueueName.NOTIFICATION]: { concurrency: 5 },
|
||||||
},
|
},
|
||||||
|
backup: {
|
||||||
|
database: {
|
||||||
|
enabled: true,
|
||||||
|
cronExpression: '0 02 * * *',
|
||||||
|
keepLastAmount: 14,
|
||||||
|
},
|
||||||
|
},
|
||||||
ffmpeg: {
|
ffmpeg: {
|
||||||
crf: 30,
|
crf: 30,
|
||||||
threads: 0,
|
threads: 0,
|
||||||
|
9
server/test/fixtures/system-config.stub.ts
vendored
9
server/test/fixtures/system-config.stub.ts
vendored
@ -74,6 +74,15 @@ export const systemConfigStub = {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
backupEnabled: {
|
||||||
|
backup: {
|
||||||
|
database: {
|
||||||
|
enabled: true,
|
||||||
|
cronExpression: '0 0 * * *',
|
||||||
|
keepLastAmount: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
machineLearningDisabled: {
|
machineLearningDisabled: {
|
||||||
machineLearning: {
|
machineLearning: {
|
||||||
enabled: false,
|
enabled: false,
|
||||||
|
8
server/test/repositories/process.repository.mock.ts
Normal file
8
server/test/repositories/process.repository.mock.ts
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
import { IProcessRepository } from 'src/interfaces/process.interface';
|
||||||
|
import { Mocked, vitest } from 'vitest';
|
||||||
|
|
||||||
|
export const newProcessRepositoryMock = (): Mocked<IProcessRepository> => {
|
||||||
|
return {
|
||||||
|
spawn: vitest.fn(),
|
||||||
|
};
|
||||||
|
};
|
@ -49,6 +49,7 @@ export const newStorageRepositoryMock = (reset = true): Mocked<IStorageRepositor
|
|||||||
createReadStream: vitest.fn(),
|
createReadStream: vitest.fn(),
|
||||||
readFile: vitest.fn(),
|
readFile: vitest.fn(),
|
||||||
createFile: vitest.fn(),
|
createFile: vitest.fn(),
|
||||||
|
createWriteStream: vitest.fn(),
|
||||||
createOrOverwriteFile: vitest.fn(),
|
createOrOverwriteFile: vitest.fn(),
|
||||||
overwriteFile: vitest.fn(),
|
overwriteFile: vitest.fn(),
|
||||||
unlink: vitest.fn(),
|
unlink: vitest.fn(),
|
||||||
|
@ -1,3 +1,5 @@
|
|||||||
|
import { ChildProcessWithoutNullStreams } from 'node:child_process';
|
||||||
|
import { Writable } from 'node:stream';
|
||||||
import { PNG } from 'pngjs';
|
import { PNG } from 'pngjs';
|
||||||
import { IMetadataRepository } from 'src/interfaces/metadata.interface';
|
import { IMetadataRepository } from 'src/interfaces/metadata.interface';
|
||||||
import { BaseService } from 'src/services/base.service';
|
import { BaseService } from 'src/services/base.service';
|
||||||
@ -25,6 +27,7 @@ import { newNotificationRepositoryMock } from 'test/repositories/notification.re
|
|||||||
import { newOAuthRepositoryMock } from 'test/repositories/oauth.repository.mock';
|
import { newOAuthRepositoryMock } from 'test/repositories/oauth.repository.mock';
|
||||||
import { newPartnerRepositoryMock } from 'test/repositories/partner.repository.mock';
|
import { newPartnerRepositoryMock } from 'test/repositories/partner.repository.mock';
|
||||||
import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
|
import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
|
||||||
|
import { newProcessRepositoryMock } from 'test/repositories/process.repository.mock';
|
||||||
import { newSearchRepositoryMock } from 'test/repositories/search.repository.mock';
|
import { newSearchRepositoryMock } from 'test/repositories/search.repository.mock';
|
||||||
import { newServerInfoRepositoryMock } from 'test/repositories/server-info.repository.mock';
|
import { newServerInfoRepositoryMock } from 'test/repositories/server-info.repository.mock';
|
||||||
import { newSessionRepositoryMock } from 'test/repositories/session.repository.mock';
|
import { newSessionRepositoryMock } from 'test/repositories/session.repository.mock';
|
||||||
@ -38,7 +41,8 @@ import { newTrashRepositoryMock } from 'test/repositories/trash.repository.mock'
|
|||||||
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';
|
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';
|
||||||
import { newVersionHistoryRepositoryMock } from 'test/repositories/version-history.repository.mock';
|
import { newVersionHistoryRepositoryMock } from 'test/repositories/version-history.repository.mock';
|
||||||
import { newViewRepositoryMock } from 'test/repositories/view.repository.mock';
|
import { newViewRepositoryMock } from 'test/repositories/view.repository.mock';
|
||||||
import { Mocked } from 'vitest';
|
import { Readable } from 'typeorm/platform/PlatformTools';
|
||||||
|
import { Mocked, vitest } from 'vitest';
|
||||||
|
|
||||||
type RepositoryOverrides = {
|
type RepositoryOverrides = {
|
||||||
metadataRepository: IMetadataRepository;
|
metadataRepository: IMetadataRepository;
|
||||||
@ -78,6 +82,7 @@ export const newTestService = <T extends BaseService>(
|
|||||||
const oauthMock = newOAuthRepositoryMock();
|
const oauthMock = newOAuthRepositoryMock();
|
||||||
const partnerMock = newPartnerRepositoryMock();
|
const partnerMock = newPartnerRepositoryMock();
|
||||||
const personMock = newPersonRepositoryMock();
|
const personMock = newPersonRepositoryMock();
|
||||||
|
const processMock = newProcessRepositoryMock();
|
||||||
const searchMock = newSearchRepositoryMock();
|
const searchMock = newSearchRepositoryMock();
|
||||||
const serverInfoMock = newServerInfoRepositoryMock();
|
const serverInfoMock = newServerInfoRepositoryMock();
|
||||||
const sessionMock = newSessionRepositoryMock();
|
const sessionMock = newSessionRepositoryMock();
|
||||||
@ -117,6 +122,7 @@ export const newTestService = <T extends BaseService>(
|
|||||||
oauthMock,
|
oauthMock,
|
||||||
partnerMock,
|
partnerMock,
|
||||||
personMock,
|
personMock,
|
||||||
|
processMock,
|
||||||
searchMock,
|
searchMock,
|
||||||
serverInfoMock,
|
serverInfoMock,
|
||||||
sessionMock,
|
sessionMock,
|
||||||
@ -158,6 +164,7 @@ export const newTestService = <T extends BaseService>(
|
|||||||
oauthMock,
|
oauthMock,
|
||||||
partnerMock,
|
partnerMock,
|
||||||
personMock,
|
personMock,
|
||||||
|
processMock,
|
||||||
searchMock,
|
searchMock,
|
||||||
serverInfoMock,
|
serverInfoMock,
|
||||||
sessionMock,
|
sessionMock,
|
||||||
@ -203,3 +210,37 @@ export const newRandomImage = () => {
|
|||||||
|
|
||||||
return value;
|
return value;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const mockSpawn = vitest.fn((exitCode: number, stdout: string, stderr: string, error?: unknown) => {
|
||||||
|
return {
|
||||||
|
stdout: new Readable({
|
||||||
|
read() {
|
||||||
|
this.push(stdout); // write mock data to stdout
|
||||||
|
this.push(null); // end stream
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
stderr: new Readable({
|
||||||
|
read() {
|
||||||
|
this.push(stderr); // write mock data to stderr
|
||||||
|
this.push(null); // end stream
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
stdin: new Writable({
|
||||||
|
write(chunk, encoding, callback) {
|
||||||
|
callback();
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
exitCode,
|
||||||
|
on: vitest.fn((event, callback: any) => {
|
||||||
|
if (event === 'close') {
|
||||||
|
callback(0);
|
||||||
|
}
|
||||||
|
if (event === 'error' && error) {
|
||||||
|
callback(error);
|
||||||
|
}
|
||||||
|
if (event === 'exit') {
|
||||||
|
callback(exitCode);
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
} as unknown as ChildProcessWithoutNullStreams;
|
||||||
|
});
|
||||||
|
@ -0,0 +1,91 @@
|
|||||||
|
<script lang="ts">
  import type { SystemConfigDto } from '@immich/sdk';
  import { isEqual } from 'lodash-es';
  import { fade } from 'svelte/transition';
  import type { SettingsResetEvent, SettingsSaveEvent } from '../admin-settings';
  import SettingInputField, {
    SettingInputFieldType,
  } from '$lib/components/shared-components/settings/setting-input-field.svelte';
  import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
  import SettingButtonsRow from '$lib/components/shared-components/settings/setting-buttons-row.svelte';
  import SettingSelect from '$lib/components/shared-components/settings/setting-select.svelte';
  import { t } from 'svelte-i18n';
  import FormatMessage from '$lib/components/i18n/format-message.svelte';

  // Admin settings panel for automatic database backups (enable toggle,
  // cron schedule, and retention count).
  export let savedConfig: SystemConfigDto; // last persisted config; drives isEdited markers
  export let defaultConfig: SystemConfigDto; // server defaults; drives reset-to-default visibility
  export let config: SystemConfigDto; // this is the config that is being edited
  export let disabled = false;
  export let onReset: SettingsResetEvent;
  export let onSave: SettingsSaveEvent;

  // Preset schedules for the dropdown; the text field below still accepts
  // arbitrary cron expressions. Reactive so labels follow locale changes.
  $: cronExpressionOptions = [
    { text: $t('interval.night_at_midnight'), value: '0 0 * * *' },
    { text: $t('interval.night_at_twoam'), value: '0 02 * * *' },
    { text: $t('interval.day_at_onepm'), value: '0 13 * * *' },
    { text: $t('interval.hours', { values: { hours: 6 } }), value: '0 */6 * * *' },
  ];
</script>

<div>
  <div in:fade={{ duration: 500 }}>
    <form autocomplete="off" on:submit|preventDefault>
      <div class="ml-4 mt-4 flex flex-col gap-4">
        <!-- Master toggle for automatic database backups -->
        <SettingSwitch
          title={$t('admin.backup_database_enable_description')}
          {disabled}
          bind:checked={config.backup.database.enabled}
        />

        <!-- Schedule presets; selecting one overwrites the cron expression -->
        <SettingSelect
          options={cronExpressionOptions}
          disabled={disabled || !config.backup.database.enabled}
          name="expression"
          label={$t('admin.cron_expression_presets')}
          bind:value={config.backup.database.cronExpression}
        />

        <!-- Free-form cron expression; description links to crontab.guru
             with the current expression pre-filled -->
        <SettingInputField
          inputType={SettingInputFieldType.TEXT}
          required={true}
          disabled={disabled || !config.backup.database.enabled}
          label={$t('admin.cron_expression')}
          bind:value={config.backup.database.cronExpression}
          isEdited={config.backup.database.cronExpression !== savedConfig.backup.database.cronExpression}
        >
          <svelte:fragment slot="desc">
            <p class="text-sm dark:text-immich-dark-fg">
              <FormatMessage key="admin.cron_expression_description" let:message>
                <a
                  href="https://crontab.guru/#{config.backup.database.cronExpression.replaceAll(' ', '_')}"
                  class="underline"
                  target="_blank"
                  rel="noreferrer"
                >
                  {message}
                  <br />
                </a>
              </FormatMessage>
            </p>
          </svelte:fragment>
        </SettingInputField>

        <!-- Retention: number of most-recent backups to keep -->
        <SettingInputField
          inputType={SettingInputFieldType.NUMBER}
          required={true}
          label={$t('admin.backup_keep_last_amount')}
          disabled={disabled || !config.backup.database.enabled}
          bind:value={config.backup.database.keepLastAmount}
          isEdited={config.backup.database.keepLastAmount !== savedConfig.backup.database.keepLastAmount}
        />

        <!-- Save / reset controls scoped to the 'backup' config section -->
        <SettingButtonsRow
          onReset={(options) => onReset({ ...options, configKeys: ['backup'] })}
          onSave={() => onSave({ backup: config.backup })}
          showResetToDefault={!isEqual(savedConfig.backup, defaultConfig.backup)}
          {disabled}
        />
      </div>
    </form>
  </div>
</div>
|
@ -65,7 +65,7 @@
|
|||||||
class="font-medium text-immich-primary dark:text-immich-dark-primary text-sm"
|
class="font-medium text-immich-primary dark:text-immich-dark-primary text-sm"
|
||||||
for="expression-select"
|
for="expression-select"
|
||||||
>
|
>
|
||||||
{$t('admin.library_cron_expression_presets')}
|
{$t('admin.cron_expression_presets')}
|
||||||
</label>
|
</label>
|
||||||
<select
|
<select
|
||||||
class="p-2 mt-2 text-sm rounded-lg bg-slate-200 hover:cursor-pointer dark:bg-gray-600"
|
class="p-2 mt-2 text-sm rounded-lg bg-slate-200 hover:cursor-pointer dark:bg-gray-600"
|
||||||
@ -84,13 +84,13 @@
|
|||||||
inputType={SettingInputFieldType.TEXT}
|
inputType={SettingInputFieldType.TEXT}
|
||||||
required={true}
|
required={true}
|
||||||
disabled={disabled || !config.library.scan.enabled}
|
disabled={disabled || !config.library.scan.enabled}
|
||||||
label={$t('admin.library_cron_expression')}
|
label={$t('admin.cron_expression')}
|
||||||
bind:value={config.library.scan.cronExpression}
|
bind:value={config.library.scan.cronExpression}
|
||||||
isEdited={config.library.scan.cronExpression !== savedConfig.library.scan.cronExpression}
|
isEdited={config.library.scan.cronExpression !== savedConfig.library.scan.cronExpression}
|
||||||
>
|
>
|
||||||
<svelte:fragment slot="desc">
|
<svelte:fragment slot="desc">
|
||||||
<p class="text-sm dark:text-immich-dark-fg">
|
<p class="text-sm dark:text-immich-dark-fg">
|
||||||
<FormatMessage key="admin.library_cron_expression_description" let:message>
|
<FormatMessage key="admin.cron_expression_description" let:message>
|
||||||
<a
|
<a
|
||||||
href="https://crontab.guru/#{config.library.scan.cronExpression.replaceAll(' ', '_')}"
|
href="https://crontab.guru/#{config.library.scan.cronExpression.replaceAll(' ', '_')}"
|
||||||
class="underline"
|
class="underline"
|
||||||
|
@ -148,6 +148,7 @@ export const getJobName = derived(t, ($t) => {
|
|||||||
[JobName.Search]: $t('search'),
|
[JobName.Search]: $t('search'),
|
||||||
[JobName.Library]: $t('library'),
|
[JobName.Library]: $t('library'),
|
||||||
[JobName.Notifications]: $t('notifications'),
|
[JobName.Notifications]: $t('notifications'),
|
||||||
|
[JobName.BackupDatabase]: $t('admin.backup_database'),
|
||||||
};
|
};
|
||||||
|
|
||||||
return names[jobName];
|
return names[jobName];
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import AdminSettings from '$lib/components/admin-page/settings/admin-settings.svelte';
|
import AdminSettings from '$lib/components/admin-page/settings/admin-settings.svelte';
|
||||||
import AuthSettings from '$lib/components/admin-page/settings/auth/auth-settings.svelte';
|
import AuthSettings from '$lib/components/admin-page/settings/auth/auth-settings.svelte';
|
||||||
|
import BackupSettings from '$lib/components/admin-page/settings/backup-settings/backup-settings.svelte';
|
||||||
import FFmpegSettings from '$lib/components/admin-page/settings/ffmpeg/ffmpeg-settings.svelte';
|
import FFmpegSettings from '$lib/components/admin-page/settings/ffmpeg/ffmpeg-settings.svelte';
|
||||||
import ImageSettings from '$lib/components/admin-page/settings/image/image-settings.svelte';
|
import ImageSettings from '$lib/components/admin-page/settings/image/image-settings.svelte';
|
||||||
import JobSettings from '$lib/components/admin-page/settings/job-settings/job-settings.svelte';
|
import JobSettings from '$lib/components/admin-page/settings/job-settings/job-settings.svelte';
|
||||||
@ -30,6 +31,7 @@
|
|||||||
import {
|
import {
|
||||||
mdiAccountOutline,
|
mdiAccountOutline,
|
||||||
mdiAlert,
|
mdiAlert,
|
||||||
|
mdiBackupRestore,
|
||||||
mdiBellOutline,
|
mdiBellOutline,
|
||||||
mdiBookshelf,
|
mdiBookshelf,
|
||||||
mdiContentCopy,
|
mdiContentCopy,
|
||||||
@ -99,6 +101,13 @@
|
|||||||
key: 'authentication',
|
key: 'authentication',
|
||||||
icon: mdiLockOutline,
|
icon: mdiLockOutline,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
component: BackupSettings,
|
||||||
|
title: $t('admin.backup_settings'),
|
||||||
|
subtitle: $t('admin.backup_settings_description'),
|
||||||
|
key: 'backup',
|
||||||
|
icon: mdiBackupRestore,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
component: ImageSettings,
|
component: ImageSettings,
|
||||||
title: $t('admin.image_settings'),
|
title: $t('admin.image_settings'),
|
||||||
|
Reference in New Issue
Block a user