
chore(server): introduce proper job status (#7932)

* introduce proper job status

* fix condition for onDone jobs

* fix tests
Daniel Dietzler 2024-03-15 14:16:54 +01:00 committed by GitHub
parent 07e8f79563
commit a46366d336
19 changed files with 241 additions and 216 deletions
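In short: job handlers now return a JobStatus enum ('success', 'failed', 'skipped') instead of a bare boolean, and JobService only chains follow-up jobs when a handler reports SUCCESS or SKIPPED. Below is a minimal standalone TypeScript sketch of that pattern; handleExample, run, and the usage line are illustrative placeholders rather than the actual Immich services, while the enum values and the onDone condition mirror the diff that follows.

// Illustrative sketch only; names other than JobStatus/JobHandler are hypothetical.
enum JobStatus {
  SUCCESS = 'success',
  FAILED = 'failed',
  SKIPPED = 'skipped',
}

type JobHandler<T = any> = (data: T) => Promise<JobStatus>;

// A handler now reports why it finished instead of returning true/false.
const handleExample: JobHandler<{ id: string }> = async ({ id }) => {
  if (!id) {
    return JobStatus.FAILED; // was: return false
  }
  return JobStatus.SUCCESS; // was: return true
};

// The dispatcher chains follow-up work only for SUCCESS or SKIPPED results,
// matching the new condition around onDone in job.service.ts.
async function run<T>(handler: JobHandler<T>, data: T, onDone: () => Promise<void>): Promise<void> {
  const status = await handler(data);
  if (status === JobStatus.SUCCESS || status === JobStatus.SKIPPED) {
    await onDone();
  }
}

// Example usage (hypothetical): await run(handleExample, { id: 'asset-123' }, async () => { /* queue next jobs */ });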

View File

@ -22,6 +22,7 @@ import {
ISystemConfigRepository,
IUserRepository,
JobItem,
JobStatus,
TimeBucketOptions,
} from '../repositories';
import { StorageCore, StorageFolder } from '../storage';
@ -384,7 +385,7 @@ export class AssetService {
this.communicationRepository.send(ClientEvent.ASSET_STACK_UPDATE, auth.user.id, ids);
}
async handleAssetDeletionCheck() {
async handleAssetDeletionCheck(): Promise<JobStatus> {
const config = await this.configCore.getConfig();
const trashedDays = config.trash.enabled ? config.trash.days : 0;
const trashedBefore = DateTime.now()
@ -400,10 +401,10 @@ export class AssetService {
);
}
return true;
return JobStatus.SUCCESS;
}
async handleAssetDeletion(job: IAssetDeletionJob) {
async handleAssetDeletion(job: IAssetDeletionJob): Promise<JobStatus> {
const { id, fromExternal } = job;
const asset = await this.assetRepository.getById(id, {
@ -416,12 +417,12 @@ export class AssetService {
});
if (!asset) {
return false;
return JobStatus.FAILED;
}
// Ignore requests that are not from an external library job but are for an external asset
if (!fromExternal && (!asset.library || asset.library.type === LibraryType.EXTERNAL)) {
return false;
return JobStatus.SKIPPED;
}
// Replace the parent of the stack children with a new asset
@ -456,7 +457,7 @@ export class AssetService {
await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files } });
}
return true;
return JobStatus.SUCCESS;
}
async deleteAll(auth: AuthDto, dto: AssetBulkDeleteDto): Promise<void> {

View File

@ -18,6 +18,7 @@ import {
IPersonRepository,
IStorageRepository,
IUserRepository,
JobStatus,
} from '../repositories';
import { AuditService } from './audit.service';
@ -48,8 +49,8 @@ describe(AuditService.name, () => {
describe('handleCleanup', () => {
it('should delete old audit entries', async () => {
await expect(sut.handleCleanup()).resolves.toBe(true);
expect(auditMock.removeBefore).toBeCalledWith(expect.any(Date));
await expect(sut.handleCleanup()).resolves.toBe(JobStatus.SUCCESS);
expect(auditMock.removeBefore).toHaveBeenCalledWith(expect.any(Date));
});
});

View File

@ -16,6 +16,7 @@ import {
IPersonRepository,
IStorageRepository,
IUserRepository,
JobStatus,
} from '../repositories';
import { StorageCore, StorageFolder } from '../storage';
import {
@ -44,9 +45,9 @@ export class AuditService {
this.access = AccessCore.create(accessRepository);
}
async handleCleanup(): Promise<boolean> {
async handleCleanup(): Promise<JobStatus> {
await this.repository.removeBefore(DateTime.now().minus(AUDIT_LOG_MAX_DURATION).toJSDate());
return true;
return JobStatus.SUCCESS;
}
async getDeletes(auth: AuthDto, dto: AuditDeletesDto): Promise<AuditDeletesResponseDto> {

View File

@ -16,13 +16,14 @@ import {
ISystemConfigRepository,
JobHandler,
JobItem,
JobStatus,
} from '../repositories';
import { FeatureFlag, SystemConfigCore } from '../system-config/system-config.core';
import { JobCommand, JobName, QueueName } from './job.constants';
import { JobService } from './job.service';
const makeMockHandlers = (success: boolean) => {
const mock = jest.fn().mockResolvedValue(success);
const makeMockHandlers = (status: JobStatus) => {
const mock = jest.fn().mockResolvedValue(status);
return Object.fromEntries(Object.values(JobName).map((jobName) => [jobName, mock])) as unknown as Record<
JobName,
JobHandler
@ -221,13 +222,13 @@ describe(JobService.name, () => {
describe('init', () => {
it('should register a handler for each queue', async () => {
await sut.init(makeMockHandlers(true));
await sut.init(makeMockHandlers(JobStatus.SUCCESS));
expect(configMock.load).toHaveBeenCalled();
expect(jobMock.addHandler).toHaveBeenCalledTimes(Object.keys(QueueName).length);
});
it('should subscribe to config changes', async () => {
await sut.init(makeMockHandlers(false));
await sut.init(makeMockHandlers(JobStatus.FAILED));
SystemConfigCore.create(newSystemConfigRepositoryMock(false)).config$.next({
job: {
@ -332,7 +333,7 @@ describe(JobService.name, () => {
}
}
await sut.init(makeMockHandlers(true));
await sut.init(makeMockHandlers(JobStatus.SUCCESS));
await jobMock.addHandler.mock.calls[0][2](item);
if (jobs.length > 1) {
@ -348,7 +349,7 @@ describe(JobService.name, () => {
});
it(`should not queue any jobs when ${item.name} finishes with 'false'`, async () => {
await sut.init(makeMockHandlers(false));
await sut.init(makeMockHandlers(JobStatus.FAILED));
await jobMock.addHandler.mock.calls[0][2](item);
expect(jobMock.queueAll).not.toHaveBeenCalled();

View File

@ -11,6 +11,7 @@ import {
ISystemConfigRepository,
JobHandler,
JobItem,
JobStatus,
QueueCleanType,
} from '../repositories';
import { FeatureFlag, SystemConfigCore } from '../system-config/system-config.core';
@ -155,8 +156,8 @@ export class JobService {
try {
const handler = jobHandlers[name];
const success = await handler(data);
if (success) {
const status = await handler(data);
if (status === JobStatus.SUCCESS || status === JobStatus.SKIPPED) {
await this.onDone(item);
}
} catch (error: Error | any) {

View File

@ -28,6 +28,7 @@ import {
ILibraryRepository,
IStorageRepository,
ISystemConfigRepository,
JobStatus,
StorageEventType,
} from '../repositories';
import { SystemConfigCore } from '../system-config/system-config.core';
@ -214,7 +215,7 @@ describe(LibraryService.name, () => {
libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
await expect(sut.handleQueueAssetRefresh(mockLibraryJob)).resolves.toBe(false);
await expect(sut.handleQueueAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
});
it('should ignore import paths that do not exist', async () => {
@ -340,7 +341,7 @@ describe(LibraryService.name, () => {
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.create.mock.calls).toEqual([
[
@ -388,7 +389,7 @@ describe(LibraryService.name, () => {
assetMock.create.mockResolvedValue(assetStub.image);
storageMock.checkFileExists.mockResolvedValue(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.create.mock.calls).toEqual([
[
@ -435,7 +436,7 @@ describe(LibraryService.name, () => {
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.video);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.create.mock.calls).toEqual([
[
@ -491,7 +492,7 @@ describe(LibraryService.name, () => {
assetMock.create.mockResolvedValue(assetStub.image);
libraryMock.get.mockResolvedValue({ ...libraryStub.externalLibrary1, deletedAt: new Date() });
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(false);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
expect(assetMock.create.mock.calls).toEqual([]);
});
@ -512,7 +513,7 @@ describe(LibraryService.name, () => {
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
expect(jobMock.queue).not.toHaveBeenCalled();
expect(jobMock.queueAll).not.toHaveBeenCalled();
@ -529,7 +530,7 @@ describe(LibraryService.name, () => {
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.METADATA_EXTRACTION,
@ -560,7 +561,7 @@ describe(LibraryService.name, () => {
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.save).toHaveBeenCalledWith({ id: assetStub.image.id, isOffline: true });
expect(jobMock.queue).not.toHaveBeenCalled();
@ -578,7 +579,7 @@ describe(LibraryService.name, () => {
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.offline);
assetMock.create.mockResolvedValue(assetStub.offline);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.save).toHaveBeenCalledWith({ id: assetStub.offline.id, isOffline: false });
@ -611,7 +612,7 @@ describe(LibraryService.name, () => {
expect(assetMock.save).not.toHaveBeenCalled();
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
});
it('should refresh an existing asset if forced', async () => {
@ -625,7 +626,7 @@ describe(LibraryService.name, () => {
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.image.id], {
fileCreatedAt: new Date('2023-01-01'),
@ -653,7 +654,7 @@ describe(LibraryService.name, () => {
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(true);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.create).toHaveBeenCalled();
const createdAsset = assetMock.create.mock.calls[0][0];
@ -1076,7 +1077,7 @@ describe(LibraryService.name, () => {
describe('handleQueueCleanup', () => {
it('should queue cleanup jobs', async () => {
libraryMock.getAllDeleted.mockResolvedValue([libraryStub.uploadLibrary1, libraryStub.externalLibrary1]);
await expect(sut.handleQueueCleanup()).resolves.toBe(true);
await expect(sut.handleQueueCleanup()).resolves.toBe(JobStatus.SUCCESS);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{ name: JobName.LIBRARY_DELETE, data: { id: libraryStub.uploadLibrary1.id } },
@ -1363,7 +1364,7 @@ describe(LibraryService.name, () => {
libraryMock.getAssetIds.mockResolvedValue([]);
libraryMock.delete.mockImplementation(async () => {});
await expect(sut.handleDeleteLibrary({ id: libraryStub.uploadLibrary1.id })).resolves.toBe(false);
await expect(sut.handleDeleteLibrary({ id: libraryStub.uploadLibrary1.id })).resolves.toBe(JobStatus.FAILED);
});
it('should delete an empty library', async () => {
@ -1371,7 +1372,7 @@ describe(LibraryService.name, () => {
libraryMock.getAssetIds.mockResolvedValue([]);
libraryMock.delete.mockImplementation(async () => {});
await expect(sut.handleDeleteLibrary({ id: libraryStub.uploadLibrary1.id })).resolves.toBe(true);
await expect(sut.handleDeleteLibrary({ id: libraryStub.uploadLibrary1.id })).resolves.toBe(JobStatus.SUCCESS);
});
it('should delete a library with assets', async () => {
@ -1381,7 +1382,7 @@ describe(LibraryService.name, () => {
assetMock.getById.mockResolvedValue(assetStub.image1);
await expect(sut.handleDeleteLibrary({ id: libraryStub.uploadLibrary1.id })).resolves.toBe(true);
await expect(sut.handleDeleteLibrary({ id: libraryStub.uploadLibrary1.id })).resolves.toBe(JobStatus.SUCCESS);
});
});
@ -1475,7 +1476,7 @@ describe(LibraryService.name, () => {
it('should queue the refresh job', async () => {
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibrary1]);
await expect(sut.handleQueueAllScan({})).resolves.toBe(true);
await expect(sut.handleQueueAllScan({})).resolves.toBe(JobStatus.SUCCESS);
expect(jobMock.queue.mock.calls).toEqual([
[
@ -1500,7 +1501,7 @@ describe(LibraryService.name, () => {
it('should queue the force refresh job', async () => {
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibrary1]);
await expect(sut.handleQueueAllScan({ force: true })).resolves.toBe(true);
await expect(sut.handleQueueAllScan({ force: true })).resolves.toBe(JobStatus.SUCCESS);
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_QUEUE_CLEANUP,
@ -1525,7 +1526,7 @@ describe(LibraryService.name, () => {
assetMock.getWith.mockResolvedValue({ items: [assetStub.image1], hasNextPage: false });
assetMock.getById.mockResolvedValue(assetStub.image1);
await expect(sut.handleOfflineRemoval({ id: libraryStub.externalLibrary1.id })).resolves.toBe(true);
await expect(sut.handleOfflineRemoval({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SUCCESS);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{

View File

@ -22,6 +22,7 @@ import {
ILibraryRepository,
IStorageRepository,
ISystemConfigRepository,
JobStatus,
StorageEventType,
WithProperty,
} from '../repositories';
@ -241,13 +242,13 @@ export class LibraryService extends EventEmitter {
return libraries.map((library) => mapLibrary(library));
}
async handleQueueCleanup(): Promise<boolean> {
async handleQueueCleanup(): Promise<JobStatus> {
this.logger.debug('Cleaning up any pending library deletions');
const pendingDeletion = await this.repository.getAllDeleted();
await this.jobRepository.queueAll(
pendingDeletion.map((libraryToDelete) => ({ name: JobName.LIBRARY_DELETE, data: { id: libraryToDelete.id } })),
);
return true;
return JobStatus.SUCCESS;
}
async create(auth: AuthDto, dto: CreateLibraryDto): Promise<LibraryResponseDto> {
@ -410,10 +411,10 @@ export class LibraryService extends EventEmitter {
await this.jobRepository.queue({ name: JobName.LIBRARY_DELETE, data: { id } });
}
async handleDeleteLibrary(job: IEntityJob): Promise<boolean> {
async handleDeleteLibrary(job: IEntityJob): Promise<JobStatus> {
const library = await this.repository.get(job.id, true);
if (!library) {
return false;
return JobStatus.FAILED;
}
// TODO use pagination
@ -427,10 +428,10 @@ export class LibraryService extends EventEmitter {
this.logger.log(`Deleting library ${job.id}`);
await this.repository.delete(job.id);
}
return true;
return JobStatus.SUCCESS;
}
async handleAssetRefresh(job: ILibraryFileJob) {
async handleAssetRefresh(job: ILibraryFileJob): Promise<JobStatus> {
const assetPath = path.normalize(job.assetPath);
const existingAssetEntity = await this.assetRepository.getByLibraryIdAndOriginalPath(job.id, assetPath);
@ -445,7 +446,7 @@ export class LibraryService extends EventEmitter {
this.logger.debug(`Marking asset as offline: ${assetPath}`);
await this.assetRepository.save({ id: existingAssetEntity.id, isOffline: true });
return true;
return JobStatus.SUCCESS;
} else {
// File can't be accessed and does not already exist in db
throw new BadRequestException('Cannot access file', { cause: error });
@ -483,7 +484,7 @@ export class LibraryService extends EventEmitter {
if (!doImport && !doRefresh) {
// If we don't import, exit here
return true;
return JobStatus.SKIPPED;
}
let assetType: AssetType;
@ -509,7 +510,7 @@ export class LibraryService extends EventEmitter {
const library = await this.repository.get(job.id, true);
if (library?.deletedAt) {
this.logger.error('Cannot import asset into deleted library');
return false;
return JobStatus.FAILED;
}
const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`);
@ -540,7 +541,7 @@ export class LibraryService extends EventEmitter {
});
} else {
// Not importing and not refreshing, do nothing
return true;
return JobStatus.SKIPPED;
}
this.logger.debug(`Queuing metadata extraction for: ${assetPath}`);
@ -551,7 +552,7 @@ export class LibraryService extends EventEmitter {
await this.jobRepository.queue({ name: JobName.VIDEO_CONVERSION, data: { id: assetId } });
}
return true;
return JobStatus.SUCCESS;
}
async queueScan(auth: AuthDto, id: string, dto: ScanLibraryDto) {
@ -584,7 +585,7 @@ export class LibraryService extends EventEmitter {
});
}
async handleQueueAllScan(job: IBaseJob): Promise<boolean> {
async handleQueueAllScan(job: IBaseJob): Promise<JobStatus> {
this.logger.debug(`Refreshing all external libraries: force=${job.force}`);
// Queue cleanup
@ -602,10 +603,10 @@ export class LibraryService extends EventEmitter {
},
})),
);
return true;
return JobStatus.SUCCESS;
}
async handleOfflineRemoval(job: IEntityJob): Promise<boolean> {
async handleOfflineRemoval(job: IEntityJob): Promise<JobStatus> {
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getWith(pagination, WithProperty.IS_OFFLINE, job.id),
);
@ -617,14 +618,14 @@ export class LibraryService extends EventEmitter {
);
}
return true;
return JobStatus.SUCCESS;
}
async handleQueueAssetRefresh(job: ILibraryRefreshJob): Promise<boolean> {
async handleQueueAssetRefresh(job: ILibraryRefreshJob): Promise<JobStatus> {
const library = await this.repository.get(job.id);
if (!library || library.type !== LibraryType.EXTERNAL) {
this.logger.warn('Can only refresh external libraries');
return false;
return JobStatus.FAILED;
}
this.logger.log(`Refreshing library: ${job.id}`);
@ -694,7 +695,7 @@ export class LibraryService extends EventEmitter {
await this.repository.update({ id: job.id, refreshedAt: new Date() });
return true;
return JobStatus.SUCCESS;
}
private async getPathTrie(library: LibraryEntity): Promise<Trie<string>> {

View File

@ -34,6 +34,7 @@ import {
IPersonRepository,
IStorageRepository,
ISystemConfigRepository,
JobStatus,
WithoutProperty,
} from '../repositories';
import { MediaService } from './media.service';
@ -1214,22 +1215,22 @@ describe(MediaService.name, () => {
expect(mediaMock.transcode).not.toHaveBeenCalled();
});
it('should return false if hwaccel is enabled for an unsupported codec', async () => {
it('should fail if hwaccel is enabled for an unsupported codec', async () => {
mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
configMock.load.mockResolvedValue([
{ key: SystemConfigKey.FFMPEG_ACCEL, value: TranscodeHWAccel.NVENC },
{ key: SystemConfigKey.FFMPEG_TARGET_VIDEO_CODEC, value: VideoCodec.VP9 },
]);
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toEqual(false);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
expect(mediaMock.transcode).not.toHaveBeenCalled();
});
it('should return false if hwaccel option is invalid', async () => {
it('should fail if hwaccel option is invalid', async () => {
mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
configMock.load.mockResolvedValue([{ key: SystemConfigKey.FFMPEG_ACCEL, value: 'invalid' }]);
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toEqual(false);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
expect(mediaMock.transcode).not.toHaveBeenCalled();
});
@ -1548,12 +1549,12 @@ describe(MediaService.name, () => {
);
});
it('should return false for qsv if no hw devices', async () => {
it('should fail for qsv if no hw devices', async () => {
storageMock.readdir.mockResolvedValue([]);
mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
configMock.load.mockResolvedValue([{ key: SystemConfigKey.FFMPEG_ACCEL, value: TranscodeHWAccel.QSV }]);
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toEqual(false);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
expect(mediaMock.transcode).not.toHaveBeenCalled();
});
@ -1777,12 +1778,12 @@ describe(MediaService.name, () => {
);
});
it('should return false for vaapi if no hw devices', async () => {
it('should fail for vaapi if no hw devices', async () => {
storageMock.readdir.mockResolvedValue([]);
mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
configMock.load.mockResolvedValue([{ key: SystemConfigKey.FFMPEG_ACCEL, value: TranscodeHWAccel.VAAPI }]);
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toEqual(false);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
expect(mediaMock.transcode).not.toHaveBeenCalled();
});

View File

@ -24,6 +24,7 @@ import {
IStorageRepository,
ISystemConfigRepository,
JobItem,
JobStatus,
VideoCodecHWConfig,
VideoStreamInfo,
WithoutProperty,
@ -70,7 +71,7 @@ export class MediaService {
);
}
async handleQueueGenerateThumbnails({ force }: IBaseJob) {
async handleQueueGenerateThumbnails({ force }: IBaseJob): Promise<JobStatus> {
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination)
@ -118,10 +119,10 @@ export class MediaService {
await this.jobRepository.queueAll(jobs);
return true;
return JobStatus.SUCCESS;
}
async handleQueueMigration() {
async handleQueueMigration(): Promise<JobStatus> {
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getAll(pagination),
);
@ -148,31 +149,31 @@ export class MediaService {
);
}
return true;
return JobStatus.SUCCESS;
}
async handleAssetMigration({ id }: IEntityJob) {
async handleAssetMigration({ id }: IEntityJob): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset) {
return false;
return JobStatus.FAILED;
}
await this.storageCore.moveAssetFile(asset, AssetPathType.JPEG_THUMBNAIL);
await this.storageCore.moveAssetFile(asset, AssetPathType.WEBP_THUMBNAIL);
await this.storageCore.moveAssetFile(asset, AssetPathType.ENCODED_VIDEO);
return true;
return JobStatus.SUCCESS;
}
async handleGenerateJpegThumbnail({ id }: IEntityJob) {
async handleGenerateJpegThumbnail({ id }: IEntityJob): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id], { exifInfo: true });
if (!asset) {
return false;
return JobStatus.FAILED;
}
const resizePath = await this.generateThumbnail(asset, 'jpeg');
await this.assetRepository.save({ id: asset.id, resizePath });
return true;
return JobStatus.SUCCESS;
}
private async generateThumbnail(asset: AssetEntity, format: 'jpeg' | 'webp') {
@ -214,30 +215,30 @@ export class MediaService {
return path;
}
async handleGenerateWebpThumbnail({ id }: IEntityJob) {
async handleGenerateWebpThumbnail({ id }: IEntityJob): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id], { exifInfo: true });
if (!asset) {
return false;
return JobStatus.FAILED;
}
const webpPath = await this.generateThumbnail(asset, 'webp');
await this.assetRepository.save({ id: asset.id, webpPath });
return true;
return JobStatus.SUCCESS;
}
async handleGenerateThumbhashThumbnail({ id }: IEntityJob): Promise<boolean> {
async handleGenerateThumbhashThumbnail({ id }: IEntityJob): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset?.resizePath) {
return false;
return JobStatus.FAILED;
}
const thumbhash = await this.mediaRepository.generateThumbhash(asset.resizePath);
await this.assetRepository.save({ id: asset.id, thumbhash });
return true;
return JobStatus.SUCCESS;
}
async handleQueueVideoConversion(job: IBaseJob) {
async handleQueueVideoConversion(job: IBaseJob): Promise<JobStatus> {
const { force } = job;
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
@ -252,13 +253,13 @@ export class MediaService {
);
}
return true;
return JobStatus.SUCCESS;
}
async handleVideoConversion({ id }: IEntityJob) {
async handleVideoConversion({ id }: IEntityJob): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset || asset.type !== AssetType.VIDEO) {
return false;
return JobStatus.FAILED;
}
const input = asset.originalPath;
@ -270,12 +271,12 @@ export class MediaService {
const mainAudioStream = this.getMainStream(audioStreams);
const containerExtension = format.formatName;
if (!mainVideoStream || !containerExtension) {
return false;
return JobStatus.FAILED;
}
if (!mainVideoStream.height || !mainVideoStream.width) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
return false;
return JobStatus.FAILED;
}
const { ffmpeg: config } = await this.configCore.getConfig();
@ -288,7 +289,7 @@ export class MediaService {
await this.assetRepository.save({ id: asset.id, encodedVideoPath: null });
}
return true;
return JobStatus.SKIPPED;
}
let transcodeOptions;
@ -298,7 +299,7 @@ export class MediaService {
);
} catch (error) {
this.logger.error(`An error occurred while configuring transcoding options: ${error}`);
return false;
return JobStatus.FAILED;
}
this.logger.log(`Started encoding video ${asset.id} ${JSON.stringify(transcodeOptions)}`);
@ -322,7 +323,7 @@ export class MediaService {
await this.assetRepository.save({ id: asset.id, encodedVideoPath: output });
return true;
return JobStatus.SUCCESS;
}
private getMainStream<T extends VideoStreamInfo | AudioStreamInfo>(streams: T[]): T {

View File

@ -37,6 +37,7 @@ import {
IStorageRepository,
ISystemConfigRepository,
ImmichTags,
JobStatus,
WithoutProperty,
} from '../repositories';
import { MetadataService, Orientation } from './metadata.service';
@ -113,7 +114,7 @@ describe(MetadataService.name, () => {
describe('handleLivePhotoLinking', () => {
it('should handle an asset that could not be found', async () => {
await expect(sut.handleLivePhotoLinking({ id: assetStub.image.id })).resolves.toBe(false);
await expect(sut.handleLivePhotoLinking({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
expect(assetMock.findLivePhotoMatch).not.toHaveBeenCalled();
expect(assetMock.save).not.toHaveBeenCalled();
@ -123,7 +124,7 @@ describe(MetadataService.name, () => {
it('should handle an asset without exif info', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.image, exifInfo: undefined }]);
await expect(sut.handleLivePhotoLinking({ id: assetStub.image.id })).resolves.toBe(false);
await expect(sut.handleLivePhotoLinking({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
expect(assetMock.findLivePhotoMatch).not.toHaveBeenCalled();
expect(assetMock.save).not.toHaveBeenCalled();
@ -133,7 +134,7 @@ describe(MetadataService.name, () => {
it('should handle livePhotoCID not set', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.image }]);
await expect(sut.handleLivePhotoLinking({ id: assetStub.image.id })).resolves.toBe(true);
await expect(sut.handleLivePhotoLinking({ id: assetStub.image.id })).resolves.toBe(JobStatus.SKIPPED);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
expect(assetMock.findLivePhotoMatch).not.toHaveBeenCalled();
expect(assetMock.save).not.toHaveBeenCalled();
@ -148,7 +149,9 @@ describe(MetadataService.name, () => {
},
]);
await expect(sut.handleLivePhotoLinking({ id: assetStub.livePhotoMotionAsset.id })).resolves.toBe(true);
await expect(sut.handleLivePhotoLinking({ id: assetStub.livePhotoMotionAsset.id })).resolves.toBe(
JobStatus.SKIPPED,
);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id], { exifInfo: true });
expect(assetMock.findLivePhotoMatch).toHaveBeenCalledWith({
livePhotoCID: assetStub.livePhotoStillAsset.id,
@ -169,7 +172,9 @@ describe(MetadataService.name, () => {
]);
assetMock.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
await expect(sut.handleLivePhotoLinking({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(true);
await expect(sut.handleLivePhotoLinking({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoStillAsset.id], { exifInfo: true });
expect(assetMock.findLivePhotoMatch).toHaveBeenCalledWith({
livePhotoCID: assetStub.livePhotoMotionAsset.id,
@ -194,7 +199,9 @@ describe(MetadataService.name, () => {
]);
assetMock.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
await expect(sut.handleLivePhotoLinking({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(true);
await expect(sut.handleLivePhotoLinking({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
expect(communicationMock.send).toHaveBeenCalledWith(
ClientEvent.ASSET_HIDDEN,
assetStub.livePhotoMotionAsset.ownerId,
@ -207,7 +214,7 @@ describe(MetadataService.name, () => {
it('should queue metadata extraction for all assets without exif values', async () => {
assetMock.getWithout.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
await expect(sut.handleQueueMetadataExtraction({ force: false })).resolves.toBe(true);
await expect(sut.handleQueueMetadataExtraction({ force: false })).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.getWithout).toHaveBeenCalled();
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
@ -220,7 +227,7 @@ describe(MetadataService.name, () => {
it('should queue metadata extraction for all assets', async () => {
assetMock.getAll.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
await expect(sut.handleQueueMetadataExtraction({ force: true })).resolves.toBe(true);
await expect(sut.handleQueueMetadataExtraction({ force: true })).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.getAll).toHaveBeenCalled();
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
@ -237,7 +244,7 @@ describe(MetadataService.name, () => {
});
it('should handle an asset that could not be found', async () => {
await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(false);
await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
expect(assetMock.upsertExif).not.toHaveBeenCalled();
@ -630,19 +637,13 @@ describe(MetadataService.name, () => {
describe('handleSidecarSync', () => {
it('should do nothing if asset could not be found', async () => {
assetMock.getByIds.mockResolvedValue([]);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(false);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
expect(assetMock.save).not.toHaveBeenCalled();
});
it('should do nothing if asset has no sidecar path', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(false);
expect(assetMock.save).not.toHaveBeenCalled();
});
it('should do nothing if asset has no sidecar path', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(false);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
expect(assetMock.save).not.toHaveBeenCalled();
});
@ -650,7 +651,7 @@ describe(MetadataService.name, () => {
assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
storageMock.checkFileExists.mockResolvedValue(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS);
expect(storageMock.checkFileExists).toHaveBeenCalledWith(`${assetStub.sidecar.originalPath}.xmp`, constants.R_OK);
expect(assetMock.save).toHaveBeenCalledWith({
id: assetStub.sidecar.id,
@ -663,7 +664,7 @@ describe(MetadataService.name, () => {
storageMock.checkFileExists.mockResolvedValueOnce(false);
storageMock.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecarWithoutExt.id })).resolves.toBe(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecarWithoutExt.id })).resolves.toBe(JobStatus.SUCCESS);
expect(storageMock.checkFileExists).toHaveBeenNthCalledWith(
2,
assetStub.sidecarWithoutExt.sidecarPath,
@ -680,7 +681,7 @@ describe(MetadataService.name, () => {
storageMock.checkFileExists.mockResolvedValueOnce(true);
storageMock.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS);
expect(storageMock.checkFileExists).toHaveBeenNthCalledWith(1, assetStub.sidecar.sidecarPath, constants.R_OK);
expect(storageMock.checkFileExists).toHaveBeenNthCalledWith(
2,
@ -697,7 +698,7 @@ describe(MetadataService.name, () => {
assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
storageMock.checkFileExists.mockResolvedValue(false);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS);
expect(storageMock.checkFileExists).toHaveBeenCalledWith(`${assetStub.sidecar.originalPath}.xmp`, constants.R_OK);
expect(assetMock.save).toHaveBeenCalledWith({
id: assetStub.sidecar.id,
@ -754,13 +755,13 @@ describe(MetadataService.name, () => {
describe('handleSidecarWrite', () => {
it('should skip assets that do not exist anymore', async () => {
assetMock.getByIds.mockResolvedValue([]);
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(false);
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.FAILED);
expect(metadataMock.writeTags).not.toHaveBeenCalled();
});
it('should skip jobs with no metadata', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
await expect(sut.handleSidecarWrite({ id: assetStub.sidecar.id })).resolves.toBe(true);
await expect(sut.handleSidecarWrite({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SKIPPED);
expect(metadataMock.writeTags).not.toHaveBeenCalled();
});
@ -778,7 +779,7 @@ describe(MetadataService.name, () => {
longitude: gps,
dateTimeOriginal: date,
}),
).resolves.toBe(true);
).resolves.toBe(JobStatus.SUCCESS);
expect(metadataMock.writeTags).toHaveBeenCalledWith(assetStub.sidecar.sidecarPath, {
ImageDescription: description,
CreationDate: date,

View File

@ -26,6 +26,7 @@ import {
IStorageRepository,
ISystemConfigRepository,
ImmichTags,
JobStatus,
WithoutProperty,
} from '../repositories';
import { StorageCore } from '../storage';
@ -151,15 +152,15 @@ export class MetadataService {
await this.repository.teardown();
}
async handleLivePhotoLinking(job: IEntityJob) {
async handleLivePhotoLinking(job: IEntityJob): Promise<JobStatus> {
const { id } = job;
const [asset] = await this.assetRepository.getByIds([id], { exifInfo: true });
if (!asset?.exifInfo) {
return false;
return JobStatus.FAILED;
}
if (!asset.exifInfo.livePhotoCID) {
return true;
return JobStatus.SKIPPED;
}
const otherType = asset.type === AssetType.VIDEO ? AssetType.IMAGE : AssetType.VIDEO;
@ -171,7 +172,7 @@ export class MetadataService {
});
if (!match) {
return true;
return JobStatus.SKIPPED;
}
const [photoAsset, motionAsset] = asset.type === AssetType.IMAGE ? [asset, match] : [match, asset];
@ -183,10 +184,10 @@ export class MetadataService {
// Notify clients to hide the linked live photo asset
this.communicationRepository.send(ClientEvent.ASSET_HIDDEN, motionAsset.ownerId, motionAsset.id);
return true;
return JobStatus.SUCCESS;
}
async handleQueueMetadataExtraction(job: IBaseJob) {
async handleQueueMetadataExtraction(job: IBaseJob): Promise<JobStatus> {
const { force } = job;
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
@ -200,13 +201,13 @@ export class MetadataService {
);
}
return true;
return JobStatus.SUCCESS;
}
async handleMetadataExtraction({ id }: IEntityJob) {
async handleMetadataExtraction({ id }: IEntityJob): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset) {
return false;
return JobStatus.FAILED;
}
const { exifData, tags } = await this.exifData(asset);
@ -260,10 +261,10 @@ export class MetadataService {
metadataExtractedAt: new Date(),
});
return true;
return JobStatus.SUCCESS;
}
async handleQueueSidecar(job: IBaseJob) {
async handleQueueSidecar(job: IBaseJob): Promise<JobStatus> {
const { force } = job;
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
@ -280,22 +281,22 @@ export class MetadataService {
);
}
return true;
return JobStatus.SUCCESS;
}
handleSidecarSync({ id }: IEntityJob) {
handleSidecarSync({ id }: IEntityJob): Promise<JobStatus> {
return this.processSidecar(id, true);
}
handleSidecarDiscovery({ id }: IEntityJob) {
handleSidecarDiscovery({ id }: IEntityJob): Promise<JobStatus> {
return this.processSidecar(id, false);
}
async handleSidecarWrite(job: ISidecarWriteJob) {
async handleSidecarWrite(job: ISidecarWriteJob): Promise<JobStatus> {
const { id, description, dateTimeOriginal, latitude, longitude } = job;
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset) {
return false;
return JobStatus.FAILED;
}
const sidecarPath = asset.sidecarPath || `${asset.originalPath}.xmp`;
@ -310,7 +311,7 @@ export class MetadataService {
);
if (Object.keys(exif).length === 0) {
return true;
return JobStatus.SKIPPED;
}
await this.repository.writeTags(sidecarPath, exif);
@ -319,7 +320,7 @@ export class MetadataService {
await this.assetRepository.save({ id, sidecarPath });
}
return true;
return JobStatus.SUCCESS;
}
private async applyReverseGeocoding(asset: AssetEntity, exifData: ExifEntityWithoutGeocodeAndTypeOrm) {
@ -552,19 +553,19 @@ export class MetadataService {
return Duration.fromObject({ seconds: _seconds }).toFormat('hh:mm:ss.SSS');
}
private async processSidecar(id: string, isSync: boolean) {
private async processSidecar(id: string, isSync: boolean): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset) {
return false;
return JobStatus.FAILED;
}
if (isSync && !asset.sidecarPath) {
return false;
return JobStatus.FAILED;
}
if (!isSync && (!asset.isVisible || asset.sidecarPath)) {
return false;
return JobStatus.FAILED;
}
// XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
@ -587,11 +588,11 @@ export class MetadataService {
if (sidecarPath) {
await this.assetRepository.save({ id: asset.id, sidecarPath });
return true;
return JobStatus.SUCCESS;
}
if (!isSync) {
return false;
return JobStatus.FAILED;
}
this.logger.debug(
@ -599,6 +600,6 @@ export class MetadataService {
);
await this.assetRepository.save({ id: asset.id, sidecarPath: null });
return true;
return JobStatus.SUCCESS;
}
}

View File

@ -34,6 +34,7 @@ import {
ISearchRepository,
IStorageRepository,
ISystemConfigRepository,
JobStatus,
WithoutProperty,
} from '../repositories';
import { PersonResponseDto, mapFaces, mapPerson } from './person.dto';
@ -357,7 +358,7 @@ describe(PersonService.name, () => {
describe('handlePersonMigration', () => {
it('should not move person files', async () => {
personMock.getById.mockResolvedValue(null);
await expect(sut.handlePersonMigration(personStub.noName)).resolves.toStrictEqual(false);
await expect(sut.handlePersonMigration(personStub.noName)).resolves.toBe(JobStatus.FAILED);
});
});
@ -454,10 +455,10 @@ describe(PersonService.name, () => {
});
describe('handleQueueDetectFaces', () => {
it('should return if machine learning is disabled', async () => {
it('should skip if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleQueueDetectFaces({})).resolves.toBe(true);
await expect(sut.handleQueueDetectFaces({})).resolves.toBe(JobStatus.SKIPPED);
expect(jobMock.queue).not.toHaveBeenCalled();
expect(jobMock.queueAll).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
@ -530,19 +531,19 @@ describe(PersonService.name, () => {
});
describe('handleQueueRecognizeFaces', () => {
it('should return if machine learning is disabled', async () => {
it('should skip if machine learning is disabled', async () => {
jobMock.getJobCounts.mockResolvedValue({ active: 1, waiting: 0, paused: 0, completed: 0, failed: 0, delayed: 0 });
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(true);
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.SKIPPED);
expect(jobMock.queueAll).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});
it('should return if recognition jobs are already queued', async () => {
it('should skip if recognition jobs are already queued', async () => {
jobMock.getJobCounts.mockResolvedValue({ active: 1, waiting: 1, paused: 0, completed: 0, failed: 0, delayed: 0 });
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(true);
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.SKIPPED);
expect(jobMock.queueAll).not.toHaveBeenCalled();
});
@ -612,10 +613,10 @@ describe(PersonService.name, () => {
});
describe('handleDetectFaces', () => {
it('should return if machine learning is disabled', async () => {
it('should skip if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleDetectFaces({ id: 'foo' })).resolves.toBe(true);
await expect(sut.handleDetectFaces({ id: 'foo' })).resolves.toBe(JobStatus.SKIPPED);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});
@ -701,31 +702,31 @@ describe(PersonService.name, () => {
});
describe('handleRecognizeFaces', () => {
it('should return false if face does not exist', async () => {
it('should fail if face does not exist', async () => {
personMock.getFaceByIdWithAssets.mockResolvedValue(null);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(false);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.FAILED);
expect(personMock.reassignFaces).not.toHaveBeenCalled();
expect(personMock.create).not.toHaveBeenCalled();
expect(personMock.createFaces).not.toHaveBeenCalled();
});
it('should return false if face does not have asset', async () => {
it('should fail if face does not have asset', async () => {
const face = { ...faceStub.face1, asset: null } as AssetFaceEntity & { asset: null };
personMock.getFaceByIdWithAssets.mockResolvedValue(face);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(false);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.FAILED);
expect(personMock.reassignFaces).not.toHaveBeenCalled();
expect(personMock.create).not.toHaveBeenCalled();
expect(personMock.createFaces).not.toHaveBeenCalled();
});
it('should return true if face already has an assigned person', async () => {
it('should skip if face already has an assigned person', async () => {
personMock.getFaceByIdWithAssets.mockResolvedValue(faceStub.face1);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(true);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.SKIPPED);
expect(personMock.reassignFaces).not.toHaveBeenCalled();
expect(personMock.create).not.toHaveBeenCalled();
@ -852,10 +853,10 @@ describe(PersonService.name, () => {
});
describe('handleGeneratePersonThumbnail', () => {
it('should return if machine learning is disabled', async () => {
it('should skip if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(true);
await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(JobStatus.SKIPPED);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});

View File

@ -24,6 +24,7 @@ import {
IStorageRepository,
ISystemConfigRepository,
JobItem,
JobStatus,
UpdateFacesData,
WithoutProperty,
} from '../repositories';
@ -265,16 +266,16 @@ export class PersonService {
}
}
async handlePersonCleanup() {
async handlePersonCleanup(): Promise<JobStatus> {
const people = await this.repository.getAllWithoutFaces();
await this.delete(people);
return true;
return JobStatus.SUCCESS;
}
async handleQueueDetectFaces({ force }: IBaseJob) {
async handleQueueDetectFaces({ force }: IBaseJob): Promise<JobStatus> {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
return JobStatus.SKIPPED;
}
if (force) {
@ -294,13 +295,13 @@ export class PersonService {
);
}
return true;
return JobStatus.SUCCESS;
}
async handleDetectFaces({ id }: IEntityJob) {
async handleDetectFaces({ id }: IEntityJob): Promise<JobStatus> {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
return JobStatus.SKIPPED;
}
const relations = {
@ -311,7 +312,7 @@ export class PersonService {
};
const [asset] = await this.assetRepository.getByIds([id], relations);
if (!asset || !asset.resizePath || asset.faces?.length > 0) {
return false;
return JobStatus.FAILED;
}
const faces = await this.machineLearningRepository.detectFaces(
@ -346,13 +347,13 @@ export class PersonService {
facesRecognizedAt: new Date(),
});
return true;
return JobStatus.SUCCESS;
}
async handleQueueRecognizeFaces({ force }: IBaseJob) {
async handleQueueRecognizeFaces({ force }: IBaseJob): Promise<JobStatus> {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
return JobStatus.SKIPPED;
}
await this.jobRepository.waitForQueueCompletion(QueueName.THUMBNAIL_GENERATION, QueueName.FACE_DETECTION);
@ -364,7 +365,7 @@ export class PersonService {
this.logger.debug(
`Skipping facial recognition queueing because ${waiting} job${waiting > 1 ? 's are' : ' is'} already queued`,
);
return true;
return JobStatus.SKIPPED;
}
const facePagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
@ -377,13 +378,13 @@ export class PersonService {
);
}
return true;
return JobStatus.SUCCESS;
}
async handleRecognizeFaces({ id, deferred }: IDeferrableJob) {
async handleRecognizeFaces({ id, deferred }: IDeferrableJob): Promise<JobStatus> {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
return JobStatus.SKIPPED;
}
const face = await this.repository.getFaceByIdWithAssets(
@ -393,12 +394,12 @@ export class PersonService {
);
if (!face || !face.asset) {
this.logger.warn(`Face ${id} not found`);
return false;
return JobStatus.FAILED;
}
if (face.personId) {
this.logger.debug(`Face ${id} already has a person assigned`);
return true;
return JobStatus.SKIPPED;
}
const matches = await this.smartInfoRepository.searchFaces({
@ -411,7 +412,7 @@ export class PersonService {
// `matches` also includes the face itself
if (machineLearning.facialRecognition.minFaces > 1 && matches.length <= 1) {
this.logger.debug(`Face ${id} only matched the face itself, skipping`);
return true;
return JobStatus.SKIPPED;
}
this.logger.debug(`Face ${id} has ${matches.length} matches`);
@ -420,7 +421,7 @@ export class PersonService {
if (!isCore && !deferred) {
this.logger.debug(`Deferring non-core face ${id} for later processing`);
await this.jobRepository.queue({ name: JobName.FACIAL_RECOGNITION, data: { id, deferred: true } });
return true;
return JobStatus.SKIPPED;
}
let personId = matches.find((match) => match.face.personId)?.face.personId;
@ -450,34 +451,34 @@ export class PersonService {
await this.repository.reassignFaces({ faceIds: [id], newPersonId: personId });
}
return true;
return JobStatus.SUCCESS;
}
async handlePersonMigration({ id }: IEntityJob) {
async handlePersonMigration({ id }: IEntityJob): Promise<JobStatus> {
const person = await this.repository.getById(id);
if (!person) {
return false;
return JobStatus.FAILED;
}
await this.storageCore.movePersonFile(person, PersonPathType.FACE);
return true;
return JobStatus.SUCCESS;
}
async handleGeneratePersonThumbnail(data: IEntityJob) {
async handleGeneratePersonThumbnail(data: IEntityJob): Promise<JobStatus> {
const { machineLearning, thumbnail } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
return JobStatus.SKIPPED;
}
const person = await this.repository.getById(data.id);
if (!person?.faceAssetId) {
return false;
return JobStatus.FAILED;
}
const face = await this.repository.getFaceByIdWithAssets(person.faceAssetId);
if (face === null) {
return false;
return JobStatus.FAILED;
}
const {
@ -492,7 +493,7 @@ export class PersonService {
const [asset] = await this.assetRepository.getByIds([assetId]);
if (!asset?.resizePath) {
return false;
return JobStatus.FAILED;
}
this.logger.verbose(`Cropping face for person: ${person.id}`);
const thumbnailPath = StorageCore.getPersonThumbnailPath(person);
@ -533,7 +534,7 @@ export class PersonService {
await this.mediaRepository.resize(croppedOutput, thumbnailPath, thumbnailOptions);
await this.repository.update({ id: person.id, thumbnailPath });
return true;
return JobStatus.SUCCESS;
}
async mergePerson(auth: AuthDto, id: string, dto: MergePersonDto): Promise<BulkIdResponseDto[]> {

View File

@ -94,7 +94,13 @@ export type JobItem =
| { name: JobName.LIBRARY_QUEUE_SCAN_ALL; data: IBaseJob }
| { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob };
export type JobHandler<T = any> = (data: T) => boolean | Promise<boolean>;
export enum JobStatus {
SUCCESS = 'success',
FAILED = 'failed',
SKIPPED = 'skipped',
}
export type JobHandler<T = any> = (data: T) => Promise<JobStatus>;
export type JobItemHandler = (item: JobItem) => Promise<void>;
export const IJobRepository = 'IJobRepository';

View File

@ -10,6 +10,7 @@ import {
IMachineLearningRepository,
ISearchRepository,
ISystemConfigRepository,
JobStatus,
WithoutProperty,
} from '../repositories';
import { SystemConfigCore } from '../system-config';
@ -44,10 +45,10 @@ export class SmartInfoService {
await this.jobRepository.resume(QueueName.SMART_SEARCH);
}
async handleQueueEncodeClip({ force }: IBaseJob) {
async handleQueueEncodeClip({ force }: IBaseJob): Promise<JobStatus> {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.clip.enabled) {
return true;
return JobStatus.SKIPPED;
}
if (force) {
@ -66,22 +67,22 @@ export class SmartInfoService {
);
}
return true;
return JobStatus.SUCCESS;
}
async handleEncodeClip({ id }: IEntityJob) {
async handleEncodeClip({ id }: IEntityJob): Promise<JobStatus> {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.clip.enabled) {
return true;
return JobStatus.SKIPPED;
}
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset) {
return false;
return JobStatus.FAILED;
}
if (!asset.resizePath) {
return false;
return JobStatus.FAILED;
}
const clipEmbedding = await this.machineLearning.encodeImage(
@ -97,6 +98,6 @@ export class SmartInfoService {
await this.repository.upsert({ assetId: asset.id }, clipEmbedding);
return true;
return JobStatus.SUCCESS;
}
}

View File

@ -8,6 +8,7 @@ import {
IStorageRepository,
ISystemConfigRepository,
IUserRepository,
JobStatus,
StorageTemplateService,
defaults,
} from '@app/domain';
@ -76,7 +77,7 @@ describe(StorageTemplateService.name, () => {
describe('handleMigrationSingle', () => {
it('should skip when storage template is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.STORAGE_TEMPLATE_ENABLED, value: false }]);
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(true);
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SKIPPED);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(storageMock.checkFileExists).not.toHaveBeenCalled();
expect(storageMock.rename).not.toHaveBeenCalled();
@ -138,7 +139,9 @@ describe(StorageTemplateService.name, () => {
newPath: newMotionPicturePath,
});
await expect(sut.handleMigrationSingle({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(true);
await expect(sut.handleMigrationSingle({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoStillAsset.id], { exifInfo: true });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id], { exifInfo: true });
@ -190,7 +193,7 @@ describe(StorageTemplateService.name, () => {
newPath,
});
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(true);
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
expect(storageMock.checkFileExists).toHaveBeenCalledTimes(3);
@ -247,7 +250,7 @@ describe(StorageTemplateService.name, () => {
newPath,
});
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(true);
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
expect(storageMock.checkFileExists).toHaveBeenCalledTimes(3);
@ -298,7 +301,7 @@ describe(StorageTemplateService.name, () => {
newPath,
});
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(true);
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
expect(storageMock.checkFileExists).toHaveBeenCalledTimes(1);
@ -364,7 +367,7 @@ describe(StorageTemplateService.name, () => {
newPath,
});
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(true);
await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
expect(storageMock.checkFileExists).toHaveBeenCalledTimes(3);

View File

@ -18,6 +18,7 @@ import {
IStorageRepository,
ISystemConfigRepository,
IUserRepository,
JobStatus,
} from '../repositories';
import { StorageCore, StorageFolder } from '../storage';
import {
@ -85,16 +86,16 @@ export class StorageTemplateService {
);
}
async handleMigrationSingle({ id }: IEntityJob) {
async handleMigrationSingle({ id }: IEntityJob): Promise<JobStatus> {
const config = await this.configCore.getConfig();
const storageTemplateEnabled = config.storageTemplate.enabled;
if (!storageTemplateEnabled) {
return true;
return JobStatus.SKIPPED;
}
const [asset] = await this.assetRepository.getByIds([id], { exifInfo: true });
if (!asset) {
return false;
return JobStatus.FAILED;
}
const user = await this.userRepository.get(asset.ownerId, {});
@ -106,21 +107,21 @@ export class StorageTemplateService {
if (asset.livePhotoVideoId) {
const [livePhotoVideo] = await this.assetRepository.getByIds([asset.livePhotoVideoId], { exifInfo: true });
if (!livePhotoVideo) {
return false;
return JobStatus.FAILED;
}
const motionFilename = getLivePhotoMotionFilename(filename, livePhotoVideo.originalPath);
await this.moveAsset(livePhotoVideo, { storageLabel, filename: motionFilename });
}
return true;
return JobStatus.SUCCESS;
}
async handleMigration() {
async handleMigration(): Promise<JobStatus> {
this.logger.log('Starting storage template migration');
const { storageTemplate } = await this.configCore.getConfig();
const { enabled } = storageTemplate;
if (!enabled) {
this.logger.log('Storage template migration disabled, skipping');
return true;
return JobStatus.SKIPPED;
}
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getAll(pagination, { withExif: true }),
@ -142,7 +143,7 @@ export class StorageTemplateService {
this.logger.log('Finished storage template migration');
return true;
return JobStatus.SUCCESS;
}
async moveAsset(asset: AssetEntity, metadata: MoveAssetMetadata) {

View File

@ -1,7 +1,7 @@
import { ImmichLogger } from '@app/infra/logger';
import { Inject, Injectable } from '@nestjs/common';
import { IDeleteFilesJob } from '../job';
import { IStorageRepository } from '../repositories';
import { IStorageRepository, JobStatus } from '../repositories';
import { StorageCore, StorageFolder } from './storage.core';
@Injectable()
@ -31,6 +31,6 @@ export class StorageService {
}
}
return true;
return JobStatus.SUCCESS;
}
}

View File

@ -14,6 +14,7 @@ import {
IStorageRepository,
ISystemConfigRepository,
IUserRepository,
JobStatus,
UserFindOptions,
} from '../repositories';
import { StorageCore, StorageFolder } from '../storage';
@ -143,12 +144,12 @@ export class UserService {
return { admin, password, provided: !!providedPassword };
}
async handleUserSyncUsage() {
async handleUserSyncUsage(): Promise<JobStatus> {
await this.userRepository.syncUsage();
return true;
return JobStatus.SUCCESS;
}
async handleUserDeleteCheck() {
async handleUserDeleteCheck(): Promise<JobStatus> {
const users = await this.userRepository.getDeletedUsers();
const config = await this.configCore.getConfig();
await this.jobRepository.queueAll(
@ -158,20 +159,20 @@ export class UserService {
: [],
),
);
return true;
return JobStatus.SUCCESS;
}
async handleUserDelete({ id, force }: IEntityJob) {
async handleUserDelete({ id, force }: IEntityJob): Promise<JobStatus> {
const config = await this.configCore.getConfig();
const user = await this.userRepository.get(id, { withDeleted: true });
if (!user) {
return false;
return JobStatus.FAILED;
}
// just for extra protection here
if (!force && !this.isReadyForDeletion(user, config.user.deleteDelay)) {
this.logger.warn(`Skipped user that was not ready for deletion: id=${id}`);
return false;
return JobStatus.SKIPPED;
}
this.logger.log(`Deleting user: ${user.id}`);
@ -193,7 +194,7 @@ export class UserService {
await this.albumRepository.deleteAll(user.id);
await this.userRepository.delete(user, true);
return true;
return JobStatus.SUCCESS;
}
private isReadyForDeletion(user: UserEntity, deleteDelay: number): boolean {