	refactor(server): person thumbnail job (#4233)
* refactor(server): person thumbnail job
* fix(server): set feature photo
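In summary, the refactor changes the thumbnail job from a payload that carries the full face geometry to one keyed only by the person id; the chosen face is persisted on the person as faceAssetId and looked up again inside the handler. A simplified before/after of the job payloads, condensed from the interface changes further down (the real IFaceThumbnailJob also extended IAssetFaceJob/IBaseJob):

    // Before: the queue item had to carry everything needed to crop the face.
    interface IFaceThumbnailJob {
      assetId: string;
      personId: string;
      boundingBox: { x1: number; y1: number; x2: number; y2: number };
      imageWidth: number;
      imageHeight: number;
    }

    // After: GENERATE_PERSON_THUMBNAIL takes a plain entity job; the handler
    // reads person.faceAssetId and the stored AssetFaceEntity to recover the box.
    interface IEntityJob {
      id: string;
      source?: 'upload';
    }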
		| @@ -26,45 +26,19 @@ import { FacialRecognitionService } from './facial-recognition.services'; | ||||
|  | ||||
| const croppedFace = Buffer.from('Cropped Face'); | ||||
|  | ||||
| const face = { | ||||
|   start: { | ||||
|     assetId: 'asset-1', | ||||
|     personId: 'person-1', | ||||
|     boundingBox: { | ||||
|       x1: 5, | ||||
|       y1: 5, | ||||
|       x2: 505, | ||||
|       y2: 505, | ||||
|     }, | ||||
|     imageHeight: 1000, | ||||
|     imageWidth: 1000, | ||||
|   }, | ||||
|   middle: { | ||||
|     assetId: 'asset-1', | ||||
|     personId: 'person-1', | ||||
|     boundingBox: { | ||||
|       x1: 100, | ||||
|       y1: 100, | ||||
|       x2: 200, | ||||
|       y2: 200, | ||||
|     }, | ||||
|     imageHeight: 500, | ||||
|     imageWidth: 400, | ||||
|     embedding: [1, 2, 3, 4], | ||||
|     score: 0.2, | ||||
|   }, | ||||
|   end: { | ||||
|     assetId: 'asset-1', | ||||
|     personId: 'person-1', | ||||
|     boundingBox: { | ||||
|       x1: 300, | ||||
|       y1: 300, | ||||
|       x2: 495, | ||||
|       y2: 495, | ||||
|     }, | ||||
|     imageHeight: 500, | ||||
|     imageWidth: 500, | ||||
| const detectFaceMock = { | ||||
|   assetId: 'asset-1', | ||||
|   personId: 'person-1', | ||||
|   boundingBox: { | ||||
|     x1: 100, | ||||
|     y1: 100, | ||||
|     x2: 200, | ||||
|     y2: 200, | ||||
|   }, | ||||
|   imageHeight: 500, | ||||
|   imageWidth: 400, | ||||
|   embedding: [1, 2, 3, 4], | ||||
|   score: 0.2, | ||||
| }; | ||||
|  | ||||
| const faceSearch = { | ||||
| @@ -214,7 +188,7 @@ describe(FacialRecognitionService.name, () => { | ||||
|     }); | ||||
|  | ||||
|     it('should match existing people', async () => { | ||||
|       machineLearningMock.detectFaces.mockResolvedValue([face.middle]); | ||||
|       machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]); | ||||
|       searchMock.searchFaces.mockResolvedValue(faceSearch.oneMatch); | ||||
|       assetMock.getByIds.mockResolvedValue([assetStub.image]); | ||||
|       await sut.handleRecognizeFaces({ id: assetStub.image.id }); | ||||
| @@ -233,7 +207,7 @@ describe(FacialRecognitionService.name, () => { | ||||
|     }); | ||||
|  | ||||
|     it('should create a new person', async () => { | ||||
|       machineLearningMock.detectFaces.mockResolvedValue([face.middle]); | ||||
|       machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]); | ||||
|       searchMock.searchFaces.mockResolvedValue(faceSearch.oneRemoteMatch); | ||||
|       personMock.create.mockResolvedValue(personStub.noName); | ||||
|       assetMock.getByIds.mockResolvedValue([assetStub.image]); | ||||
| @@ -253,60 +227,56 @@ describe(FacialRecognitionService.name, () => { | ||||
|         imageWidth: 400, | ||||
|       }); | ||||
|       expect(jobMock.queue.mock.calls).toEqual([ | ||||
|         [ | ||||
|           { | ||||
|             name: JobName.GENERATE_FACE_THUMBNAIL, | ||||
|             data: { | ||||
|               assetId: 'asset-1', | ||||
|               personId: 'person-1', | ||||
|               boundingBox: { | ||||
|                 x1: 100, | ||||
|                 y1: 100, | ||||
|                 x2: 200, | ||||
|                 y2: 200, | ||||
|               }, | ||||
|               imageHeight: 500, | ||||
|               imageWidth: 400, | ||||
|               score: 0.2, | ||||
|             }, | ||||
|           }, | ||||
|         ], | ||||
|         [{ name: JobName.SEARCH_INDEX_FACE, data: { personId: 'person-1', assetId: 'asset-id' } }], | ||||
|         [{ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: 'person-1' } }], | ||||
|       ]); | ||||
|     }); | ||||
|   }); | ||||
|  | ||||
|   describe('handleGenerateFaceThumbnail', () => { | ||||
|   describe('handleGeneratePersonThumbnail', () => { | ||||
|     it('should return if machine learning is disabled', async () => { | ||||
|       configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]); | ||||
|  | ||||
|       await expect(sut.handleGenerateFaceThumbnail(face.middle)).resolves.toBe(true); | ||||
|       await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(true); | ||||
|       expect(assetMock.getByIds).not.toHaveBeenCalled(); | ||||
|       expect(configMock.load).toHaveBeenCalled(); | ||||
|     }); | ||||
|  | ||||
|     it('should skip an asset not found', async () => { | ||||
|       assetMock.getByIds.mockResolvedValue([]); | ||||
|  | ||||
|       await sut.handleGenerateFaceThumbnail(face.middle); | ||||
|  | ||||
|     it('should skip a person not found', async () => { | ||||
|       personMock.getById.mockResolvedValue(null); | ||||
|       await sut.handleGeneratePersonThumbnail({ id: 'person-1' }); | ||||
|       expect(mediaMock.crop).not.toHaveBeenCalled(); | ||||
|     }); | ||||
|  | ||||
|     it('should skip an asset without a thumbnail', async () => { | ||||
|     it('should skip a person without a face asset id', async () => { | ||||
|       personMock.getById.mockResolvedValue(personStub.noThumbnail); | ||||
|       await sut.handleGeneratePersonThumbnail({ id: 'person-1' }); | ||||
|       expect(mediaMock.crop).not.toHaveBeenCalled(); | ||||
|     }); | ||||
|  | ||||
|     it('should skip a person with a face asset id not found', async () => { | ||||
|       personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId }); | ||||
|       faceMock.getByIds.mockResolvedValue([faceStub.face1]); | ||||
|       await sut.handleGeneratePersonThumbnail({ id: 'person-1' }); | ||||
|       expect(mediaMock.crop).not.toHaveBeenCalled(); | ||||
|     }); | ||||
|  | ||||
|     it('should skip a person with a face asset id without a thumbnail', async () => { | ||||
|       personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId }); | ||||
|       faceMock.getByIds.mockResolvedValue([faceStub.face1]); | ||||
|       assetMock.getByIds.mockResolvedValue([assetStub.noResizePath]); | ||||
|  | ||||
|       await sut.handleGenerateFaceThumbnail(face.middle); | ||||
|  | ||||
|       await sut.handleGeneratePersonThumbnail({ id: 'person-1' }); | ||||
|       expect(mediaMock.crop).not.toHaveBeenCalled(); | ||||
|     }); | ||||
|  | ||||
|     it('should generate a thumbnail', async () => { | ||||
|       personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId }); | ||||
|       faceMock.getByIds.mockResolvedValue([faceStub.middle]); | ||||
|       assetMock.getByIds.mockResolvedValue([assetStub.image]); | ||||
|  | ||||
|       await sut.handleGenerateFaceThumbnail(face.middle); | ||||
|       await sut.handleGeneratePersonThumbnail({ id: 'person-1' }); | ||||
|  | ||||
|       expect(assetMock.getByIds).toHaveBeenCalledWith(['asset-1']); | ||||
|       expect(assetMock.getByIds).toHaveBeenCalledWith([faceStub.middle.assetId]); | ||||
|       expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/pe/rs'); | ||||
|       expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', { | ||||
|         left: 95, | ||||
| @@ -321,16 +291,17 @@ describe(FacialRecognitionService.name, () => { | ||||
|         colorspace: Colorspace.P3, | ||||
|       }); | ||||
|       expect(personMock.update).toHaveBeenCalledWith({ | ||||
|         faceAssetId: 'asset-1', | ||||
|         id: 'person-1', | ||||
|         thumbnailPath: 'upload/thumbs/user-id/pe/rs/person-1.jpeg', | ||||
|       }); | ||||
|     }); | ||||
|  | ||||
|     it('should generate a thumbnail without going negative', async () => { | ||||
|       personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.start.assetId }); | ||||
|       faceMock.getByIds.mockResolvedValue([faceStub.start]); | ||||
|       assetMock.getByIds.mockResolvedValue([assetStub.image]); | ||||
|  | ||||
|       await sut.handleGenerateFaceThumbnail(face.start); | ||||
|       await sut.handleGeneratePersonThumbnail({ id: 'person-1' }); | ||||
|  | ||||
|       expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', { | ||||
|         left: 0, | ||||
| @@ -347,9 +318,11 @@ describe(FacialRecognitionService.name, () => { | ||||
|     }); | ||||
|  | ||||
|     it('should generate a thumbnail without overflowing', async () => { | ||||
|       personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.end.assetId }); | ||||
|       faceMock.getByIds.mockResolvedValue([faceStub.end]); | ||||
|       assetMock.getByIds.mockResolvedValue([assetStub.image]); | ||||
|  | ||||
|       await sut.handleGenerateFaceThumbnail(face.end); | ||||
|       await sut.handleGeneratePersonThumbnail({ id: 'person-1' }); | ||||
|  | ||||
|       expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', { | ||||
|         left: 297, | ||||
|   | ||||
| @@ -1,7 +1,8 @@ | ||||
| import { PersonEntity } from '@app/infra/entities'; | ||||
| import { Inject, Logger } from '@nestjs/common'; | ||||
| import { IAssetRepository, WithoutProperty } from '../asset'; | ||||
| import { usePagination } from '../domain.util'; | ||||
| import { IBaseJob, IEntityJob, IFaceThumbnailJob, IJobRepository, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job'; | ||||
| import { IBaseJob, IEntityJob, IJobRepository, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job'; | ||||
| import { CropOptions, FACE_THUMBNAIL_SIZE, IMediaRepository } from '../media'; | ||||
| import { IPersonRepository } from '../person/person.repository'; | ||||
| import { ISearchRepository } from '../search/search.repository'; | ||||
| @@ -89,18 +90,14 @@ export class FacialRecognitionService { | ||||
|         personId = faceSearchResult.items[0].personId; | ||||
|       } | ||||
|  | ||||
|       let newPerson: PersonEntity | null = null; | ||||
|       if (!personId) { | ||||
|         this.logger.debug('No matches, creating a new person.'); | ||||
|         const person = await this.personRepository.create({ ownerId: asset.ownerId }); | ||||
|         personId = person.id; | ||||
|         await this.jobRepository.queue({ | ||||
|           name: JobName.GENERATE_FACE_THUMBNAIL, | ||||
|           data: { assetId: asset.id, personId, ...rest }, | ||||
|         }); | ||||
|         newPerson = await this.personRepository.create({ ownerId: asset.ownerId }); | ||||
|         personId = newPerson.id; | ||||
|       } | ||||
|  | ||||
|       const faceId: AssetFaceId = { assetId: asset.id, personId }; | ||||
|  | ||||
|       await this.faceRepository.create({ | ||||
|         ...faceId, | ||||
|         embedding, | ||||
| @@ -112,6 +109,11 @@ export class FacialRecognitionService { | ||||
|         boundingBoxY2: rest.boundingBox.y2, | ||||
|       }); | ||||
|       await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_FACE, data: faceId }); | ||||
|  | ||||
|       if (newPerson) { | ||||
|         await this.personRepository.update({ id: personId, faceAssetId: asset.id }); | ||||
|         await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: newPerson.id } }); | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     return true; | ||||
| @@ -132,24 +134,41 @@ export class FacialRecognitionService { | ||||
|     return true; | ||||
|   } | ||||
|  | ||||
|   async handleGenerateFaceThumbnail(data: IFaceThumbnailJob) { | ||||
|     const { machineLearning } = await this.configCore.getConfig(); | ||||
|   async handleGeneratePersonThumbnail(data: IEntityJob) { | ||||
|     const { machineLearning, thumbnail } = await this.configCore.getConfig(); | ||||
|     if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) { | ||||
|       return true; | ||||
|     } | ||||
|  | ||||
|     const { assetId, personId, boundingBox, imageWidth, imageHeight } = data; | ||||
|     const person = await this.personRepository.getById(data.id); | ||||
|     if (!person?.faceAssetId) { | ||||
|       return false; | ||||
|     } | ||||
|  | ||||
|     const [face] = await this.faceRepository.getByIds([{ personId: person.id, assetId: person.faceAssetId }]); | ||||
|     if (!face) { | ||||
|       return false; | ||||
|     } | ||||
|  | ||||
|     const { | ||||
|       assetId, | ||||
|       personId, | ||||
|       boundingBoxX1: x1, | ||||
|       boundingBoxX2: x2, | ||||
|       boundingBoxY1: y1, | ||||
|       boundingBoxY2: y2, | ||||
|       imageWidth, | ||||
|       imageHeight, | ||||
|     } = face; | ||||
|  | ||||
|     const [asset] = await this.assetRepository.getByIds([assetId]); | ||||
|     if (!asset || !asset.resizePath) { | ||||
|     if (!asset?.resizePath) { | ||||
|       return false; | ||||
|     } | ||||
|  | ||||
|     this.logger.verbose(`Cropping face for person: ${personId}`); | ||||
|  | ||||
|     const output = this.storageCore.ensurePath(StorageFolder.THUMBNAILS, asset.ownerId, `${personId}.jpeg`); | ||||
|  | ||||
|     const { x1, y1, x2, y2 } = boundingBox; | ||||
|     const thumbnailPath = this.storageCore.ensurePath(StorageFolder.THUMBNAILS, asset.ownerId, `${personId}.jpeg`); | ||||
|  | ||||
|     const halfWidth = (x2 - x1) / 2; | ||||
|     const halfHeight = (y2 - y1) / 2; | ||||
| @@ -175,7 +194,6 @@ export class FacialRecognitionService { | ||||
|       height: newHalfSize * 2, | ||||
|     }; | ||||
|  | ||||
|     const { thumbnail } = await this.configCore.getConfig(); | ||||
|     const croppedOutput = await this.mediaRepository.crop(asset.resizePath, cropOptions); | ||||
|     const thumbnailOptions = { | ||||
|       format: 'jpeg', | ||||
| @@ -183,8 +201,9 @@ export class FacialRecognitionService { | ||||
|       colorspace: thumbnail.colorspace, | ||||
|       quality: thumbnail.quality, | ||||
|     } as const; | ||||
|     await this.mediaRepository.resize(croppedOutput, output, thumbnailOptions); | ||||
|     await this.personRepository.update({ id: personId, thumbnailPath: output, faceAssetId: data.assetId }); | ||||
|  | ||||
|     await this.mediaRepository.resize(croppedOutput, thumbnailPath, thumbnailOptions); | ||||
|     await this.personRepository.update({ id: personId, thumbnailPath }); | ||||
|  | ||||
|     return true; | ||||
|   } | ||||
|   | ||||
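The hunk above elides the middle of the crop computation in handleGeneratePersonThumbnail. Below is a sketch of that step, reconstructed so that it matches the crop origins asserted in the spec (left 95 for the middle fixture, 0 for start, 297 for end); the shipped code may differ in detail:

    // Assumed reconstruction: centre a square on the face, zoom out ~10%,
    // then shrink the square so it never leaves the image bounds.
    const middleX = Math.round(x1 + halfWidth);
    const middleY = Math.round(y1 + halfHeight);

    const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);

    const newHalfSize = Math.min(
      middleX - Math.max(0, middleX - targetHalfSize),
      middleY - Math.max(0, middleY - targetHalfSize),
      Math.min(imageWidth - 1, middleX + targetHalfSize) - middleX,
      Math.min(imageHeight - 1, middleY + targetHalfSize) - middleY,
    );

    const cropOptions: CropOptions = {
      left: middleX - newHalfSize,
      top: middleY - newHalfSize,
      width: newHalfSize * 2,
      height: newHalfSize * 2,
    };

    // e.g. faceStub.middle (100..200 box in a 400x500 image) gives
    // middle (150, 150), targetHalfSize 55, newHalfSize 55 -> left/top 95, size 110.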
| @@ -30,7 +30,7 @@ export enum JobName { | ||||
|   GENERATE_JPEG_THUMBNAIL = 'generate-jpeg-thumbnail', | ||||
|   GENERATE_WEBP_THUMBNAIL = 'generate-webp-thumbnail', | ||||
|   GENERATE_THUMBHASH_THUMBNAIL = 'generate-thumbhash-thumbnail', | ||||
|   GENERATE_FACE_THUMBNAIL = 'generate-face-thumbnail', | ||||
|   GENERATE_PERSON_THUMBNAIL = 'generate-person-thumbnail', | ||||
|  | ||||
|   // metadata | ||||
|   QUEUE_METADATA_EXTRACTION = 'queue-metadata-extraction', | ||||
| @@ -113,7 +113,7 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = { | ||||
|   [JobName.GENERATE_JPEG_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION, | ||||
|   [JobName.GENERATE_WEBP_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION, | ||||
|   [JobName.GENERATE_THUMBHASH_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION, | ||||
|   [JobName.GENERATE_FACE_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION, | ||||
|   [JobName.GENERATE_PERSON_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION, | ||||
|  | ||||
|   // metadata | ||||
|   [JobName.QUEUE_METADATA_EXTRACTION]: QueueName.METADATA_EXTRACTION, | ||||
|   | ||||
| @@ -1,5 +1,3 @@ | ||||
| import { BoundingBox } from '../smart-info'; | ||||
|  | ||||
| export interface IBaseJob { | ||||
|   force?: boolean; | ||||
| } | ||||
| @@ -9,14 +7,6 @@ export interface IAssetFaceJob extends IBaseJob { | ||||
|   personId: string; | ||||
| } | ||||
|  | ||||
| export interface IFaceThumbnailJob extends IAssetFaceJob { | ||||
|   imageWidth: number; | ||||
|   imageHeight: number; | ||||
|   boundingBox: BoundingBox; | ||||
|   assetId: string; | ||||
|   personId: string; | ||||
| } | ||||
|  | ||||
| export interface IEntityJob extends IBaseJob { | ||||
|   id: string; | ||||
|   source?: 'upload'; | ||||
|   | ||||
| @@ -6,7 +6,6 @@ import { | ||||
|   IBulkEntityJob, | ||||
|   IDeleteFilesJob, | ||||
|   IEntityJob, | ||||
|   IFaceThumbnailJob, | ||||
|   ILibraryFileJob, | ||||
|   ILibraryRefreshJob, | ||||
|   IOfflineLibraryFileJob, | ||||
| @@ -68,7 +67,7 @@ export type JobItem = | ||||
|   // Recognize Faces | ||||
|   | { name: JobName.QUEUE_RECOGNIZE_FACES; data: IBaseJob } | ||||
|   | { name: JobName.RECOGNIZE_FACES; data: IEntityJob } | ||||
|   | { name: JobName.GENERATE_FACE_THUMBNAIL; data: IFaceThumbnailJob } | ||||
|   | { name: JobName.GENERATE_PERSON_THUMBNAIL; data: IEntityJob } | ||||
|  | ||||
|   // Clip Embedding | ||||
|   | { name: JobName.QUEUE_ENCODE_CLIP; data: IBaseJob } | ||||
|   | ||||
| @@ -73,19 +73,8 @@ describe(MediaService.name, () => { | ||||
|       expect(personMock.getAll).toHaveBeenCalled(); | ||||
|       expect(personMock.getAllWithoutThumbnail).not.toHaveBeenCalled(); | ||||
|       expect(jobMock.queue).toHaveBeenCalledWith({ | ||||
|         name: JobName.GENERATE_FACE_THUMBNAIL, | ||||
|         data: { | ||||
|           imageWidth: faceStub.face1.imageWidth, | ||||
|           imageHeight: faceStub.face1.imageHeight, | ||||
|           boundingBox: { | ||||
|             x1: faceStub.face1.boundingBoxX1, | ||||
|             x2: faceStub.face1.boundingBoxX2, | ||||
|             y1: faceStub.face1.boundingBoxY1, | ||||
|             y2: faceStub.face1.boundingBoxY2, | ||||
|           }, | ||||
|           assetId: faceStub.face1.assetId, | ||||
|           personId: personStub.newThumbnail.id, | ||||
|         }, | ||||
|         name: JobName.GENERATE_PERSON_THUMBNAIL, | ||||
|         data: { id: personStub.newThumbnail.id }, | ||||
|       }); | ||||
|     }); | ||||
|  | ||||
| @@ -106,18 +95,9 @@ describe(MediaService.name, () => { | ||||
|       expect(personMock.getAllWithoutThumbnail).toHaveBeenCalled(); | ||||
|       expect(personMock.getRandomFace).toHaveBeenCalled(); | ||||
|       expect(jobMock.queue).toHaveBeenCalledWith({ | ||||
|         name: JobName.GENERATE_FACE_THUMBNAIL, | ||||
|         name: JobName.GENERATE_PERSON_THUMBNAIL, | ||||
|         data: { | ||||
|           imageWidth: faceStub.face1.imageWidth, | ||||
|           imageHeight: faceStub.face1.imageHeight, | ||||
|           boundingBox: { | ||||
|             x1: faceStub.face1.boundingBoxX1, | ||||
|             x2: faceStub.face1.boundingBoxX2, | ||||
|             y1: faceStub.face1.boundingBoxY1, | ||||
|             y2: faceStub.face1.boundingBoxY2, | ||||
|           }, | ||||
|           assetId: faceStub.face1.assetId, | ||||
|           personId: personStub.newThumbnail.id, | ||||
|           id: personStub.newThumbnail.id, | ||||
|         }, | ||||
|       }); | ||||
|     }); | ||||
|   | ||||
| @@ -53,27 +53,16 @@ export class MediaService { | ||||
|     const people = force ? await this.personRepository.getAll() : await this.personRepository.getAllWithoutThumbnail(); | ||||
|  | ||||
|     for (const person of people) { | ||||
|       // use stored asset for generating thumbnail or pick a random one if not present | ||||
|       const face = person.faceAssetId | ||||
|         ? await this.personRepository.getFaceById({ personId: person.id, assetId: person.faceAssetId }) | ||||
|         : await this.personRepository.getRandomFace(person.id); | ||||
|       if (face) { | ||||
|         await this.jobRepository.queue({ | ||||
|           name: JobName.GENERATE_FACE_THUMBNAIL, | ||||
|           data: { | ||||
|             imageWidth: face.imageWidth, | ||||
|             imageHeight: face.imageHeight, | ||||
|             boundingBox: { | ||||
|               x1: face.boundingBoxX1, | ||||
|               x2: face.boundingBoxX2, | ||||
|               y1: face.boundingBoxY1, | ||||
|               y2: face.boundingBoxY2, | ||||
|             }, | ||||
|             assetId: face.assetId, | ||||
|             personId: person.id, | ||||
|           }, | ||||
|         }); | ||||
|       if (!person.faceAssetId) { | ||||
|         const face = await this.personRepository.getRandomFace(person.id); | ||||
|         if (!face) { | ||||
|           continue; | ||||
|         } | ||||
|  | ||||
|         await this.personRepository.update({ id: person.id, faceAssetId: face.assetId }); | ||||
|       } | ||||
|  | ||||
|       await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } }); | ||||
|     } | ||||
|  | ||||
|     return true; | ||||
|   | ||||
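For reference, here is the person-repository surface this refactor leans on, assembled from the calls visible in this diff (return types and exact signatures are assumptions; the real IPersonRepository declares more members):

    import { AssetFaceEntity, PersonEntity } from '@app/infra/entities';

    interface AssetFaceId {
      assetId: string;
      personId: string;
    }

    // Sketch only: just enough of IPersonRepository for the thumbnail flow above.
    interface PersonRepositorySketch {
      getAll(): Promise<PersonEntity[]>;
      getAllWithoutThumbnail(): Promise<PersonEntity[]>;
      getById(id: string): Promise<PersonEntity | null>;
      getRandomFace(personId: string): Promise<AssetFaceEntity | null>;
      getFaceById(id: AssetFaceId): Promise<AssetFaceEntity | null>;
      create(person: Partial<PersonEntity>): Promise<PersonEntity>;
      update(person: Partial<PersonEntity> & { id: string }): Promise<PersonEntity>;
    }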
| @@ -249,7 +249,9 @@ describe(PersonService.name, () => { | ||||
|  | ||||
|     it("should update a person's thumbnailPath", async () => { | ||||
|       personMock.getById.mockResolvedValue(personStub.withName); | ||||
|       personMock.update.mockResolvedValue(personStub.withName); | ||||
|       personMock.getFaceById.mockResolvedValue(faceStub.face1); | ||||
|       accessMock.asset.hasOwnerAccess.mockResolvedValue(true); | ||||
|       accessMock.person.hasOwnerAccess.mockResolvedValue(true); | ||||
|  | ||||
|       await expect( | ||||
| @@ -257,25 +259,12 @@ describe(PersonService.name, () => { | ||||
|       ).resolves.toEqual(responseDto); | ||||
|  | ||||
|       expect(personMock.getById).toHaveBeenCalledWith('person-1'); | ||||
|       expect(personMock.update).toHaveBeenCalledWith({ id: 'person-1', faceAssetId: faceStub.face1.assetId }); | ||||
|       expect(personMock.getFaceById).toHaveBeenCalledWith({ | ||||
|         assetId: faceStub.face1.assetId, | ||||
|         personId: 'person-1', | ||||
|       }); | ||||
|       expect(jobMock.queue).toHaveBeenCalledWith({ | ||||
|         name: JobName.GENERATE_FACE_THUMBNAIL, | ||||
|         data: { | ||||
|           assetId: faceStub.face1.assetId, | ||||
|           personId: 'person-1', | ||||
|           boundingBox: { | ||||
|             x1: faceStub.face1.boundingBoxX1, | ||||
|             x2: faceStub.face1.boundingBoxX2, | ||||
|             y1: faceStub.face1.boundingBoxY1, | ||||
|             y2: faceStub.face1.boundingBoxY2, | ||||
|           }, | ||||
|           imageHeight: faceStub.face1.imageHeight, | ||||
|           imageWidth: faceStub.face1.imageWidth, | ||||
|         }, | ||||
|       }); | ||||
|       expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: 'person-1' } }); | ||||
|       expect(accessMock.person.hasOwnerAccess).toHaveBeenCalledWith(authStub.admin.id, 'person-1'); | ||||
|     }); | ||||
|  | ||||
|   | ||||
| @@ -77,8 +77,10 @@ export class PersonService { | ||||
|     await this.access.requirePermission(authUser, Permission.PERSON_WRITE, id); | ||||
|     let person = await this.findOrFail(id); | ||||
|  | ||||
|     if (dto.name !== undefined || dto.birthDate !== undefined || dto.isHidden !== undefined) { | ||||
|       person = await this.repository.update({ id, name: dto.name, birthDate: dto.birthDate, isHidden: dto.isHidden }); | ||||
|     const { name, birthDate, isHidden, featureFaceAssetId: assetId } = dto; | ||||
|  | ||||
|     if (name !== undefined || birthDate !== undefined || isHidden !== undefined) { | ||||
|       person = await this.repository.update({ id, name, birthDate, isHidden }); | ||||
|       if (this.needsSearchIndexUpdate(dto)) { | ||||
|         const assets = await this.repository.getAssets(id); | ||||
|         const ids = assets.map((asset) => asset.id); | ||||
| @@ -86,28 +88,15 @@ export class PersonService { | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     if (dto.featureFaceAssetId) { | ||||
|       const assetId = dto.featureFaceAssetId; | ||||
|     if (assetId) { | ||||
|       await this.access.requirePermission(authUser, Permission.ASSET_READ, assetId); | ||||
|       const face = await this.repository.getFaceById({ personId: id, assetId }); | ||||
|       if (!face) { | ||||
|         throw new BadRequestException('Invalid assetId for feature face'); | ||||
|       } | ||||
|  | ||||
|       await this.jobRepository.queue({ | ||||
|         name: JobName.GENERATE_FACE_THUMBNAIL, | ||||
|         data: { | ||||
|           personId: id, | ||||
|           assetId, | ||||
|           boundingBox: { | ||||
|             x1: face.boundingBoxX1, | ||||
|             x2: face.boundingBoxX2, | ||||
|             y1: face.boundingBoxY1, | ||||
|             y2: face.boundingBoxY2, | ||||
|           }, | ||||
|           imageHeight: face.imageHeight, | ||||
|           imageWidth: face.imageWidth, | ||||
|         }, | ||||
|       }); | ||||
|       person = await this.repository.update({ id, faceAssetId: assetId }); | ||||
|       await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id } }); | ||||
|     } | ||||
|  | ||||
|     return mapPerson(person); | ||||
|   | ||||
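The feature-photo fix means that updating a person with featureFaceAssetId now persists faceAssetId and queues the person thumbnail job instead of queueing a face-thumbnail job directly. A hedged usage sketch, assuming the existing person update route is PUT /api/person/:id and that API-key auth uses the x-api-key header (neither is part of this diff):

    // Hypothetical values; replace with a real server URL, API key, and ids.
    const baseUrl = 'http://localhost:2283/api';
    const apiKey = 'YOUR_API_KEY';

    async function setFeaturePhoto(personId: string, featureFaceAssetId: string) {
      const res = await fetch(`${baseUrl}/person/${personId}`, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json', 'x-api-key': apiKey },
        body: JSON.stringify({ featureFaceAssetId }),
      });
      if (!res.ok) {
        throw new Error(`updatePerson failed: ${res.status}`);
      }
      // The server responds with the mapped person; the new thumbnail is
      // generated asynchronously by the GENERATE_PERSON_THUMBNAIL job.
      return res.json();
    }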
| @@ -70,7 +70,7 @@ export class JobRepository implements IJobRepository { | ||||
|  | ||||
|   private getJobOptions(item: JobItem): JobsOptions | null { | ||||
|     switch (item.name) { | ||||
|       case JobName.GENERATE_FACE_THUMBNAIL: | ||||
|       case JobName.GENERATE_PERSON_THUMBNAIL: | ||||
|         return { priority: 1 }; | ||||
|  | ||||
|       default: | ||||
|   | ||||
| @@ -78,7 +78,7 @@ export class AppService { | ||||
|       [JobName.LINK_LIVE_PHOTOS]: (data) => this.metadataProcessor.handleLivePhotoLinking(data), | ||||
|       [JobName.QUEUE_RECOGNIZE_FACES]: (data) => this.facialRecognitionService.handleQueueRecognizeFaces(data), | ||||
|       [JobName.RECOGNIZE_FACES]: (data) => this.facialRecognitionService.handleRecognizeFaces(data), | ||||
|       [JobName.GENERATE_FACE_THUMBNAIL]: (data) => this.facialRecognitionService.handleGenerateFaceThumbnail(data), | ||||
|       [JobName.GENERATE_PERSON_THUMBNAIL]: (data) => this.facialRecognitionService.handleGeneratePersonThumbnail(data), | ||||
|       [JobName.PERSON_CLEANUP]: () => this.personService.handlePersonCleanup(), | ||||
|       [JobName.QUEUE_SIDECAR]: (data) => this.metadataService.handleQueueSidecar(data), | ||||
|       [JobName.SIDECAR_DISCOVERY]: (data) => this.metadataService.handleSidecarDiscovery(data), | ||||
|   | ||||
server/test/fixtures/face.stub.ts (39 changed lines, vendored)
							| @@ -55,4 +55,43 @@ export const faceStub = { | ||||
|     imageHeight: 1024, | ||||
|     imageWidth: 1024, | ||||
|   }), | ||||
|   start: Object.freeze<AssetFaceEntity>({ | ||||
|     assetId: assetStub.image.id, | ||||
|     asset: assetStub.image, | ||||
|     personId: personStub.newThumbnail.id, | ||||
|     person: personStub.newThumbnail, | ||||
|     embedding: [1, 2, 3, 4], | ||||
|     boundingBoxX1: 5, | ||||
|     boundingBoxY1: 5, | ||||
|     boundingBoxX2: 505, | ||||
|     boundingBoxY2: 505, | ||||
|     imageHeight: 1000, | ||||
|     imageWidth: 1000, | ||||
|   }), | ||||
|   middle: Object.freeze<AssetFaceEntity>({ | ||||
|     assetId: assetStub.image.id, | ||||
|     asset: assetStub.image, | ||||
|     personId: personStub.newThumbnail.id, | ||||
|     person: personStub.newThumbnail, | ||||
|     embedding: [1, 2, 3, 4], | ||||
|     boundingBoxX1: 100, | ||||
|     boundingBoxY1: 100, | ||||
|     boundingBoxX2: 200, | ||||
|     boundingBoxY2: 200, | ||||
|     imageHeight: 500, | ||||
|     imageWidth: 400, | ||||
|   }), | ||||
|   end: Object.freeze<AssetFaceEntity>({ | ||||
|     assetId: assetStub.image.id, | ||||
|     asset: assetStub.image, | ||||
|     personId: personStub.newThumbnail.id, | ||||
|     person: personStub.newThumbnail, | ||||
|     embedding: [1, 2, 3, 4], | ||||
|     boundingBoxX1: 300, | ||||
|     boundingBoxY1: 300, | ||||
|     boundingBoxX2: 495, | ||||
|     boundingBoxY2: 495, | ||||
|     imageHeight: 500, | ||||
|     imageWidth: 500, | ||||
|   }), | ||||
| }; | ||||
|   | ||||