refactor(server): merge facial-recognition and person (#4237)
* move facial recognition service into person service
* merge face repository and person repository
* fix imports
This commit is contained in:
parent
c3d6d69262
commit
0a22e64799
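
In broad strokes, the refactor folds the standalone face repository and the facial-recognition job handlers into the existing person abstractions: IFaceRepository is deleted and its methods reappear on IPersonRepository as getAllFaces, getFacesByIds, and createFace (with AssetFaceId moving along with them), while the handlers from FacialRecognitionService (handleQueueRecognizeFaces, handleRecognizeFaces, handlePersonMigration, handleGeneratePersonThumbnail) move onto PersonService. Below is a minimal sketch of the merged repository surface, condensed from the interfaces in this diff; the person methods shown are only a few of many, their parameter names are illustrative, and entity shapes are omitted.

```typescript
// Sketch only: condensed from the interfaces in this diff, not the full IPersonRepository.
import { AssetFaceEntity, PersonEntity } from '@app/infra/entities';

// AssetFaceId now lives in person.repository.ts instead of the deleted face.repository.ts.
export interface AssetFaceId {
  assetId: string;
  personId: string;
}

export interface IPersonRepository {
  // pre-existing person methods (a few of many)
  getById(personId: string): Promise<PersonEntity | null>;
  create(entity: Partial<PersonEntity>): Promise<PersonEntity>;
  update(entity: Partial<PersonEntity>): Promise<PersonEntity>;
  deleteAll(): Promise<number>;

  // absorbed from the deleted IFaceRepository, renamed with an explicit "Face" spelling
  getAllFaces(): Promise<AssetFaceEntity[]>;                              // was IFaceRepository.getAll()
  getFacesByIds(ids: AssetFaceId[]): Promise<AssetFaceEntity[]>;          // was IFaceRepository.getByIds()
  createFace(entity: Partial<AssetFaceEntity>): Promise<AssetFaceEntity>; // was IFaceRepository.create()
}
```

Callers change accordingly: PersonService and SearchService now resolve faces through the person repository (getFacesByIds, getAllFaces, createFace), which is why the specs below swap faceMock / newFaceRepositoryMock for personMock.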
@@ -4,7 +4,6 @@ import { APIKeyService } from './api-key';
import { AssetService } from './asset';
import { AuditService } from './audit';
import { AuthService } from './auth';
import { FacialRecognitionService } from './facial-recognition';
import { JobService } from './job';
import { LibraryService } from './library';
import { MediaService } from './media';
@@ -27,7 +26,6 @@ const providers: Provider[] = [
AssetService,
AuditService,
AuthService,
FacialRecognitionService,
JobService,
MediaService,
MetadataService,
@@ -1,14 +0,0 @@
import { AssetFaceEntity } from '@app/infra/entities';

export const IFaceRepository = 'IFaceRepository';

export interface AssetFaceId {
assetId: string;
personId: string;
}

export interface IFaceRepository {
getAll(): Promise<AssetFaceEntity[]>;
getByIds(ids: AssetFaceId[]): Promise<AssetFaceEntity[]>;
create(entity: Partial<AssetFaceEntity>): Promise<AssetFaceEntity>;
}
@@ -1,341 +0,0 @@
import { Colorspace, SystemConfigKey } from '@app/infra/entities';
import {
assetStub,
faceStub,
newAssetRepositoryMock,
newFaceRepositoryMock,
newJobRepositoryMock,
newMachineLearningRepositoryMock,
newMediaRepositoryMock,
newPersonRepositoryMock,
newSearchRepositoryMock,
newStorageRepositoryMock,
newSystemConfigRepositoryMock,
personStub,
} from '@test';
import { IAssetRepository, WithoutProperty } from '../asset';
import { IJobRepository, JobName } from '../job';
import { IMediaRepository } from '../media';
import { IPersonRepository } from '../person';
import { ISearchRepository } from '../search';
import { IMachineLearningRepository } from '../smart-info';
import { IStorageRepository } from '../storage';
import { ISystemConfigRepository } from '../system-config';
import { IFaceRepository } from './face.repository';
import { FacialRecognitionService } from './facial-recognition.services';

const croppedFace = Buffer.from('Cropped Face');

const detectFaceMock = {
assetId: 'asset-1',
personId: 'person-1',
boundingBox: {
x1: 100,
y1: 100,
x2: 200,
y2: 200,
},
imageHeight: 500,
imageWidth: 400,
embedding: [1, 2, 3, 4],
score: 0.2,
};

const faceSearch = {
noMatch: {
total: 0,
count: 0,
page: 1,
items: [],
distances: [],
facets: [],
},
oneMatch: {
total: 1,
count: 1,
page: 1,
items: [faceStub.face1],
distances: [0.1],
facets: [],
},
oneRemoteMatch: {
total: 1,
count: 1,
page: 1,
items: [faceStub.face1],
distances: [0.8],
facets: [],
},
};

describe(FacialRecognitionService.name, () => {
let sut: FacialRecognitionService;
let assetMock: jest.Mocked<IAssetRepository>;
let configMock: jest.Mocked<ISystemConfigRepository>;
let faceMock: jest.Mocked<IFaceRepository>;
let jobMock: jest.Mocked<IJobRepository>;
let machineLearningMock: jest.Mocked<IMachineLearningRepository>;
let mediaMock: jest.Mocked<IMediaRepository>;
let personMock: jest.Mocked<IPersonRepository>;
let searchMock: jest.Mocked<ISearchRepository>;
let storageMock: jest.Mocked<IStorageRepository>;

beforeEach(async () => {
assetMock = newAssetRepositoryMock();
configMock = newSystemConfigRepositoryMock();
faceMock = newFaceRepositoryMock();
jobMock = newJobRepositoryMock();
machineLearningMock = newMachineLearningRepositoryMock();
mediaMock = newMediaRepositoryMock();
personMock = newPersonRepositoryMock();
searchMock = newSearchRepositoryMock();
storageMock = newStorageRepositoryMock();

mediaMock.crop.mockResolvedValue(croppedFace);

sut = new FacialRecognitionService(
assetMock,
configMock,
faceMock,
jobMock,
machineLearningMock,
mediaMock,
personMock,
searchMock,
storageMock,
);
});

it('should be defined', () => {
expect(sut).toBeDefined();
});

describe('handleQueueRecognizeFaces', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);

await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(true);
expect(jobMock.queue).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});

it('should queue missing assets', async () => {
assetMock.getWithout.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueRecognizeFaces({});

expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.FACES);
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.RECOGNIZE_FACES,
data: { id: assetStub.image.id },
});
});

it('should queue all assets', async () => {
assetMock.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
personMock.deleteAll.mockResolvedValue(5);
searchMock.deleteAllFaces.mockResolvedValue(100);

await sut.handleQueueRecognizeFaces({ force: true });

expect(assetMock.getAll).toHaveBeenCalled();
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.RECOGNIZE_FACES,
data: { id: assetStub.image.id },
});
});
});

describe('handleRecognizeFaces', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);

await expect(sut.handleRecognizeFaces({ id: 'foo' })).resolves.toBe(true);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});

it('should skip when no resize path', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.noResizePath]);
await sut.handleRecognizeFaces({ id: assetStub.noResizePath.id });
expect(machineLearningMock.detectFaces).not.toHaveBeenCalled();
});

it('should handle no results', async () => {
machineLearningMock.detectFaces.mockResolvedValue([]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleRecognizeFaces({ id: assetStub.image.id });
expect(machineLearningMock.detectFaces).toHaveBeenCalledWith(
'http://immich-machine-learning:3003',
{
imagePath: assetStub.image.resizePath,
},
{
enabled: true,
maxDistance: 0.6,
minScore: 0.7,
minFaces: 1,
modelName: 'buffalo_l',
},
);
expect(faceMock.create).not.toHaveBeenCalled();
expect(jobMock.queue).not.toHaveBeenCalled();
});

it('should match existing people', async () => {
machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]);
searchMock.searchFaces.mockResolvedValue(faceSearch.oneMatch);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleRecognizeFaces({ id: assetStub.image.id });

expect(faceMock.create).toHaveBeenCalledWith({
personId: 'person-1',
assetId: 'asset-id',
embedding: [1, 2, 3, 4],
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
});
});

it('should create a new person', async () => {
machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]);
searchMock.searchFaces.mockResolvedValue(faceSearch.oneRemoteMatch);
personMock.create.mockResolvedValue(personStub.noName);
assetMock.getByIds.mockResolvedValue([assetStub.image]);

await sut.handleRecognizeFaces({ id: assetStub.image.id });

expect(personMock.create).toHaveBeenCalledWith({ ownerId: assetStub.image.ownerId });
expect(faceMock.create).toHaveBeenCalledWith({
personId: 'person-1',
assetId: 'asset-id',
embedding: [1, 2, 3, 4],
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
});
expect(jobMock.queue.mock.calls).toEqual([
[{ name: JobName.SEARCH_INDEX_FACE, data: { personId: 'person-1', assetId: 'asset-id' } }],
[{ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: 'person-1' } }],
]);
});
});

describe('handleGeneratePersonThumbnail', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);

await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(true);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});

it('should skip a person not found', async () => {
personMock.getById.mockResolvedValue(null);
await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
expect(mediaMock.crop).not.toHaveBeenCalled();
});

it('should skip a person without a face asset id', async () => {
personMock.getById.mockResolvedValue(personStub.noThumbnail);
await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
expect(mediaMock.crop).not.toHaveBeenCalled();
});

it('should skip an person with a face asset id not found', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId });
faceMock.getByIds.mockResolvedValue([faceStub.face1]);
await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
expect(mediaMock.crop).not.toHaveBeenCalled();
});

it('should skip a person with a face asset id without a thumbnail', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId });
faceMock.getByIds.mockResolvedValue([faceStub.face1]);
assetMock.getByIds.mockResolvedValue([assetStub.noResizePath]);
await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
expect(mediaMock.crop).not.toHaveBeenCalled();
});

it('should generate a thumbnail', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId });
faceMock.getByIds.mockResolvedValue([faceStub.middle]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);

await sut.handleGeneratePersonThumbnail({ id: 'person-1' });

expect(assetMock.getByIds).toHaveBeenCalledWith([faceStub.middle.assetId]);
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/pe/rs');
expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
left: 95,
top: 95,
width: 110,
height: 110,
});
expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/pe/rs/person-1.jpeg', {
format: 'jpeg',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
expect(personMock.update).toHaveBeenCalledWith({
id: 'person-1',
thumbnailPath: 'upload/thumbs/user-id/pe/rs/person-1.jpeg',
});
});

it('should generate a thumbnail without going negative', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.start.assetId });
faceMock.getByIds.mockResolvedValue([faceStub.start]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);

await sut.handleGeneratePersonThumbnail({ id: 'person-1' });

expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
left: 0,
top: 0,
width: 510,
height: 510,
});
expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/pe/rs/person-1.jpeg', {
format: 'jpeg',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
});

it('should generate a thumbnail without overflowing', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.end.assetId });
faceMock.getByIds.mockResolvedValue([faceStub.end]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);

await sut.handleGeneratePersonThumbnail({ id: 'person-1' });

expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
left: 297,
top: 297,
width: 202,
height: 202,
});
expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/pe/rs/person-1.jpeg', {
format: 'jpeg',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
});
});
});
@@ -1,210 +0,0 @@
import { PersonEntity } from '@app/infra/entities';
import { Inject, Logger } from '@nestjs/common';
import { IAssetRepository, WithoutProperty } from '../asset';
import { usePagination } from '../domain.util';
import { IBaseJob, IEntityJob, IJobRepository, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
import { CropOptions, FACE_THUMBNAIL_SIZE, IMediaRepository } from '../media';
import { IPersonRepository } from '../person/person.repository';
import { ISearchRepository } from '../search/search.repository';
import { IMachineLearningRepository } from '../smart-info';
import { IStorageRepository, StorageCore, StorageFolder } from '../storage';
import { ISystemConfigRepository, SystemConfigCore } from '../system-config';
import { AssetFaceId, IFaceRepository } from './face.repository';

export class FacialRecognitionService {
private logger = new Logger(FacialRecognitionService.name);
private configCore: SystemConfigCore;
private storageCore: StorageCore;

constructor(
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
@Inject(IFaceRepository) private faceRepository: IFaceRepository,
@Inject(IJobRepository) private jobRepository: IJobRepository,
@Inject(IMachineLearningRepository) private machineLearning: IMachineLearningRepository,
@Inject(IMediaRepository) private mediaRepository: IMediaRepository,
@Inject(IPersonRepository) private personRepository: IPersonRepository,
@Inject(ISearchRepository) private searchRepository: ISearchRepository,
@Inject(IStorageRepository) private storageRepository: IStorageRepository,
) {
this.configCore = new SystemConfigCore(configRepository);
this.storageCore = new StorageCore(storageRepository);
}

async handleQueueRecognizeFaces({ force }: IBaseJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}

const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination, { order: 'DESC' })
: this.assetRepository.getWithout(pagination, WithoutProperty.FACES);
});

if (force) {
const people = await this.personRepository.deleteAll();
const faces = await this.searchRepository.deleteAllFaces();
this.logger.debug(`Deleted ${people} people and ${faces} faces`);
}

for await (const assets of assetPagination) {
for (const asset of assets) {
await this.jobRepository.queue({ name: JobName.RECOGNIZE_FACES, data: { id: asset.id } });
}
}

return true;
}

async handleRecognizeFaces({ id }: IEntityJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}

const [asset] = await this.assetRepository.getByIds([id]);
if (!asset || !asset.resizePath) {
return false;
}

const faces = await this.machineLearning.detectFaces(
machineLearning.url,
{ imagePath: asset.resizePath },
machineLearning.facialRecognition,
);

this.logger.debug(`${faces.length} faces detected in ${asset.resizePath}`);
this.logger.verbose(faces.map((face) => ({ ...face, embedding: `float[${face.embedding.length}]` })));

for (const { embedding, ...rest } of faces) {
const faceSearchResult = await this.searchRepository.searchFaces(embedding, { ownerId: asset.ownerId });

let personId: string | null = null;

// try to find a matching face and link to the associated person
// The closer to 0, the better the match. Range is from 0 to 2
if (faceSearchResult.total && faceSearchResult.distances[0] <= machineLearning.facialRecognition.maxDistance) {
this.logger.verbose(`Match face with distance ${faceSearchResult.distances[0]}`);
personId = faceSearchResult.items[0].personId;
}

let newPerson: PersonEntity | null = null;
if (!personId) {
this.logger.debug('No matches, creating a new person.');
newPerson = await this.personRepository.create({ ownerId: asset.ownerId });
personId = newPerson.id;
}

const faceId: AssetFaceId = { assetId: asset.id, personId };
await this.faceRepository.create({
...faceId,
embedding,
imageHeight: rest.imageHeight,
imageWidth: rest.imageWidth,
boundingBoxX1: rest.boundingBox.x1,
boundingBoxX2: rest.boundingBox.x2,
boundingBoxY1: rest.boundingBox.y1,
boundingBoxY2: rest.boundingBox.y2,
});
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_FACE, data: faceId });

if (newPerson) {
await this.personRepository.update({ id: personId, faceAssetId: asset.id });
await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: newPerson.id } });
}
}

return true;
}

async handlePersonMigration({ id }: IEntityJob) {
const person = await this.personRepository.getById(id);
if (!person) {
return false;
}

const path = this.storageCore.ensurePath(StorageFolder.THUMBNAILS, person.ownerId, `${id}.jpeg`);
if (person.thumbnailPath && person.thumbnailPath !== path) {
await this.storageRepository.moveFile(person.thumbnailPath, path);
await this.personRepository.update({ id, thumbnailPath: path });
}

return true;
}

async handleGeneratePersonThumbnail(data: IEntityJob) {
const { machineLearning, thumbnail } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}

const person = await this.personRepository.getById(data.id);
if (!person?.faceAssetId) {
return false;
}

const [face] = await this.faceRepository.getByIds([{ personId: person.id, assetId: person.faceAssetId }]);
if (!face) {
return false;
}

const {
assetId,
personId,
boundingBoxX1: x1,
boundingBoxX2: x2,
boundingBoxY1: y1,
boundingBoxY2: y2,
imageWidth,
imageHeight,
} = face;

const [asset] = await this.assetRepository.getByIds([assetId]);
if (!asset?.resizePath) {
return false;
}

this.logger.verbose(`Cropping face for person: ${personId}`);

const thumbnailPath = this.storageCore.ensurePath(StorageFolder.THUMBNAILS, asset.ownerId, `${personId}.jpeg`);

const halfWidth = (x2 - x1) / 2;
const halfHeight = (y2 - y1) / 2;

const middleX = Math.round(x1 + halfWidth);
const middleY = Math.round(y1 + halfHeight);

// zoom out 10%
const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);

// get the longest distance from the center of the image without overflowing
const newHalfSize = Math.min(
middleX - Math.max(0, middleX - targetHalfSize),
middleY - Math.max(0, middleY - targetHalfSize),
Math.min(imageWidth - 1, middleX + targetHalfSize) - middleX,
Math.min(imageHeight - 1, middleY + targetHalfSize) - middleY,
);

const cropOptions: CropOptions = {
left: middleX - newHalfSize,
top: middleY - newHalfSize,
width: newHalfSize * 2,
height: newHalfSize * 2,
};

const croppedOutput = await this.mediaRepository.crop(asset.resizePath, cropOptions);
const thumbnailOptions = {
format: 'jpeg',
size: FACE_THUMBNAIL_SIZE,
colorspace: thumbnail.colorspace,
quality: thumbnail.quality,
} as const;

await this.mediaRepository.resize(croppedOutput, thumbnailPath, thumbnailOptions);
await this.personRepository.update({ id: personId, thumbnailPath });

return true;
}
}
@@ -1,2 +0,0 @@
export * from './face.repository';
export * from './facial-recognition.services';
@@ -10,7 +10,6 @@ export * from './domain.config';
export * from './domain.constant';
export * from './domain.module';
export * from './domain.util';
export * from './facial-recognition';
export * from './job';
export * from './library';
export * from './media';
@@ -59,7 +59,7 @@ describe(MediaService.name, () => {
hasNextPage: false,
});
personMock.getAll.mockResolvedValue([personStub.newThumbnail]);
personMock.getFaceById.mockResolvedValue(faceStub.face1);
personMock.getFacesByIds.mockResolvedValue([faceStub.face1]);

await sut.handleQueueGenerateThumbnails({ force: true });
@@ -1,4 +1,3 @@
import { AssetFaceId } from '@app/domain';
import { AssetEntity, AssetFaceEntity, PersonEntity } from '@app/infra/entities';
export const IPersonRepository = 'IPersonRepository';
@@ -7,6 +6,11 @@ export interface PersonSearchOptions {
withHidden: boolean;
}

export interface AssetFaceId {
assetId: string;
personId: string;
}

export interface UpdateFacesData {
oldPersonId: string;
newPersonId: string;
@@ -28,6 +32,8 @@ export interface IPersonRepository {
delete(entity: PersonEntity): Promise<PersonEntity | null>;
deleteAll(): Promise<number>;

getFaceById(payload: AssetFaceId): Promise<AssetFaceEntity | null>;
getAllFaces(): Promise<AssetFaceEntity[]>;
getFacesByIds(ids: AssetFaceId[]): Promise<AssetFaceEntity[]>;
getRandomFace(personId: string): Promise<AssetFaceEntity | null>;
createFace(entity: Partial<AssetFaceEntity>): Promise<AssetFaceEntity>;
}
@@ -1,3 +1,4 @@
import { Colorspace, SystemConfigKey } from '@app/infra/entities';
import { BadRequestException, NotFoundException } from '@nestjs/common';
import {
IAccessRepositoryMock,
@@ -5,14 +6,21 @@ import {
authStub,
faceStub,
newAccessRepositoryMock,
newAssetRepositoryMock,
newJobRepositoryMock,
newMachineLearningRepositoryMock,
newMediaRepositoryMock,
newPersonRepositoryMock,
newSearchRepositoryMock,
newStorageRepositoryMock,
newSystemConfigRepositoryMock,
personStub,
} from '@test';
import { BulkIdErrorReason } from '../asset';
import { BulkIdErrorReason, IAssetRepository, WithoutProperty } from '../asset';
import { IJobRepository, JobName } from '../job';
import { IMediaRepository } from '../media';
import { ISearchRepository } from '../search';
import { IMachineLearningRepository } from '../smart-info';
import { IStorageRepository } from '../storage';
import { ISystemConfigRepository } from '../system-config';
import { PersonResponseDto } from './person.dto';
@@ -27,21 +35,85 @@ const responseDto: PersonResponseDto = {
isHidden: false,
};

const croppedFace = Buffer.from('Cropped Face');

const detectFaceMock = {
assetId: 'asset-1',
personId: 'person-1',
boundingBox: {
x1: 100,
y1: 100,
x2: 200,
y2: 200,
},
imageHeight: 500,
imageWidth: 400,
embedding: [1, 2, 3, 4],
score: 0.2,
};

const faceSearch = {
noMatch: {
total: 0,
count: 0,
page: 1,
items: [],
distances: [],
facets: [],
},
oneMatch: {
total: 1,
count: 1,
page: 1,
items: [faceStub.face1],
distances: [0.1],
facets: [],
},
oneRemoteMatch: {
total: 1,
count: 1,
page: 1,
items: [faceStub.face1],
distances: [0.8],
facets: [],
},
};

describe(PersonService.name, () => {
let accessMock: IAccessRepositoryMock;
let assetMock: jest.Mocked<IAssetRepository>;
let configMock: jest.Mocked<ISystemConfigRepository>;
let jobMock: jest.Mocked<IJobRepository>;
let machineLearningMock: jest.Mocked<IMachineLearningRepository>;
let mediaMock: jest.Mocked<IMediaRepository>;
let personMock: jest.Mocked<IPersonRepository>;
let searchMock: jest.Mocked<ISearchRepository>;
let storageMock: jest.Mocked<IStorageRepository>;
let sut: PersonService;

beforeEach(async () => {
accessMock = newAccessRepositoryMock();
assetMock = newAssetRepositoryMock();
configMock = newSystemConfigRepositoryMock();
jobMock = newJobRepositoryMock();
machineLearningMock = newMachineLearningRepositoryMock();
mediaMock = newMediaRepositoryMock();
personMock = newPersonRepositoryMock();
searchMock = newSearchRepositoryMock();
storageMock = newStorageRepositoryMock();
sut = new PersonService(accessMock, personMock, configMock, storageMock, jobMock);
sut = new PersonService(
accessMock,
assetMock,
machineLearningMock,
mediaMock,
personMock,
searchMock,
configMock,
storageMock,
jobMock,
);

mediaMock.crop.mockResolvedValue(croppedFace);
});

it('should be defined', () => {
@@ -250,7 +322,7 @@ describe(PersonService.name, () => {
it("should update a person's thumbnailPath", async () => {
personMock.getById.mockResolvedValue(personStub.withName);
personMock.update.mockResolvedValue(personStub.withName);
personMock.getFaceById.mockResolvedValue(faceStub.face1);
personMock.getFacesByIds.mockResolvedValue([faceStub.face1]);
accessMock.asset.hasOwnerAccess.mockResolvedValue(true);
accessMock.person.hasOwnerAccess.mockResolvedValue(true);
@@ -260,10 +332,12 @@ describe(PersonService.name, () => {

expect(personMock.getById).toHaveBeenCalledWith('person-1');
expect(personMock.update).toHaveBeenCalledWith({ id: 'person-1', faceAssetId: faceStub.face1.assetId });
expect(personMock.getFaceById).toHaveBeenCalledWith({
assetId: faceStub.face1.assetId,
personId: 'person-1',
});
expect(personMock.getFacesByIds).toHaveBeenCalledWith([
{
assetId: faceStub.face1.assetId,
personId: 'person-1',
},
]);
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: 'person-1' } });
expect(accessMock.person.hasOwnerAccess).toHaveBeenCalledWith(authStub.admin.id, 'person-1');
});
@@ -307,6 +381,234 @@ describe(PersonService.name, () => {
});
});

describe('handleQueueRecognizeFaces', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);

await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(true);
expect(jobMock.queue).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});

it('should queue missing assets', async () => {
assetMock.getWithout.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueRecognizeFaces({});

expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.FACES);
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.RECOGNIZE_FACES,
data: { id: assetStub.image.id },
});
});

it('should queue all assets', async () => {
assetMock.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
personMock.deleteAll.mockResolvedValue(5);
searchMock.deleteAllFaces.mockResolvedValue(100);

await sut.handleQueueRecognizeFaces({ force: true });

expect(assetMock.getAll).toHaveBeenCalled();
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.RECOGNIZE_FACES,
data: { id: assetStub.image.id },
});
});
});

describe('handleRecognizeFaces', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);

await expect(sut.handleRecognizeFaces({ id: 'foo' })).resolves.toBe(true);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});

it('should skip when no resize path', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.noResizePath]);
await sut.handleRecognizeFaces({ id: assetStub.noResizePath.id });
expect(machineLearningMock.detectFaces).not.toHaveBeenCalled();
});

it('should handle no results', async () => {
machineLearningMock.detectFaces.mockResolvedValue([]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleRecognizeFaces({ id: assetStub.image.id });
expect(machineLearningMock.detectFaces).toHaveBeenCalledWith(
'http://immich-machine-learning:3003',
{
imagePath: assetStub.image.resizePath,
},
{
enabled: true,
maxDistance: 0.6,
minScore: 0.7,
minFaces: 1,
modelName: 'buffalo_l',
},
);
expect(personMock.createFace).not.toHaveBeenCalled();
expect(jobMock.queue).not.toHaveBeenCalled();
});

it('should match existing people', async () => {
machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]);
searchMock.searchFaces.mockResolvedValue(faceSearch.oneMatch);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleRecognizeFaces({ id: assetStub.image.id });

expect(personMock.createFace).toHaveBeenCalledWith({
personId: 'person-1',
assetId: 'asset-id',
embedding: [1, 2, 3, 4],
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
});
});

it('should create a new person', async () => {
machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]);
searchMock.searchFaces.mockResolvedValue(faceSearch.oneRemoteMatch);
personMock.create.mockResolvedValue(personStub.noName);
assetMock.getByIds.mockResolvedValue([assetStub.image]);

await sut.handleRecognizeFaces({ id: assetStub.image.id });

expect(personMock.create).toHaveBeenCalledWith({ ownerId: assetStub.image.ownerId });
expect(personMock.createFace).toHaveBeenCalledWith({
personId: 'person-1',
assetId: 'asset-id',
embedding: [1, 2, 3, 4],
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
});
expect(jobMock.queue.mock.calls).toEqual([
[{ name: JobName.SEARCH_INDEX_FACE, data: { personId: 'person-1', assetId: 'asset-id' } }],
[{ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: 'person-1' } }],
]);
});
});
describe('handleGeneratePersonThumbnail', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);

await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(true);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});

it('should skip a person not found', async () => {
personMock.getById.mockResolvedValue(null);
await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
expect(mediaMock.crop).not.toHaveBeenCalled();
});

it('should skip a person without a face asset id', async () => {
personMock.getById.mockResolvedValue(personStub.noThumbnail);
await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
expect(mediaMock.crop).not.toHaveBeenCalled();
});

it('should skip an person with a face asset id not found', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId });
personMock.getFacesByIds.mockResolvedValue([faceStub.face1]);
await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
expect(mediaMock.crop).not.toHaveBeenCalled();
});

it('should skip a person with a face asset id without a thumbnail', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId });
personMock.getFacesByIds.mockResolvedValue([faceStub.face1]);
assetMock.getByIds.mockResolvedValue([assetStub.noResizePath]);
await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
expect(mediaMock.crop).not.toHaveBeenCalled();
});

it('should generate a thumbnail', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId });
personMock.getFacesByIds.mockResolvedValue([faceStub.middle]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);

await sut.handleGeneratePersonThumbnail({ id: 'person-1' });

expect(assetMock.getByIds).toHaveBeenCalledWith([faceStub.middle.assetId]);
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/pe/rs');
expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
left: 95,
top: 95,
width: 110,
height: 110,
});
expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/pe/rs/person-1.jpeg', {
format: 'jpeg',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
expect(personMock.update).toHaveBeenCalledWith({
id: 'person-1',
thumbnailPath: 'upload/thumbs/user-id/pe/rs/person-1.jpeg',
});
});

it('should generate a thumbnail without going negative', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.start.assetId });
personMock.getFacesByIds.mockResolvedValue([faceStub.start]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);

await sut.handleGeneratePersonThumbnail({ id: 'person-1' });

expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
left: 0,
top: 0,
width: 510,
height: 510,
});
expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/pe/rs/person-1.jpeg', {
format: 'jpeg',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
});

it('should generate a thumbnail without overflowing', async () => {
personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.end.assetId });
personMock.getFacesByIds.mockResolvedValue([faceStub.end]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);

await sut.handleGeneratePersonThumbnail({ id: 'person-1' });

expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
left: 297,
top: 297,
width: 202,
height: 202,
});
expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/pe/rs/person-1.jpeg', {
format: 'jpeg',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
});
});

describe('mergePerson', () => {
it('should require person.write and person.merge permission', async () => {
personMock.getById.mockResolvedValueOnce(personStub.primaryPerson);
@@ -1,10 +1,22 @@
import { PersonEntity } from '@app/infra/entities';
import { BadRequestException, Inject, Injectable, Logger, NotFoundException } from '@nestjs/common';
import { AccessCore, IAccessRepository, Permission } from '../access';
import { AssetResponseDto, BulkIdErrorReason, BulkIdResponseDto, mapAsset } from '../asset';
import {
AssetResponseDto,
BulkIdErrorReason,
BulkIdResponseDto,
IAssetRepository,
WithoutProperty,
mapAsset,
} from '../asset';
import { AuthUserDto } from '../auth';
import { mimeTypes } from '../domain.constant';
import { IJobRepository, JobName } from '../job';
import { IStorageRepository, ImmichReadStream } from '../storage';
import { usePagination } from '../domain.util';
import { IBaseJob, IEntityJob, IJobRepository, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
import { CropOptions, FACE_THUMBNAIL_SIZE, IMediaRepository } from '../media';
import { ISearchRepository } from '../search';
import { IMachineLearningRepository } from '../smart-info';
import { IStorageRepository, ImmichReadStream, StorageCore, StorageFolder } from '../storage';
import { ISystemConfigRepository, SystemConfigCore } from '../system-config';
import {
MergePersonDto,
@@ -15,22 +27,28 @@ import {
PersonUpdateDto,
mapPerson,
} from './person.dto';
import { IPersonRepository, UpdateFacesData } from './person.repository';
import { AssetFaceId, IPersonRepository, UpdateFacesData } from './person.repository';

@Injectable()
export class PersonService {
private access: AccessCore;
private configCore: SystemConfigCore;
private storageCore: StorageCore;
readonly logger = new Logger(PersonService.name);

constructor(
@Inject(IAccessRepository) private accessRepository: IAccessRepository,
@Inject(IAccessRepository) accessRepository: IAccessRepository,
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
@Inject(IMachineLearningRepository) private machineLearningRepository: IMachineLearningRepository,
@Inject(IMediaRepository) private mediaRepository: IMediaRepository,
@Inject(IPersonRepository) private repository: IPersonRepository,
@Inject(ISearchRepository) private searchRepository: ISearchRepository,
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
@Inject(IStorageRepository) private storageRepository: IStorageRepository,
@Inject(IJobRepository) private jobRepository: IJobRepository,
) {
this.access = new AccessCore(accessRepository);
this.storageCore = new StorageCore(storageRepository);
this.configCore = new SystemConfigCore(configRepository);
}

@@ -90,7 +108,7 @@ export class PersonService {

if (assetId) {
await this.access.requirePermission(authUser, Permission.ASSET_READ, assetId);
const face = await this.repository.getFaceById({ personId: id, assetId });
const [face] = await this.repository.getFacesByIds([{ personId: id, assetId }]);
if (!face) {
throw new BadRequestException('Invalid assetId for feature face');
}
@@ -136,6 +154,183 @@ export class PersonService {
return true;
}

async handleQueueRecognizeFaces({ force }: IBaseJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}

const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination, { order: 'DESC' })
: this.assetRepository.getWithout(pagination, WithoutProperty.FACES);
});

if (force) {
const people = await this.repository.deleteAll();
const faces = await this.searchRepository.deleteAllFaces();
this.logger.debug(`Deleted ${people} people and ${faces} faces`);
}

for await (const assets of assetPagination) {
for (const asset of assets) {
await this.jobRepository.queue({ name: JobName.RECOGNIZE_FACES, data: { id: asset.id } });
}
}

return true;
}

async handleRecognizeFaces({ id }: IEntityJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}

const [asset] = await this.assetRepository.getByIds([id]);
if (!asset || !asset.resizePath) {
return false;
}

const faces = await this.machineLearningRepository.detectFaces(
machineLearning.url,
{ imagePath: asset.resizePath },
machineLearning.facialRecognition,
);

this.logger.debug(`${faces.length} faces detected in ${asset.resizePath}`);
this.logger.verbose(faces.map((face) => ({ ...face, embedding: `float[${face.embedding.length}]` })));

for (const { embedding, ...rest } of faces) {
const faceSearchResult = await this.searchRepository.searchFaces(embedding, { ownerId: asset.ownerId });

let personId: string | null = null;

// try to find a matching face and link to the associated person
// The closer to 0, the better the match. Range is from 0 to 2
if (faceSearchResult.total && faceSearchResult.distances[0] <= machineLearning.facialRecognition.maxDistance) {
this.logger.verbose(`Match face with distance ${faceSearchResult.distances[0]}`);
personId = faceSearchResult.items[0].personId;
}

let newPerson: PersonEntity | null = null;
if (!personId) {
this.logger.debug('No matches, creating a new person.');
newPerson = await this.repository.create({ ownerId: asset.ownerId });
personId = newPerson.id;
}

const faceId: AssetFaceId = { assetId: asset.id, personId };
await this.repository.createFace({
...faceId,
embedding,
imageHeight: rest.imageHeight,
imageWidth: rest.imageWidth,
boundingBoxX1: rest.boundingBox.x1,
boundingBoxX2: rest.boundingBox.x2,
boundingBoxY1: rest.boundingBox.y1,
boundingBoxY2: rest.boundingBox.y2,
});
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_FACE, data: faceId });

if (newPerson) {
await this.repository.update({ id: personId, faceAssetId: asset.id });
await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: newPerson.id } });
}
}

return true;
}

async handlePersonMigration({ id }: IEntityJob) {
const person = await this.repository.getById(id);
if (!person) {
return false;
}

const path = this.storageCore.ensurePath(StorageFolder.THUMBNAILS, person.ownerId, `${id}.jpeg`);
if (person.thumbnailPath && person.thumbnailPath !== path) {
await this.storageRepository.moveFile(person.thumbnailPath, path);
await this.repository.update({ id, thumbnailPath: path });
}

return true;
}

async handleGeneratePersonThumbnail(data: IEntityJob) {
const { machineLearning, thumbnail } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}

const person = await this.repository.getById(data.id);
if (!person?.faceAssetId) {
return false;
}

const [face] = await this.repository.getFacesByIds([{ personId: person.id, assetId: person.faceAssetId }]);
if (!face) {
return false;
}

const {
assetId,
personId,
boundingBoxX1: x1,
boundingBoxX2: x2,
boundingBoxY1: y1,
boundingBoxY2: y2,
imageWidth,
imageHeight,
} = face;

const [asset] = await this.assetRepository.getByIds([assetId]);
if (!asset?.resizePath) {
return false;
}

this.logger.verbose(`Cropping face for person: ${personId}`);

const thumbnailPath = this.storageCore.ensurePath(StorageFolder.THUMBNAILS, asset.ownerId, `${personId}.jpeg`);

const halfWidth = (x2 - x1) / 2;
const halfHeight = (y2 - y1) / 2;

const middleX = Math.round(x1 + halfWidth);
const middleY = Math.round(y1 + halfHeight);

// zoom out 10%
const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);

// get the longest distance from the center of the image without overflowing
const newHalfSize = Math.min(
middleX - Math.max(0, middleX - targetHalfSize),
middleY - Math.max(0, middleY - targetHalfSize),
Math.min(imageWidth - 1, middleX + targetHalfSize) - middleX,
Math.min(imageHeight - 1, middleY + targetHalfSize) - middleY,
);

const cropOptions: CropOptions = {
left: middleX - newHalfSize,
top: middleY - newHalfSize,
width: newHalfSize * 2,
height: newHalfSize * 2,
};

const croppedOutput = await this.mediaRepository.crop(asset.resizePath, cropOptions);
const thumbnailOptions = {
format: 'jpeg',
size: FACE_THUMBNAIL_SIZE,
colorspace: thumbnail.colorspace,
quality: thumbnail.quality,
} as const;

await this.mediaRepository.resize(croppedOutput, thumbnailPath, thumbnailOptions);
await this.repository.update({ id: personId, thumbnailPath });

return true;
}

async mergePerson(authUser: AuthUserDto, id: string, dto: MergePersonDto): Promise<BulkIdResponseDto[]> {
const mergeIds = dto.ids;
await this.access.requirePermission(authUser, Permission.PERSON_WRITE, id);
@@ -7,9 +7,9 @@ import {
faceStub,
newAlbumRepositoryMock,
newAssetRepositoryMock,
newFaceRepositoryMock,
newJobRepositoryMock,
newMachineLearningRepositoryMock,
newPersonRepositoryMock,
newSearchRepositoryMock,
newSystemConfigRepositoryMock,
searchStub,
@@ -18,9 +18,9 @@ import { plainToInstance } from 'class-transformer';
import { IAlbumRepository } from '../album/album.repository';
import { mapAsset } from '../asset';
import { IAssetRepository } from '../asset/asset.repository';
import { IFaceRepository } from '../facial-recognition';
import { JobName } from '../job';
import { IJobRepository } from '../job/job.repository';
import { IPersonRepository } from '../person/person.repository';
import { IMachineLearningRepository } from '../smart-info';
import { ISystemConfigRepository } from '../system-config';
import { SearchDto } from './dto';
@@ -34,8 +34,8 @@ describe(SearchService.name, () => {
let albumMock: jest.Mocked<IAlbumRepository>;
let assetMock: jest.Mocked<IAssetRepository>;
let configMock: jest.Mocked<ISystemConfigRepository>;
let faceMock: jest.Mocked<IFaceRepository>;
let jobMock: jest.Mocked<IJobRepository>;
let personMock: jest.Mocked<IPersonRepository>;
let machineMock: jest.Mocked<IMachineLearningRepository>;
let searchMock: jest.Mocked<ISearchRepository>;

@@ -43,12 +43,12 @@ describe(SearchService.name, () => {
albumMock = newAlbumRepositoryMock();
assetMock = newAssetRepositoryMock();
configMock = newSystemConfigRepositoryMock();
faceMock = newFaceRepositoryMock();
jobMock = newJobRepositoryMock();
personMock = newPersonRepositoryMock();
machineMock = newMachineLearningRepositoryMock();
searchMock = newSearchRepositoryMock();

sut = new SearchService(albumMock, assetMock, configMock, faceMock, jobMock, machineMock, searchMock);
sut = new SearchService(albumMock, assetMock, jobMock, machineMock, personMock, searchMock, configMock);

searchMock.checkMigrationStatus.mockResolvedValue({ assets: false, albums: false, faces: false });
@@ -314,7 +314,7 @@ describe(SearchService.name, () => {

describe('handleIndexFaces', () => {
it('should call done, even when there are no faces', async () => {
faceMock.getAll.mockResolvedValue([]);
personMock.getAllFaces.mockResolvedValue([]);

await sut.handleIndexFaces();
@@ -322,7 +322,7 @@ describe(SearchService.name, () => {
});

it('should index all the faces', async () => {
faceMock.getAll.mockResolvedValue([faceStub.face1]);
personMock.getAllFaces.mockResolvedValue([faceStub.face1]);

await sut.handleIndexFaces();
@@ -358,15 +358,15 @@ describe(SearchService.name, () => {
sut.handleIndexFace({ assetId: 'asset-1', personId: 'person-1' });

expect(searchMock.importFaces).not.toHaveBeenCalled();
expect(faceMock.getByIds).not.toHaveBeenCalled();
expect(personMock.getFacesByIds).not.toHaveBeenCalled();
});

it('should index the face', () => {
faceMock.getByIds.mockResolvedValue([faceStub.face1]);
personMock.getFacesByIds.mockResolvedValue([faceStub.face1]);

sut.handleIndexFace({ assetId: 'asset-1', personId: 'person-1' });

expect(faceMock.getByIds).toHaveBeenCalledWith([{ assetId: 'asset-1', personId: 'person-1' }]);
expect(personMock.getFacesByIds).toHaveBeenCalledWith([{ assetId: 'asset-1', personId: 'person-1' }]);
});
});
@@ -6,8 +6,8 @@ import { AssetResponseDto, mapAsset } from '../asset';
import { IAssetRepository } from '../asset/asset.repository';
import { AuthUserDto } from '../auth';
import { usePagination } from '../domain.util';
import { AssetFaceId, IFaceRepository } from '../facial-recognition';
import { IAssetFaceJob, IBulkEntityJob, IJobRepository, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
import { AssetFaceId, IPersonRepository } from '../person';
import { IMachineLearningRepository } from '../smart-info';
import { FeatureFlag, ISystemConfigRepository, SystemConfigCore } from '../system-config';
import { SearchDto } from './dto';
@@ -51,11 +51,11 @@ export class SearchService {
constructor(
@Inject(IAlbumRepository) private albumRepository: IAlbumRepository,
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
@Inject(IFaceRepository) private faceRepository: IFaceRepository,
@Inject(IJobRepository) private jobRepository: IJobRepository,
@Inject(IMachineLearningRepository) private machineLearning: IMachineLearningRepository,
@Inject(IPersonRepository) private personRepository: IPersonRepository,
@Inject(ISearchRepository) private searchRepository: ISearchRepository,
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
) {
this.configCore = new SystemConfigCore(configRepository);
}
@@ -198,7 +198,7 @@ export class SearchService {
await this.searchRepository.deleteAllFaces();

// TODO: do this in batches based on searchIndexVersion
const faces = this.patchFaces(await this.faceRepository.getAll());
const faces = this.patchFaces(await this.personRepository.getAllFaces());
this.logger.log(`Indexing ${faces.length} faces`);

const chunkSize = 1000;
@@ -340,7 +340,7 @@ export class SearchService {
}

private async idsToFaces(ids: AssetFaceId[]): Promise<OwnedFaceEntity[]> {
return this.patchFaces(await this.faceRepository.getByIds(ids));
return this.patchFaces(await this.personRepository.getFacesByIds(ids));
}

private patchAssets(assets: AssetEntity[]): AssetEntity[] {
@@ -5,7 +5,6 @@ import {
IAuditRepository,
ICommunicationRepository,
ICryptoRepository,
IFaceRepository,
IJobRepository,
IKeyRepository,
ILibraryRepository,
@@ -40,7 +39,6 @@ import {
AuditRepository,
CommunicationRepository,
CryptoRepository,
FaceRepository,
FilesystemProvider,
JobRepository,
LibraryRepository,
@@ -65,7 +63,6 @@ const providers: Provider[] = [
{ provide: IAuditRepository, useClass: AuditRepository },
{ provide: ICommunicationRepository, useClass: CommunicationRepository },
{ provide: ICryptoRepository, useClass: CryptoRepository },
{ provide: IFaceRepository, useClass: FaceRepository },
{ provide: IJobRepository, useClass: JobRepository },
{ provide: ILibraryRepository, useClass: LibraryRepository },
{ provide: IKeyRepository, useClass: APIKeyRepository },
@@ -1,22 +0,0 @@
import { AssetFaceId, IFaceRepository } from '@app/domain';
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { AssetFaceEntity } from '../entities/asset-face.entity';

@Injectable()
export class FaceRepository implements IFaceRepository {
constructor(@InjectRepository(AssetFaceEntity) private repository: Repository<AssetFaceEntity>) {}

getAll(): Promise<AssetFaceEntity[]> {
return this.repository.find({ relations: { asset: true } });
}

getByIds(ids: AssetFaceId[]): Promise<AssetFaceEntity[]> {
return this.repository.find({ where: ids, relations: { asset: true } });
}

create(entity: Partial<AssetFaceEntity>): Promise<AssetFaceEntity> {
return this.repository.save(entity);
}
}
@ -5,7 +5,6 @@ export * from './asset.repository';
export * from './audit.repository';
export * from './communication.repository';
export * from './crypto.repository';
export * from './face.repository';
export * from './filesystem.provider';
export * from './job.repository';
export * from './library.repository';

@ -50,6 +50,10 @@ export class PersonRepository implements IPersonRepository {
return people.length;
}

getAllFaces(): Promise<AssetFaceEntity[]> {
return this.assetFaceRepository.find({ relations: { asset: true } });
}

getAll(): Promise<PersonEntity[]> {
return this.personRepository.find();
}
@ -117,13 +121,17 @@ export class PersonRepository implements IPersonRepository {
return this.personRepository.save(entity);
}

createFace(entity: Partial<AssetFaceEntity>): Promise<AssetFaceEntity> {
return this.assetFaceRepository.save(entity);
}

async update(entity: Partial<PersonEntity>): Promise<PersonEntity> {
const { id } = await this.personRepository.save(entity);
return this.personRepository.findOneByOrFail({ id });
}

async getFaceById({ personId, assetId }: AssetFaceId): Promise<AssetFaceEntity | null> {
return this.assetFaceRepository.findOneBy({ assetId, personId });
async getFacesByIds(ids: AssetFaceId[]): Promise<AssetFaceEntity[]> {
return this.assetFaceRepository.find({ where: ids, relations: { asset: true } });
}

async getRandomFace(personId: string): Promise<AssetFaceEntity | null> {

@ -1,6 +1,5 @@
import {
AuditService,
FacialRecognitionService,
IDeleteFilesJob,
JobName,
JobService,
@ -23,7 +22,6 @@ export class AppService {
private logger = new Logger(AppService.name);

constructor(
private facialRecognitionService: FacialRecognitionService,
private jobService: JobService,
private mediaService: MediaService,
private metadataService: MetadataService,
@ -61,7 +59,7 @@ export class AppService {
[JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE]: (data) => this.storageTemplateService.handleMigrationSingle(data),
[JobName.QUEUE_MIGRATION]: () => this.mediaService.handleQueueMigration(),
[JobName.MIGRATE_ASSET]: (data) => this.mediaService.handleAssetMigration(data),
[JobName.MIGRATE_PERSON]: (data) => this.facialRecognitionService.handlePersonMigration(data),
[JobName.MIGRATE_PERSON]: (data) => this.personService.handlePersonMigration(data),
[JobName.SYSTEM_CONFIG_CHANGE]: () => this.systemConfigService.refreshConfig(),
[JobName.QUEUE_GENERATE_THUMBNAILS]: (data) => this.mediaService.handleQueueGenerateThumbnails(data),
[JobName.GENERATE_JPEG_THUMBNAIL]: (data) => this.mediaService.handleGenerateJpegThumbnail(data),
@ -72,9 +70,9 @@ export class AppService {
[JobName.QUEUE_METADATA_EXTRACTION]: (data) => this.metadataService.handleQueueMetadataExtraction(data),
[JobName.METADATA_EXTRACTION]: (data) => this.metadataService.handleMetadataExtraction(data),
[JobName.LINK_LIVE_PHOTOS]: (data) => this.metadataService.handleLivePhotoLinking(data),
[JobName.QUEUE_RECOGNIZE_FACES]: (data) => this.facialRecognitionService.handleQueueRecognizeFaces(data),
[JobName.RECOGNIZE_FACES]: (data) => this.facialRecognitionService.handleRecognizeFaces(data),
[JobName.GENERATE_PERSON_THUMBNAIL]: (data) => this.facialRecognitionService.handleGeneratePersonThumbnail(data),
[JobName.QUEUE_RECOGNIZE_FACES]: (data) => this.personService.handleQueueRecognizeFaces(data),
[JobName.RECOGNIZE_FACES]: (data) => this.personService.handleRecognizeFaces(data),
[JobName.GENERATE_PERSON_THUMBNAIL]: (data) => this.personService.handleGeneratePersonThumbnail(data),
[JobName.PERSON_CLEANUP]: () => this.personService.handlePersonCleanup(),
[JobName.QUEUE_SIDECAR]: (data) => this.metadataService.handleQueueSidecar(data),
[JobName.SIDECAR_DISCOVERY]: (data) => this.metadataService.handleSidecarDiscovery(data),

@ -1,11 +1,4 @@
import {
AssetResponseDto,
IAssetRepository,
IFaceRepository,
IPersonRepository,
LoginResponseDto,
TimeBucketSize,
} from '@app/domain';
import { AssetResponseDto, IAssetRepository, IPersonRepository, LoginResponseDto, TimeBucketSize } from '@app/domain';
import { AppModule, AssetController } from '@app/immich';
import { AssetEntity, AssetType } from '@app/infra/entities';
import { INestApplication } from '@nestjs/common';
@ -254,8 +247,7 @@ describe(`${AssetController.name} (e2e)`, () => {
const personRepository = app.get<IPersonRepository>(IPersonRepository);
const person = await personRepository.create({ ownerId: asset1.ownerId, name: 'Test Person' });

const faceRepository = app.get<IFaceRepository>(IFaceRepository);
await faceRepository.create({ assetId: asset1.id, personId: person.id });
await personRepository.createFace({ assetId: asset1.id, personId: person.id });

const { status, body } = await request(server)
.put(`/asset/${asset1.id}`)

@ -1,4 +1,4 @@
import { IFaceRepository, IPersonRepository, LoginResponseDto } from '@app/domain';
import { IPersonRepository, LoginResponseDto } from '@app/domain';
import { AppModule, PersonController } from '@app/immich';
import { PersonEntity } from '@app/infra/entities';
import { INestApplication } from '@nestjs/common';
@ -14,7 +14,6 @@ describe(`${PersonController.name}`, () => {
let loginResponse: LoginResponseDto;
let accessToken: string;
let personRepository: IPersonRepository;
let faceRepository: IFaceRepository;
let visiblePerson: PersonEntity;
let hiddenPerson: PersonEntity;

@ -26,7 +25,6 @@ describe(`${PersonController.name}`, () => {
app = await moduleFixture.createNestApplication().init();
server = app.getHttpServer();
personRepository = app.get<IPersonRepository>(IPersonRepository);
faceRepository = app.get<IFaceRepository>(IFaceRepository);
});

beforeEach(async () => {
@ -41,7 +39,7 @@ describe(`${PersonController.name}`, () => {
name: 'visible_person',
thumbnailPath: '/thumbnail/face_asset',
});
await faceRepository.create({ assetId: faceAsset.id, personId: visiblePerson.id });
await personRepository.createFace({ assetId: faceAsset.id, personId: visiblePerson.id });

hiddenPerson = await personRepository.create({
ownerId: loginResponse.userId,
@ -49,7 +47,7 @@ describe(`${PersonController.name}`, () => {
isHidden: true,
thumbnailPath: '/thumbnail/face_asset',
});
await faceRepository.create({ assetId: faceAsset.id, personId: hiddenPerson.id });
await personRepository.createFace({ assetId: faceAsset.id, personId: hiddenPerson.id });
});

afterAll(async () => {

@ -1,9 +0,0 @@
import { IFaceRepository } from '@app/domain';

export const newFaceRepositoryMock = (): jest.Mocked<IFaceRepository> => {
return {
getAll: jest.fn(),
getByIds: jest.fn(),
create: jest.fn(),
};
};

@ -5,7 +5,6 @@ export * from './asset.repository.mock';
export * from './audit.repository.mock';
export * from './communication.repository.mock';
export * from './crypto.repository.mock';
export * from './face.repository.mock';
export * from './job.repository.mock';
export * from './library.repository.mock';
export * from './machine-learning.repository.mock';

@ -14,9 +14,11 @@ export const newPersonRepositoryMock = (): jest.Mocked<IPersonRepository> => {
deleteAll: jest.fn(),
delete: jest.fn(),

getFaceById: jest.fn(),
getAllFaces: jest.fn(),
getFacesByIds: jest.fn(),
getRandomFace: jest.fn(),
prepareReassignFaces: jest.fn(),
reassignFaces: jest.fn(),
createFace: jest.fn(),
};
};
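
For reference, a minimal sketch (not taken from this commit) of how a caller that previously depended on the removed IFaceRepository would use the merged IPersonRepository face methods shown in the diff. The interface names and methods (`getAllFaces`, `getFacesByIds`, `createFace`, `AssetFaceId`) come from the changes above; the function name and the `'asset-1'` / `'person-1'` ids are placeholder examples only.

```typescript
import { AssetFaceId, IPersonRepository } from '@app/domain';

// Hypothetical helper illustrating the migration from faceRepository to personRepository.
export async function faceRepositoryMigrationExample(personRepository: IPersonRepository) {
  // Previously: faceRepository.getAll()
  const allFaces = await personRepository.getAllFaces();

  // Previously: faceRepository.getByIds(ids)
  const ids: AssetFaceId[] = [{ assetId: 'asset-1', personId: 'person-1' }];
  const faces = await personRepository.getFacesByIds(ids);

  // Previously: faceRepository.create(entity)
  await personRepository.createFace({ assetId: 'asset-1', personId: 'person-1' });

  return { total: allFaces.length, matched: faces.length };
}
```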