mirror of https://github.com/immich-app/immich.git synced 2024-11-28 09:33:27 +02:00

feat(server): refresh face detection (#12335)

* refresh faces

handle non-ml faces

* fix metadata face handling

* updated tests

* added todo comment
Mert 2024-10-03 21:58:28 -04:00 committed by GitHub
parent 9edc9d6151
commit 2c87683fd4
21 changed files with 389 additions and 144 deletions

Binary file not shown.

Binary file not shown.

View File

@ -8215,8 +8215,9 @@
},
"AssetJobName": {
"enum": [
"regenerate-thumbnail",
"refresh-faces",
"refresh-metadata",
"regenerate-thumbnail",
"transcode-video"
],
"type": "string"
@ -9277,8 +9278,7 @@
}
},
"required": [
"command",
"force"
"command"
],
"type": "object"
},

View File

@ -554,7 +554,7 @@ export type JobCreateDto = {
};
export type JobCommandDto = {
command: JobCommand;
force: boolean;
force?: boolean;
};
export type LibraryResponseDto = {
assetCount: number;
@ -3426,8 +3426,9 @@ export enum Reason {
UnsupportedFormat = "unsupported-format"
}
export enum AssetJobName {
RegenerateThumbnail = "regenerate-thumbnail",
RefreshFaces = "refresh-faces",
RefreshMetadata = "refresh-metadata",
RegenerateThumbnail = "regenerate-thumbnail",
TranscodeVideo = "transcode-video"
}
export enum AssetMediaSize {

View File

@ -92,8 +92,9 @@ export class AssetIdsDto {
}
export enum AssetJobName {
REGENERATE_THUMBNAIL = 'regenerate-thumbnail',
REFRESH_FACES = 'refresh-faces',
REFRESH_METADATA = 'refresh-metadata',
REGENERATE_THUMBNAIL = 'regenerate-thumbnail',
TRANSCODE_VIDEO = 'transcode-video',
}

View File

@ -18,7 +18,7 @@ export class JobCommandDto {
command!: JobCommand;
@ValidateBoolean({ optional: true })
force!: boolean;
force?: boolean; // TODO: this uses undefined as a third state, which should be refactored to be more explicit
}
export class JobCreateDto {
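
The optional force flag introduced here is the hinge for the rest of the diff: true means a full reset, false means only assets that were never processed, and the new undefined state means re-run detection without wiping existing data. A minimal sketch of that interpretation, distilled from the person.service.ts and JobTile.svelte changes further down (not code from the commit):

// Sketch only: how the three states of `force` are consumed when queueing face detection
// (simplified from person.service.ts below; the names here are illustrative).
type QueueMode = 'reset' | 'refresh' | 'missing';

function queueMode(force?: boolean): QueueMode {
  if (force === true) {
    return 'reset'; // delete existing machine-learning faces and orphaned people, then re-detect every asset
  }
  if (force === false) {
    return 'missing'; // only queue assets that have never been through face detection
  }
  return 'refresh'; // re-detect every asset in place, then queue a PERSON_CLEANUP job afterwards
}

// In the face-detection tile of the admin UI, the "RESET" button sends force: true,
// the new "REFRESH" button sends force: undefined, and "MISSING" sends force: false.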

View File

@ -1,5 +1,6 @@
import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { FaceSearchEntity } from 'src/entities/face-search.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { SourceType } from 'src/enum';
import { Paginated, PaginationOptions } from 'src/utils/pagination';
@ -63,7 +64,11 @@ export interface IPersonRepository {
delete(entities: PersonEntity[]): Promise<void>;
deleteAll(): Promise<void>;
deleteFaces(options: DeleteFacesOptions): Promise<void>;
replaceFaces(assetId: string, entities: Partial<AssetFaceEntity>[], sourceType?: string): Promise<string[]>;
refreshFaces(
facesToAdd: Partial<AssetFaceEntity>[],
faceIdsToRemove: string[],
embeddingsToAdd?: FaceSearchEntity[],
): Promise<void>;
getAllFaces(pagination: PaginationOptions, options?: FindManyOptions<AssetFaceEntity>): Paginated<AssetFaceEntity>;
getFaceById(id: string): Promise<AssetFaceEntity>;
getFaceByIdWithAssets(

View File

@ -5,6 +5,7 @@ import { ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { FaceSearchEntity } from 'src/entities/face-search.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { PaginationMode, SourceType } from 'src/enum';
import {
@ -31,6 +32,7 @@ export class PersonRepository implements IPersonRepository {
@InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
@InjectRepository(PersonEntity) private personRepository: Repository<PersonEntity>,
@InjectRepository(AssetFaceEntity) private assetFaceRepository: Repository<AssetFaceEntity>,
@InjectRepository(FaceSearchEntity) private faceSearchRepository: Repository<FaceSearchEntity>,
@InjectRepository(AssetJobStatusEntity) private jobStatusRepository: Repository<AssetJobStatusEntity>,
) {}
@ -296,12 +298,31 @@ export class PersonRepository implements IPersonRepository {
return res.map((row) => row.id);
}
async replaceFaces(assetId: string, entities: AssetFaceEntity[], sourceType: string): Promise<string[]> {
return this.dataSource.transaction(async (manager) => {
await manager.delete(AssetFaceEntity, { assetId, sourceType });
const assetFaces = await manager.save(AssetFaceEntity, entities);
return assetFaces.map(({ id }) => id);
});
async refreshFaces(
facesToAdd: Partial<AssetFaceEntity>[],
faceIdsToRemove: string[],
embeddingsToAdd?: FaceSearchEntity[],
): Promise<void> {
const query = this.faceSearchRepository.createQueryBuilder().select('1');
if (facesToAdd.length > 0) {
const insertCte = this.assetFaceRepository.createQueryBuilder().insert().values(facesToAdd);
query.addCommonTableExpression(insertCte, 'added');
}
if (faceIdsToRemove.length > 0) {
const deleteCte = this.assetFaceRepository
.createQueryBuilder()
.delete()
.where('id = any(:faceIdsToRemove)', { faceIdsToRemove });
query.addCommonTableExpression(deleteCte, 'deleted');
}
if (embeddingsToAdd?.length) {
const embeddingCte = this.faceSearchRepository.createQueryBuilder().insert().values(embeddingsToAdd).orIgnore();
query.addCommonTableExpression(embeddingCte, 'embeddings');
}
await query.execute();
}
async update(person: Partial<PersonEntity>): Promise<PersonEntity> {
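
Note how refreshFaces differs from the replaceFaces it supersedes: instead of an explicit dataSource.transaction, the face insert, the face delete, and the embedding insert are attached as common table expressions to a single dummy SELECT, so all of the writes go out as one statement. A hedged usage sketch, assuming an injected IPersonRepository instance and placeholder values:

// Usage sketch, not taken from the commit; the ids and embedding are placeholders.
import { randomUUID } from 'node:crypto';

const faceId = randomUUID();
await personRepository.refreshFaces(
  [
    {
      id: faceId,
      assetId: 'asset-id',
      imageHeight: 500,
      imageWidth: 400,
      boundingBoxX1: 100,
      boundingBoxY1: 100,
      boundingBoxX2: 200,
      boundingBoxY2: 200,
    },
  ],                                       // facesToAdd
  ['ml-face-id-no-longer-detected'],       // faceIdsToRemove
  [{ faceId, embedding: [1, 2, 3, 4] }],   // embeddingsToAdd, inserted with orIgnore (ON CONFLICT DO NOTHING)
);
// Callers in this diff guard the call and skip it entirely when all three lists are empty.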

View File

@ -92,9 +92,9 @@ export class AssetService extends BaseService {
id,
{
exifInfo: true,
tags: true,
sharedLinks: true,
smartInfo: true,
tags: true,
owner: true,
faces: {
person: true,
@ -290,6 +290,11 @@ export class AssetService extends BaseService {
for (const id of dto.assetIds) {
switch (dto.name) {
case AssetJobName.REFRESH_FACES: {
jobs.push({ name: JobName.FACE_DETECTION, data: { id } });
break;
}
case AssetJobName.REFRESH_METADATA: {
jobs.push({ name: JobName.METADATA_EXTRACTION, data: { id } });
break;

View File

@ -247,7 +247,7 @@ describe(MetadataService.name, () => {
it('should handle an asset that could not be found', async () => {
await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
expect(assetMock.upsertExif).not.toHaveBeenCalled();
expect(assetMock.update).not.toHaveBeenCalled();
});
@ -265,7 +265,7 @@ describe(MetadataService.name, () => {
});
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.sidecar.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.sidecar.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: sidecarDate }));
expect(assetMock.update).toHaveBeenCalledWith({
id: assetStub.image.id,
@ -280,7 +280,7 @@ describe(MetadataService.name, () => {
metadataMock.readTags.mockResolvedValue({ ISO: [160] });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ iso: 160 }));
expect(assetMock.update).toHaveBeenCalledWith({
id: assetStub.image.id,
@ -300,7 +300,7 @@ describe(MetadataService.name, () => {
});
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }),
);
@ -320,7 +320,7 @@ describe(MetadataService.name, () => {
});
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ latitude: null, longitude: null }));
});
@ -482,7 +482,9 @@ describe(MetadataService.name, () => {
mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
await sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id], {
faces: { person: false },
});
expect(storageMock.createOrOverwriteFile).not.toHaveBeenCalled();
expect(jobMock.queue).not.toHaveBeenCalled();
expect(jobMock.queueAll).not.toHaveBeenCalled();
@ -508,7 +510,7 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.video.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ orientation: Orientation.Rotate270CW.toString() }),
);
@ -536,7 +538,9 @@ describe(MetadataService.name, () => {
assetStub.livePhotoWithOriginalFileName.originalPath,
'MotionPhotoVideo',
);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoWithOriginalFileName.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoWithOriginalFileName.id], {
faces: { person: false },
});
expect(assetMock.create).toHaveBeenCalledWith({
checksum: expect.any(Buffer),
deviceAssetId: 'NONE',
@ -579,7 +583,9 @@ describe(MetadataService.name, () => {
assetStub.livePhotoWithOriginalFileName.originalPath,
'EmbeddedVideoFile',
);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoWithOriginalFileName.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoWithOriginalFileName.id], {
faces: { person: false },
});
expect(assetMock.create).toHaveBeenCalledWith({
checksum: expect.any(Buffer),
deviceAssetId: 'NONE',
@ -619,7 +625,9 @@ describe(MetadataService.name, () => {
storageMock.readFile.mockResolvedValue(video);
await sut.handleMetadataExtraction({ id: assetStub.livePhotoWithOriginalFileName.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoWithOriginalFileName.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoWithOriginalFileName.id], {
faces: { person: false },
});
expect(storageMock.readFile).toHaveBeenCalledWith(
assetStub.livePhotoWithOriginalFileName.originalPath,
expect.any(Object),
@ -768,7 +776,7 @@ describe(MetadataService.name, () => {
metadataMock.readTags.mockResolvedValue(tags);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalledWith({
assetId: assetStub.image.id,
bitsPerSample: expect.any(Number),
@ -826,7 +834,7 @@ describe(MetadataService.name, () => {
metadataMock.readTags.mockResolvedValue(tags);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
timeZone: 'UTC+0',
@ -846,7 +854,7 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.video.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalled();
expect(assetMock.update).toHaveBeenCalledWith(
expect.objectContaining({
@ -867,7 +875,7 @@ describe(MetadataService.name, () => {
});
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalled();
expect(assetMock.update).toHaveBeenCalledWith(
expect.objectContaining({
@ -889,7 +897,7 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.video.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalled();
expect(assetMock.update).toHaveBeenCalledWith(
expect.objectContaining({
@ -911,7 +919,7 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.video.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalled();
expect(assetMock.update).toHaveBeenCalledWith(
expect.objectContaining({
@ -975,11 +983,10 @@ describe(MetadataService.name, () => {
metadataMock.readTags.mockResolvedValue(metadataStub.withFaceNoName);
personMock.getDistinctNames.mockResolvedValue([]);
personMock.createAll.mockResolvedValue([]);
personMock.replaceFaces.mockResolvedValue([]);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(personMock.createAll).toHaveBeenCalledWith([]);
expect(personMock.replaceFaces).toHaveBeenCalledWith(assetStub.primaryImage.id, [], SourceType.EXIF);
expect(personMock.updateAll).toHaveBeenCalledWith([]);
expect(personMock.createAll).not.toHaveBeenCalled();
expect(personMock.refreshFaces).not.toHaveBeenCalled();
expect(personMock.updateAll).not.toHaveBeenCalled();
});
it('should skip importing faces with empty name', async () => {
@ -988,11 +995,10 @@ describe(MetadataService.name, () => {
metadataMock.readTags.mockResolvedValue(metadataStub.withFaceEmptyName);
personMock.getDistinctNames.mockResolvedValue([]);
personMock.createAll.mockResolvedValue([]);
personMock.replaceFaces.mockResolvedValue([]);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(personMock.createAll).toHaveBeenCalledWith([]);
expect(personMock.replaceFaces).toHaveBeenCalledWith(assetStub.primaryImage.id, [], SourceType.EXIF);
expect(personMock.updateAll).toHaveBeenCalledWith([]);
expect(personMock.createAll).not.toHaveBeenCalled();
expect(personMock.refreshFaces).not.toHaveBeenCalled();
expect(personMock.updateAll).not.toHaveBeenCalled();
});
it('should apply metadata face tags creating new persons', async () => {
@ -1001,14 +1007,12 @@ describe(MetadataService.name, () => {
metadataMock.readTags.mockResolvedValue(metadataStub.withFace);
personMock.getDistinctNames.mockResolvedValue([]);
personMock.createAll.mockResolvedValue([personStub.withName.id]);
personMock.replaceFaces.mockResolvedValue(['face-asset-uuid']);
personMock.update.mockResolvedValue(personStub.withName);
await sut.handleMetadataExtraction({ id: assetStub.primaryImage.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.primaryImage.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.primaryImage.id], { faces: { person: false } });
expect(personMock.getDistinctNames).toHaveBeenCalledWith(assetStub.primaryImage.ownerId, { withHidden: true });
expect(personMock.createAll).toHaveBeenCalledWith([expect.objectContaining({ name: personStub.withName.name })]);
expect(personMock.replaceFaces).toHaveBeenCalledWith(
assetStub.primaryImage.id,
expect(personMock.refreshFaces).toHaveBeenCalledWith(
[
{
id: 'random-uuid',
@ -1023,7 +1027,7 @@ describe(MetadataService.name, () => {
sourceType: SourceType.EXIF,
},
],
SourceType.EXIF,
[],
);
expect(personMock.updateAll).toHaveBeenCalledWith([{ id: 'random-uuid', faceAssetId: 'random-uuid' }]);
expect(jobMock.queueAll).toHaveBeenCalledWith([
@ -1040,14 +1044,12 @@ describe(MetadataService.name, () => {
metadataMock.readTags.mockResolvedValue(metadataStub.withFace);
personMock.getDistinctNames.mockResolvedValue([{ id: personStub.withName.id, name: personStub.withName.name }]);
personMock.createAll.mockResolvedValue([]);
personMock.replaceFaces.mockResolvedValue(['face-asset-uuid']);
personMock.update.mockResolvedValue(personStub.withName);
await sut.handleMetadataExtraction({ id: assetStub.primaryImage.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.primaryImage.id]);
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.primaryImage.id], { faces: { person: false } });
expect(personMock.getDistinctNames).toHaveBeenCalledWith(assetStub.primaryImage.ownerId, { withHidden: true });
expect(personMock.createAll).toHaveBeenCalledWith([]);
expect(personMock.replaceFaces).toHaveBeenCalledWith(
assetStub.primaryImage.id,
expect(personMock.createAll).not.toHaveBeenCalled();
expect(personMock.refreshFaces).toHaveBeenCalledWith(
[
{
id: 'random-uuid',
@ -1062,10 +1064,10 @@ describe(MetadataService.name, () => {
sourceType: SourceType.EXIF,
},
],
SourceType.EXIF,
[],
);
expect(personMock.updateAll).toHaveBeenCalledWith([]);
expect(jobMock.queueAll).toHaveBeenCalledWith([]);
expect(personMock.updateAll).not.toHaveBeenCalled();
expect(jobMock.queueAll).not.toHaveBeenCalled();
});
it('should handle invalid modify date', async () => {

View File

@ -178,7 +178,7 @@ export class MetadataService extends BaseService {
async handleMetadataExtraction({ id }: IEntityJob): Promise<JobStatus> {
const { metadata, reverseGeocoding } = await this.getConfig({ withCache: true });
const [asset] = await this.assetRepository.getByIds([id]);
const [asset] = await this.assetRepository.getByIds([id], { faces: { person: false } });
if (!asset) {
return JobStatus.FAILED;
}
@ -513,7 +513,7 @@ export class MetadataService extends BaseService {
return;
}
const discoveredFaces: Partial<AssetFaceEntity>[] = [];
const facesToAdd: Partial<AssetFaceEntity>[] = [];
const existingNames = await this.personRepository.getDistinctNames(asset.ownerId, { withHidden: true });
const existingNameMap = new Map(existingNames.map(({ id, name }) => [name.toLowerCase(), id]));
const missing: Partial<PersonEntity>[] = [];
@ -541,7 +541,7 @@ export class MetadataService extends BaseService {
sourceType: SourceType.EXIF,
};
discoveredFaces.push(face);
facesToAdd.push(face);
if (!existingNameMap.has(loweredName)) {
missing.push({ id: personId, ownerId: asset.ownerId, name: region.Name });
missingWithFaceAsset.push({ id: personId, faceAssetId: face.id });
@ -550,18 +550,27 @@ export class MetadataService extends BaseService {
if (missing.length > 0) {
this.logger.debug(`Creating missing persons: ${missing.map((p) => `${p.name}/${p.id}`)}`);
const newPersonIds = await this.personRepository.createAll(missing);
const jobs = newPersonIds.map((id) => ({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id } }) as const);
await this.jobRepository.queueAll(jobs);
}
const newPersonIds = await this.personRepository.createAll(missing);
const facesToRemove = asset.faces.filter((face) => face.sourceType === SourceType.EXIF).map((face) => face.id);
if (facesToRemove.length > 0) {
this.logger.debug(`Removing ${facesToRemove.length} faces for asset ${asset.id}`);
}
const faceIds = await this.personRepository.replaceFaces(asset.id, discoveredFaces, SourceType.EXIF);
this.logger.debug(`Created ${faceIds.length} faces for asset ${asset.id}`);
if (facesToAdd.length > 0) {
this.logger.debug(`Creating ${facesToAdd.length} faces from metadata for asset ${asset.id}`);
}
if (facesToRemove.length > 0 || facesToAdd.length > 0) {
await this.personRepository.refreshFaces(facesToAdd, facesToRemove);
}
if (missingWithFaceAsset.length > 0) {
await this.personRepository.updateAll(missingWithFaceAsset);
await this.jobRepository.queueAll(
newPersonIds.map((id) => ({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id } })),
);
}
}
private getDates(asset: AssetEntity, exifTags: ImmichTags) {
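
The net effect of the applyTaggedFaces changes above is that a metadata refresh now only touches EXIF-sourced faces: existing EXIF faces are swapped for the ones read from the tags, while machine-learning faces on the same asset are left alone (the "handle non-ml faces" item in the commit message). A condensed recap, not code from the commit:

// Recap sketch: only faces with sourceType EXIF are diffed on a metadata refresh.
const exifFaceIdsToRemove = asset.faces
  .filter((face) => face.sourceType === SourceType.EXIF)
  .map((face) => face.id);

if (exifFaceIdsToRemove.length > 0 || facesToAdd.length > 0) {
  // No embeddings are passed here; a matching EXIF face picks its embedding up later
  // during face detection (see person.service.ts).
  await this.personRepository.refreshFaces(facesToAdd, exifFaceIdsToRemove);
}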

View File

@ -35,21 +35,33 @@ const responseDto: PersonResponseDto = {
const statistics = { assets: 3 };
const faceId = 'face-id';
const face = {
id: faceId,
assetId: 'asset-id',
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
};
const faceSearch = { faceId, embedding: [1, 2, 3, 4] };
const detectFaceMock: DetectedFaces = {
faces: [
{
boundingBox: {
x1: 100,
y1: 100,
x2: 200,
y2: 200,
x1: face.boundingBoxX1,
y1: face.boundingBoxY1,
x2: face.boundingBoxX2,
y2: face.boundingBoxY2,
},
embedding: [1, 2, 3, 4],
embedding: faceSearch.embedding,
score: 0.2,
},
],
imageHeight: 500,
imageWidth: 400,
imageHeight: face.imageHeight,
imageWidth: face.imageWidth,
};
describe(PersonService.name, () => {
@ -449,7 +461,7 @@ describe(PersonService.name, () => {
hasNextPage: false,
});
await sut.handleQueueDetectFaces({});
await sut.handleQueueDetectFaces({ force: false });
expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.FACES);
expect(jobMock.queueAll).toHaveBeenCalledWith([
@ -465,14 +477,13 @@ describe(PersonService.name, () => {
items: [assetStub.image],
hasNextPage: false,
});
personMock.getAll.mockResolvedValue({
items: [personStub.withName],
hasNextPage: false,
});
personMock.getAllWithoutFaces.mockResolvedValue([]);
personMock.getAllWithoutFaces.mockResolvedValue([personStub.withName]);
await sut.handleQueueDetectFaces({ force: true });
expect(personMock.deleteFaces).toHaveBeenCalledWith({ sourceType: SourceType.MACHINE_LEARNING });
expect(personMock.delete).toHaveBeenCalledWith([personStub.withName]);
expect(storageMock.unlink).toHaveBeenCalledWith(personStub.withName.thumbnailPath);
expect(assetMock.getAll).toHaveBeenCalled();
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
@ -482,6 +493,27 @@ describe(PersonService.name, () => {
]);
});
it('should refresh all assets', async () => {
assetMock.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueDetectFaces({ force: undefined });
expect(personMock.delete).not.toHaveBeenCalled();
expect(personMock.deleteFaces).not.toHaveBeenCalled();
expect(storageMock.unlink).not.toHaveBeenCalled();
expect(assetMock.getAll).toHaveBeenCalled();
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
data: { id: assetStub.image.id },
},
]);
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.PERSON_CLEANUP });
});
it('should delete existing people and faces if forced', async () => {
personMock.getAll.mockResolvedValue({
items: [faceStub.face1.person, personStub.randomPerson],
@ -542,7 +574,7 @@ describe(PersonService.name, () => {
expect(personMock.getAllFaces).toHaveBeenCalledWith(
{ skip: 0, take: 1000 },
{ where: { personId: IsNull(), sourceType: IsNull() } },
{ where: { personId: IsNull(), sourceType: SourceType.MACHINE_LEARNING } },
);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
@ -663,6 +695,10 @@ describe(PersonService.name, () => {
});
describe('handleDetectFaces', () => {
beforeEach(() => {
cryptoMock.randomUUID.mockReturnValue(faceId);
});
it('should skip if machine learning is disabled', async () => {
systemMock.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
@ -719,27 +755,73 @@ describe(PersonService.name, () => {
it('should create a face with no person and queue recognition job', async () => {
personMock.createFaces.mockResolvedValue([faceStub.face1.id]);
machineLearningMock.detectFaces.mockResolvedValue(detectFaceMock);
searchMock.searchFaces.mockResolvedValue([{ face: faceStub.face1, distance: 0.7 }]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
const faceId = 'face-id';
cryptoMock.randomUUID.mockReturnValue(faceId);
const face = {
id: faceId,
assetId: 'asset-id',
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
faceSearch: { faceId, embedding: [1, 2, 3, 4] },
};
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(personMock.createFaces).toHaveBeenCalledWith([face]);
expect(personMock.refreshFaces).toHaveBeenCalledWith([face], [], [faceSearch]);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{ name: JobName.FACIAL_RECOGNITION, data: { id: faceStub.face1.id } },
{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } },
{ name: JobName.FACIAL_RECOGNITION, data: { id: faceId } },
]);
expect(personMock.reassignFace).not.toHaveBeenCalled();
expect(personMock.reassignFaces).not.toHaveBeenCalled();
});
it('should delete an existing face not among the new detected faces', async () => {
machineLearningMock.detectFaces.mockResolvedValue({ faces: [], imageHeight: 500, imageWidth: 400 });
assetMock.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.primaryFace1] }]);
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(personMock.refreshFaces).toHaveBeenCalledWith([], [faceStub.primaryFace1.id], []);
expect(jobMock.queueAll).not.toHaveBeenCalled();
expect(personMock.reassignFace).not.toHaveBeenCalled();
expect(personMock.reassignFaces).not.toHaveBeenCalled();
});
it('should add new face and delete an existing face not among the new detected faces', async () => {
personMock.createFaces.mockResolvedValue([faceStub.face1.id]);
machineLearningMock.detectFaces.mockResolvedValue(detectFaceMock);
assetMock.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.primaryFace1] }]);
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(personMock.refreshFaces).toHaveBeenCalledWith([face], [faceStub.primaryFace1.id], [faceSearch]);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } },
{ name: JobName.FACIAL_RECOGNITION, data: { id: faceId } },
]);
expect(personMock.reassignFace).not.toHaveBeenCalled();
expect(personMock.reassignFaces).not.toHaveBeenCalled();
});
it('should add embedding to matching metadata face', async () => {
machineLearningMock.detectFaces.mockResolvedValue(detectFaceMock);
assetMock.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.fromExif1] }]);
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(personMock.refreshFaces).toHaveBeenCalledWith(
[],
[],
[{ faceId: faceStub.fromExif1.id, embedding: faceSearch.embedding }],
);
expect(jobMock.queueAll).not.toHaveBeenCalled();
expect(personMock.reassignFace).not.toHaveBeenCalled();
expect(personMock.reassignFaces).not.toHaveBeenCalled();
});
it('should not add embedding to non-matching metadata face', async () => {
machineLearningMock.detectFaces.mockResolvedValue(detectFaceMock);
assetMock.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.fromExif2] }]);
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(personMock.refreshFaces).toHaveBeenCalledWith([face], [], [faceSearch]);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } },
{ name: JobName.FACIAL_RECOGNITION, data: { id: faceId } },
]);
expect(personMock.reassignFace).not.toHaveBeenCalled();
expect(personMock.reassignFaces).not.toHaveBeenCalled();

View File

@ -21,6 +21,7 @@ import {
} from 'src/dtos/person.dto';
import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { FaceSearchEntity } from 'src/entities/face-search.entity';
import { PersonEntity } from 'src/entities/person.entity';
import {
AssetType,
@ -256,14 +257,14 @@ export class PersonService extends BaseService {
}
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination, {
return force === false
? this.assetRepository.getWithout(pagination, WithoutProperty.FACES)
: this.assetRepository.getAll(pagination, {
orderDirection: 'DESC',
withFaces: true,
withArchived: true,
isVisible: true,
})
: this.assetRepository.getWithout(pagination, WithoutProperty.FACES);
});
});
for await (const assets of assetPagination) {
@ -272,6 +273,10 @@ export class PersonService extends BaseService {
);
}
if (force === undefined) {
await this.jobRepository.queue({ name: JobName.PERSON_CLEANUP });
}
return JobStatus.SUCCESS;
}
@ -290,11 +295,11 @@ export class PersonService extends BaseService {
};
const [asset] = await this.assetRepository.getByIds([id], relations);
const { previewFile } = getAssetFiles(asset.files);
if (!asset || !previewFile || asset.faces?.length > 0) {
if (!asset || !previewFile) {
return JobStatus.FAILED;
}
if (!asset.isVisible || asset.faces.length > 0) {
if (!asset.isVisible) {
return JobStatus.SKIPPED;
}
@ -303,39 +308,82 @@ export class PersonService extends BaseService {
previewFile.path,
machineLearning.facialRecognition,
);
this.logger.debug(`${faces.length} faces detected in ${previewFile.path}`);
if (faces.length > 0) {
await this.jobRepository.queue({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } });
const mappedFaces: Partial<AssetFaceEntity>[] = [];
for (const face of faces) {
const facesToAdd: (Partial<AssetFaceEntity> & { id: string })[] = [];
const embeddings: FaceSearchEntity[] = [];
const mlFaceIds = new Set<string>();
for (const face of asset.faces) {
if (face.sourceType === SourceType.MACHINE_LEARNING) {
mlFaceIds.add(face.id);
}
}
const heightScale = imageHeight / (asset.faces[0]?.imageHeight || 1);
const widthScale = imageWidth / (asset.faces[0]?.imageWidth || 1);
for (const { boundingBox, embedding } of faces) {
const scaledBox = {
x1: boundingBox.x1 * widthScale,
y1: boundingBox.y1 * heightScale,
x2: boundingBox.x2 * widthScale,
y2: boundingBox.y2 * heightScale,
};
const match = asset.faces.find((face) => this.iou(face, scaledBox) > 0.5);
if (match && !mlFaceIds.delete(match.id)) {
embeddings.push({ faceId: match.id, embedding });
} else {
const faceId = this.cryptoRepository.randomUUID();
mappedFaces.push({
facesToAdd.push({
id: faceId,
assetId: asset.id,
imageHeight,
imageWidth,
boundingBoxX1: face.boundingBox.x1,
boundingBoxY1: face.boundingBox.y1,
boundingBoxX2: face.boundingBox.x2,
boundingBoxY2: face.boundingBox.y2,
faceSearch: { faceId, embedding: face.embedding },
boundingBoxX1: boundingBox.x1,
boundingBoxY1: boundingBox.y1,
boundingBoxX2: boundingBox.x2,
boundingBoxY2: boundingBox.y2,
});
embeddings.push({ faceId, embedding });
}
}
const faceIdsToRemove = [...mlFaceIds];
if (facesToAdd.length > 0 || faceIdsToRemove.length > 0 || embeddings.length > 0) {
await this.personRepository.refreshFaces(facesToAdd, faceIdsToRemove, embeddings);
}
const faceIds = await this.personRepository.createFaces(mappedFaces);
await this.jobRepository.queueAll(faceIds.map((id) => ({ name: JobName.FACIAL_RECOGNITION, data: { id } })));
if (faceIdsToRemove.length > 0) {
this.logger.log(`Removed ${faceIdsToRemove.length} faces below detection threshold in asset ${id}`);
}
await this.assetRepository.upsertJobStatus({
assetId: asset.id,
facesRecognizedAt: new Date(),
});
if (facesToAdd.length > 0) {
this.logger.log(`Detected ${facesToAdd.length} new faces in asset ${id}`);
const jobs = facesToAdd.map((face) => ({ name: JobName.FACIAL_RECOGNITION, data: { id: face.id } }) as const);
await this.jobRepository.queueAll([{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } }, ...jobs]);
} else if (embeddings.length > 0) {
this.logger.log(`Added ${embeddings.length} face embeddings for asset ${id}`);
}
await this.assetRepository.upsertJobStatus({ assetId: asset.id, facesRecognizedAt: new Date() });
return JobStatus.SUCCESS;
}
private iou(face: AssetFaceEntity, newBox: BoundingBox): number {
const x1 = Math.max(face.boundingBoxX1, newBox.x1);
const y1 = Math.max(face.boundingBoxY1, newBox.y1);
const x2 = Math.min(face.boundingBoxX2, newBox.x2);
const y2 = Math.min(face.boundingBoxY2, newBox.y2);
const intersection = Math.max(0, x2 - x1) * Math.max(0, y2 - y1);
const area1 = (face.boundingBoxX2 - face.boundingBoxX1) * (face.boundingBoxY2 - face.boundingBoxY1);
const area2 = (newBox.x2 - newBox.x1) * (newBox.y2 - newBox.y1);
const union = area1 + area2 - intersection;
return intersection / union;
}
async handleQueueRecognizeFaces({ force, nightly }: INightlyJob): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: false });
if (!isFacialRecognitionEnabled(machineLearning)) {
@ -371,7 +419,7 @@ export class PersonService extends BaseService {
const lastRun = new Date().toISOString();
const facePagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.personRepository.getAllFaces(pagination, {
where: force ? undefined : { personId: IsNull(), sourceType: IsNull() },
where: force ? undefined : { personId: IsNull(), sourceType: SourceType.MACHINE_LEARNING },
}),
);
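
The iou helper above is what lets detection coexist with metadata faces: a detected box that overlaps an existing metadata face with an intersection-over-union above 0.5 only contributes its embedding, a box with no match becomes a new face row, and machine-learning faces that no longer match any detection are removed. A worked example using the boxes from the test stubs (the rescaling applied when the stored face used different image dimensions is omitted here):

// Standalone sketch, not part of the commit, mirroring the IoU math above.
type Box = { x1: number; y1: number; x2: number; y2: number };

const iou = (a: Box, b: Box): number => {
  const x1 = Math.max(a.x1, b.x1);
  const y1 = Math.max(a.y1, b.y1);
  const x2 = Math.min(a.x2, b.x2);
  const y2 = Math.min(a.y2, b.y2);
  const intersection = Math.max(0, x2 - x1) * Math.max(0, y2 - y1);
  const areaA = (a.x2 - a.x1) * (a.y2 - a.y1);
  const areaB = (b.x2 - b.x1) * (b.y2 - b.y1);
  return intersection / (areaA + areaB - intersection);
};

const detected = { x1: 100, y1: 100, x2: 200, y2: 200 }; // detectFaceMock in the tests
console.log(iou(detected, { x1: 100, y1: 100, x2: 200, y2: 200 })); // 1 -> faceStub.fromExif1 just gains the embedding
console.log(iou(detected, { x1: 0, y1: 0, x2: 1, y2: 1 }));         // 0 -> no match: a new face is added, the EXIF face stays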

View File

@ -141,4 +141,32 @@ export const faceStub = {
sourceType: SourceType.MACHINE_LEARNING,
faceSearch: { faceId: 'assetFaceId9', embedding: [1, 2, 3, 4] },
}),
fromExif1: Object.freeze<AssetFaceEntity>({
id: 'assetFaceId9',
assetId: assetStub.image.id,
asset: assetStub.image,
personId: personStub.randomPerson.id,
person: personStub.randomPerson,
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
sourceType: SourceType.EXIF,
}),
fromExif2: Object.freeze<AssetFaceEntity>({
id: 'assetFaceId9',
assetId: assetStub.image.id,
asset: assetStub.image,
personId: personStub.randomPerson.id,
person: personStub.randomPerson,
boundingBoxX1: 0,
boundingBoxY1: 0,
boundingBoxX2: 1,
boundingBoxY2: 1,
imageHeight: 1024,
imageWidth: 1024,
sourceType: SourceType.EXIF,
}),
};

View File

@ -28,7 +28,7 @@ export const newPersonRepositoryMock = (): Mocked<IPersonRepository> => {
reassignFaces: vitest.fn(),
unassignFaces: vitest.fn(),
createFaces: vitest.fn(),
replaceFaces: vitest.fn(),
refreshFaces: vitest.fn(),
getFaces: vitest.fn(),
reassignFace: vitest.fn(),
getFaceById: vitest.fn(),

View File

@ -1,5 +1,5 @@
<script lang="ts" context="module">
export type Colors = 'light-gray' | 'gray';
export type Colors = 'light-gray' | 'gray' | 'dark-gray';
</script>
<script lang="ts">
@ -7,8 +7,9 @@
export let disabled = false;
const colorClasses: Record<Colors, string> = {
'light-gray': 'bg-gray-300/90 dark:bg-gray-600/90',
gray: 'bg-gray-300 dark:bg-gray-600',
'light-gray': 'bg-gray-300/80 dark:bg-gray-700',
gray: 'bg-gray-300/90 dark:bg-gray-700/90',
'dark-gray': 'bg-gray-300 dark:bg-gray-700/80',
};
const hoverClasses = disabled

View File

@ -9,6 +9,7 @@
mdiAllInclusive,
mdiClose,
mdiFastForward,
mdiImageRefreshOutline,
mdiPause,
mdiPlay,
mdiSelectionSearch,
@ -23,16 +24,17 @@
export let description: ComponentType | undefined;
export let jobCounts: JobCountsDto;
export let queueStatus: QueueStatusDto;
export let allowForceCommand = true;
export let icon: string;
export let disabled = false;
export let allText: string;
export let allText: string | undefined;
export let refreshText: string | undefined;
export let missingText: string;
export let onCommand: (command: JobCommandDto) => void;
$: waitingCount = jobCounts.waiting + jobCounts.paused + jobCounts.delayed;
$: isIdle = !queueStatus.isActive && !queueStatus.isPaused;
$: multipleButtons = allText || refreshText;
const commonClasses = 'flex place-items-center justify-between w-full py-2 sm:py-4 pr-4 pl-6';
</script>
@ -121,7 +123,9 @@
<Icon path={mdiAlertCircle} size="36" />
{$t('disabled').toUpperCase()}
</JobTileButton>
{:else if !isIdle}
{/if}
{#if !disabled && !isIdle}
{#if waitingCount > 0}
<JobTileButton color="gray" on:click={() => onCommand({ command: JobCommand.Empty, force: false })}>
<Icon path={mdiClose} size="24" />
@ -141,16 +145,28 @@
{$t('pause').toUpperCase()}
</JobTileButton>
{/if}
{:else if allowForceCommand}
<JobTileButton color="gray" on:click={() => onCommand({ command: JobCommand.Start, force: true })}>
{/if}
{#if !disabled && multipleButtons && isIdle}
{#if allText}
<JobTileButton color="dark-gray" on:click={() => onCommand({ command: JobCommand.Start, force: true })}>
<Icon path={mdiAllInclusive} size="24" />
{allText}
</JobTileButton>
{/if}
{#if refreshText}
<JobTileButton color="gray" on:click={() => onCommand({ command: JobCommand.Start, force: undefined })}>
<Icon path={mdiImageRefreshOutline} size="24" />
{refreshText}
</JobTileButton>
{/if}
<JobTileButton color="light-gray" on:click={() => onCommand({ command: JobCommand.Start, force: false })}>
<Icon path={mdiSelectionSearch} size="24" />
{missingText}
</JobTileButton>
{:else}
{/if}
{#if !disabled && !multipleButtons && isIdle}
<JobTileButton color="light-gray" on:click={() => onCommand({ command: JobCommand.Start, force: false })}>
<Icon path={mdiPlay} size="48" />
{$t('start').toUpperCase()}

View File

@ -32,10 +32,10 @@
subtitle?: string;
description?: ComponentType;
allText?: string;
missingText?: string;
refreshText?: string;
missingText: string;
disabled?: boolean;
icon: string;
allowForceCommand?: boolean;
handleCommand?: (jobId: JobName, jobCommand: JobCommandDto) => Promise<void>;
}
@ -61,43 +61,54 @@
icon: mdiFileJpgBox,
title: $getJobName(JobName.ThumbnailGeneration),
subtitle: $t('admin.thumbnail_generation_job_description'),
allText: $t('all'),
missingText: $t('missing'),
},
[JobName.MetadataExtraction]: {
icon: mdiTable,
title: $getJobName(JobName.MetadataExtraction),
subtitle: $t('admin.metadata_extraction_job_description'),
allText: $t('all'),
missingText: $t('missing'),
},
[JobName.Library]: {
icon: mdiLibraryShelves,
title: $getJobName(JobName.Library),
subtitle: $t('admin.library_tasks_description'),
allText: $t('all').toUpperCase(),
missingText: $t('refresh').toUpperCase(),
allText: $t('all'),
missingText: $t('refresh'),
},
[JobName.Sidecar]: {
title: $getJobName(JobName.Sidecar),
icon: mdiFileXmlBox,
subtitle: $t('admin.sidecar_job_description'),
allText: $t('sync').toUpperCase(),
missingText: $t('discover').toUpperCase(),
allText: $t('sync'),
missingText: $t('discover'),
disabled: !$featureFlags.sidecar,
},
[JobName.SmartSearch]: {
icon: mdiImageSearch,
title: $getJobName(JobName.SmartSearch),
subtitle: $t('admin.smart_search_job_description'),
allText: $t('all'),
missingText: $t('missing'),
disabled: !$featureFlags.smartSearch,
},
[JobName.DuplicateDetection]: {
icon: mdiContentDuplicate,
title: $getJobName(JobName.DuplicateDetection),
subtitle: $t('admin.duplicate_detection_job_description'),
allText: $t('all'),
missingText: $t('missing'),
disabled: !$featureFlags.duplicateDetection,
},
[JobName.FaceDetection]: {
icon: mdiFaceRecognition,
title: $getJobName(JobName.FaceDetection),
subtitle: $t('admin.face_detection_description'),
allText: $t('reset'),
refreshText: $t('refresh'),
missingText: $t('missing'),
handleCommand: handleConfirmCommand,
disabled: !$featureFlags.facialRecognition,
},
@ -105,6 +116,8 @@
icon: mdiTagFaces,
title: $getJobName(JobName.FacialRecognition),
subtitle: $t('admin.facial_recognition_job_description'),
allText: $t('reset'),
missingText: $t('missing'),
handleCommand: handleConfirmCommand,
disabled: !$featureFlags.facialRecognition,
},
@ -112,18 +125,20 @@
icon: mdiVideo,
title: $getJobName(JobName.VideoConversion),
subtitle: $t('admin.video_conversion_job_description'),
allText: $t('all'),
missingText: $t('missing'),
},
[JobName.StorageTemplateMigration]: {
icon: mdiFolderMove,
title: $getJobName(JobName.StorageTemplateMigration),
allowForceCommand: false,
missingText: $t('missing'),
description: StorageMigrationDescription,
},
[JobName.Migration]: {
icon: mdiFolderMove,
title: $getJobName(JobName.Migration),
subtitle: $t('admin.migration_job_description'),
allowForceCommand: false,
missingText: $t('missing'),
},
};
$: jobList = Object.entries(jobDetails) as [JobName, JobDetails][];
@ -150,7 +165,7 @@
</script>
<div class="flex flex-col gap-7">
{#each jobList as [jobName, { title, subtitle, description, disabled, allText, missingText, allowForceCommand, icon, handleCommand: handleCommandOverride }]}
{#each jobList as [jobName, { title, subtitle, description, disabled, allText, refreshText, missingText, icon, handleCommand: handleCommandOverride }]}
{@const { jobCounts, queueStatus } = jobs[jobName]}
<JobTile
{icon}
@ -158,9 +173,9 @@
{disabled}
{subtitle}
{description}
allText={allText || $t('all').toUpperCase()}
missingText={missingText || $t('missing').toUpperCase()}
{allowForceCommand}
allText={allText?.toUpperCase()}
refreshText={refreshText?.toUpperCase()}
missingText={missingText.toUpperCase()}
{jobCounts}
{queueStatus}
onCommand={(command) => (handleCommandOverride || handleCommand)(jobName, command)}

View File

@ -34,6 +34,7 @@
mdiContentCopy,
mdiDatabaseRefreshOutline,
mdiDotsVertical,
mdiHeadSyncOutline,
mdiImageRefreshOutline,
mdiImageSearch,
mdiMagnifyMinusOutline,
@ -166,6 +167,11 @@
/>
{/if}
<hr />
<MenuOption
icon={mdiHeadSyncOutline}
onClick={() => onRunJob(AssetJobName.RefreshFaces)}
text={$getAssetJobName(AssetJobName.RefreshFaces)}
/>
<MenuOption
icon={mdiDatabaseRefreshOutline}
onClick={() => onRunJob(AssetJobName.RefreshMetadata)}

View File

@ -49,8 +49,8 @@
"external_library_created_at": "External library (created on {date})",
"external_library_management": "External Library Management",
"face_detection": "Face detection",
"face_detection_description": "Detect the faces in assets using machine learning. For videos, only the thumbnail is considered. \"All\" (re-)processes all assets. \"Missing\" queues assets that haven't been processed yet. Detected faces will be queued for Facial Recognition after Face Detection is complete, grouping them into existing or new people.",
"facial_recognition_job_description": "Group detected faces into people. This step runs after Face Detection is complete. \"All\" (re-)clusters all faces. \"Missing\" queues faces that don't have a person assigned.",
"face_detection_description": "Detect the faces in assets using machine learning. For videos, only the thumbnail is considered. \"Refresh\" (re-)processes all assets. \"Reset\" additionally clears all current face data. \"Missing\" queues assets that haven't been processed yet. Detected faces will be queued for Facial Recognition after Face Detection is complete, grouping them into existing or new people.",
"facial_recognition_job_description": "Group detected faces into people. This step runs after Face Detection is complete. \"Reset\" (re-)clusters all faces. \"Missing\" queues faces that don't have a person assigned.",
"failed_job_command": "Command {command} failed for job: {job}",
"force_delete_user_warning": "WARNING: This will immediately remove the user and all assets. This cannot be undone and the files cannot be recovered.",
"forcing_refresh_library_files": "Forcing refresh of all library files",
@ -1014,11 +1014,13 @@
"recent_searches": "Recent searches",
"refresh": "Refresh",
"refresh_encoded_videos": "Refresh encoded videos",
"refresh_faces": "Refresh faces",
"refresh_metadata": "Refresh metadata",
"refresh_thumbnails": "Refresh thumbnails",
"refreshed": "Refreshed",
"refreshes_every_file": "Re-reads all existing and new files",
"refreshing_encoded_video": "Refreshing encoded video",
"refreshing_faces": "Refreshing faces",
"refreshing_metadata": "Refreshing metadata",
"regenerating_thumbnails": "Regenerating thumbnails",
"remove": "Remove",

View File

@ -21,7 +21,7 @@ import {
type SharedLinkResponseDto,
type UserResponseDto,
} from '@immich/sdk';
import { mdiCogRefreshOutline, mdiDatabaseRefreshOutline, mdiImageRefreshOutline } from '@mdi/js';
import { mdiCogRefreshOutline, mdiDatabaseRefreshOutline, mdiHeadSyncOutline, mdiImageRefreshOutline } from '@mdi/js';
import { sortBy } from 'lodash-es';
import { init, register, t } from 'svelte-i18n';
import { derived, get } from 'svelte/store';
@ -214,6 +214,7 @@ export const getPeopleThumbnailUrl = (person: PersonResponseDto, updatedAt?: str
export const getAssetJobName = derived(t, ($t) => {
return (job: AssetJobName) => {
const names: Record<AssetJobName, string> = {
[AssetJobName.RefreshFaces]: $t('refresh_faces'),
[AssetJobName.RefreshMetadata]: $t('refresh_metadata'),
[AssetJobName.RegenerateThumbnail]: $t('refresh_thumbnails'),
[AssetJobName.TranscodeVideo]: $t('refresh_encoded_videos'),
@ -226,6 +227,7 @@ export const getAssetJobName = derived(t, ($t) => {
export const getAssetJobMessage = derived(t, ($t) => {
return (job: AssetJobName) => {
const messages: Record<AssetJobName, string> = {
[AssetJobName.RefreshFaces]: $t('refreshing_faces'),
[AssetJobName.RefreshMetadata]: $t('refreshing_metadata'),
[AssetJobName.RegenerateThumbnail]: $t('regenerating_thumbnails'),
[AssetJobName.TranscodeVideo]: $t('refreshing_encoded_video'),
@ -237,6 +239,7 @@ export const getAssetJobMessage = derived(t, ($t) => {
export const getAssetJobIcon = (job: AssetJobName) => {
const names: Record<AssetJobName, string> = {
[AssetJobName.RefreshFaces]: mdiHeadSyncOutline,
[AssetJobName.RefreshMetadata]: mdiDatabaseRefreshOutline,
[AssetJobName.RegenerateThumbnail]: mdiImageRefreshOutline,
[AssetJobName.TranscodeVideo]: mdiCogRefreshOutline,