77e6a6d78b
* feat: faces-from-metadata - Import face regions from metadata

  Implements immich-app#1692.
  - OpenAPI spec changes to accommodate metadata face import configs. New settings to enable the feature.
  - Updates admin UI components
  - ML face detection/recognition & Exif/Metadata faces compatibility

  Signed-off-by: BugFest <bugfest.dev@pm.me>

* chore(web): remove unused file confirm-enable-import-faces
* chore(web): format metadata-settings
* fix(server): faces-from-metadata tests and format
* fix(server): code refinements, nullable face asset sourceType
* fix(server): Add RegionInfo to ImmichTags interface
* fix(server): deleteAllFaces sourceType param can be undefined
* fix(server): exiftool-vendored 27.0.0 moves readArgs into ExifToolOptions
* fix(server): rename isImportFacesFromMetadataEnabled to isFaceImportEnabled
* fix(server): simplify sourceType conditional
* fix(server): small fixes
* fix(server): handling sourceType
* fix(server): sourceType enum
* fix(server): refactor metadata applyTaggedFaces
* fix(server): create/update signature changes
* fix(server): reduce computational cost of Person.getManyByName
* fix(server): use faceList instead of faceSet
* fix(server): Skip regions without Name defined
* fix(mobile): Update open-api (face assets feature changes)
* fix(server): Face-Person reconciliation with map/index
* fix(server): tags.RegionInfo.AppliedToDimensions must be defined to process face-region
* fix(server): fix shared-link.service.ts format
* fix(mobile): Update open-api after branch update
* simplify
* fix(server): minor fixes
* fix(server): person create/update methods type enforcement
* fix(server): style fixes
* fix(server): remove unused metadata code
* fix(server): metadata faces unit tests
* fix(server): top level config metadata category
* fix(server): rename upsertFaces to replaceFaces
* fix(server): remove sourceType when unnecessary
* fix(server): sourceType as ENUM
* fix(server): format fixes
* fix(server): fix tests after sourceType ENUM change
* fix(server): remove unnecessary JobItem cast
* fix(server): fix asset enum imports
* fix(open-api): add metadata config
* fix(mobile): update open-api after metadata open-api spec changes
* fix(web): update web/api metadata config
* fix(server): remove duplicated sourceType def
* fix(server): update generated sql queries
* fix(e2e): tests for metadata face import feature
* fix(web): Fix check:typescript
* fix(e2e): update subproject ref
* fix(server): revert format changes to pass format checks after ci
* fix(mobile): update open-api
* fix(server,movile,open-api,mobile): sourceType as DB data type
* fix(e2e): upload face asset after enabling metadata face import
* fix(web): simplify metadata admin settings and i18n keys
* Update person.repository.ts

  Co-authored-by: Jason Rasmussen <jason@rasm.me>

* fix(server): asset_faces.sourceType column not nullable
* fix(server): simplified syntax
* fix(e2e): use SDK for everything except the endpoint being tested
* fix(e2e): fix test format
* chore: clean up
* chore: clean up
* chore: update e2e/test-assets

---------

Signed-off-by: BugFest <bugfest.dev@pm.me>
Co-authored-by: mertalev <101130780+mertalev@users.noreply.github.com>
Co-authored-by: Jason Rasmussen <jason@rasm.me>
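For context, every stub in the fixture file below uses SourceType.MACHINE_LEARNING. The feature described in the commit above also produces faces sourced from Exif/XMP metadata, so a metadata-derived fixture would carry a different source type. The snippet that follows is an illustrative sketch only: it assumes the SourceType enum exposes an EXIF member for metadata-imported faces, and metadataFaceStub itself is a hypothetical name that is not part of the actual fixture file.

import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { SourceType } from 'src/enum';
import { assetStub } from 'test/fixtures/asset.stub';
import { personStub } from 'test/fixtures/person.stub';

// Hypothetical fixture (not in the file below): a face whose region was imported
// from Exif/XMP metadata instead of being detected by machine learning.
export const metadataFaceStub = Object.freeze<AssetFaceEntity>({
  id: 'assetFaceIdExif',
  assetId: assetStub.image.id,
  asset: assetStub.image,
  personId: personStub.withName.id,
  person: personStub.withName,
  boundingBoxX1: 0,
  boundingBoxY1: 0,
  boundingBoxX2: 1,
  boundingBoxY2: 1,
  imageHeight: 1024,
  imageWidth: 1024,
  sourceType: SourceType.EXIF, // assumed enum member for metadata-imported faces
  faceSearch: { faceId: 'assetFaceIdExif', embedding: [1, 2, 3, 4] }, // mirrors the shape used by the stubs below
});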
import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { SourceType } from 'src/enum';
import { assetStub } from 'test/fixtures/asset.stub';
import { personStub } from 'test/fixtures/person.stub';

type NonNullableProperty<T> = { [P in keyof T]: NonNullable<T[P]> };

export const faceStub = {
  // ML-detected face on assetStub.image, assigned to personStub.withName
  face1: Object.freeze<NonNullableProperty<AssetFaceEntity>>({
    id: 'assetFaceId1',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.withName.id,
    person: personStub.withName,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MACHINE_LEARNING,
    faceSearch: { faceId: 'assetFaceId1', embedding: [1, 2, 3, 4] },
  }),
  // Face assigned to personStub.primaryPerson
  primaryFace1: Object.freeze<NonNullableProperty<AssetFaceEntity>>({
    id: 'assetFaceId2',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.primaryPerson.id,
    person: personStub.primaryPerson,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MACHINE_LEARNING,
    faceSearch: { faceId: 'assetFaceId2', embedding: [1, 2, 3, 4] },
  }),
  // Two faces assigned to the same personStub.mergePerson, on different assets
  mergeFace1: Object.freeze<NonNullableProperty<AssetFaceEntity>>({
    id: 'assetFaceId3',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.mergePerson.id,
    person: personStub.mergePerson,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MACHINE_LEARNING,
    faceSearch: { faceId: 'assetFaceId3', embedding: [1, 2, 3, 4] },
  }),
  mergeFace2: Object.freeze<NonNullableProperty<AssetFaceEntity>>({
    id: 'assetFaceId4',
    assetId: assetStub.image1.id,
    asset: assetStub.image1,
    personId: personStub.mergePerson.id,
    person: personStub.mergePerson,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MACHINE_LEARNING,
    faceSearch: { faceId: 'assetFaceId4', embedding: [1, 2, 3, 4] },
  }),
  // Faces for personStub.newThumbnail with distinct bounding boxes and image dimensions
  start: Object.freeze<NonNullableProperty<AssetFaceEntity>>({
    id: 'assetFaceId5',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.newThumbnail.id,
    person: personStub.newThumbnail,
    boundingBoxX1: 5,
    boundingBoxY1: 5,
    boundingBoxX2: 505,
    boundingBoxY2: 505,
    imageHeight: 2880,
    imageWidth: 2160,
    sourceType: SourceType.MACHINE_LEARNING,
    faceSearch: { faceId: 'assetFaceId5', embedding: [1, 2, 3, 4] },
  }),
  middle: Object.freeze<NonNullableProperty<AssetFaceEntity>>({
    id: 'assetFaceId6',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.newThumbnail.id,
    person: personStub.newThumbnail,
    boundingBoxX1: 100,
    boundingBoxY1: 100,
    boundingBoxX2: 200,
    boundingBoxY2: 200,
    imageHeight: 500,
    imageWidth: 400,
    sourceType: SourceType.MACHINE_LEARNING,
    faceSearch: { faceId: 'assetFaceId6', embedding: [1, 2, 3, 4] },
  }),
  end: Object.freeze<NonNullableProperty<AssetFaceEntity>>({
    id: 'assetFaceId7',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.newThumbnail.id,
    person: personStub.newThumbnail,
    boundingBoxX1: 300,
    boundingBoxY1: 300,
    boundingBoxX2: 495,
    boundingBoxY2: 495,
    imageHeight: 500,
    imageWidth: 500,
    sourceType: SourceType.MACHINE_LEARNING,
    faceSearch: { faceId: 'assetFaceId7', embedding: [1, 2, 3, 4] },
  }),
  // Detected faces that have not yet been assigned to a person
  noPerson1: Object.freeze<AssetFaceEntity>({
    id: 'assetFaceId8',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: null,
    person: null,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MACHINE_LEARNING,
    faceSearch: { faceId: 'assetFaceId8', embedding: [1, 2, 3, 4] },
  }),
  noPerson2: Object.freeze<AssetFaceEntity>({
    id: 'assetFaceId9',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: null,
    person: null,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MACHINE_LEARNING,
    faceSearch: { faceId: 'assetFaceId9', embedding: [1, 2, 3, 4] },
  }),
};
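A minimal sketch of how these fixtures could be consumed in a unit test. It assumes a vitest test runner; the repository mock, its getFaces method, and the test case itself are hypothetical and not taken from the actual spec files.

import { describe, expect, it, vi } from 'vitest';

import { SourceType } from 'src/enum';
import { faceStub } from 'test/fixtures/face.stub';

describe('faceStub (illustrative usage)', () => {
  it('returns stubbed faces from a mocked repository', async () => {
    // Hypothetical mock standing in for a person/face repository
    const personRepositoryMock = {
      getFaces: vi.fn().mockResolvedValue([faceStub.face1, faceStub.primaryFace1]),
    };

    const faces = await personRepositoryMock.getFaces(faceStub.face1.assetId);

    expect(faces).toHaveLength(2);
    expect(faces[0].sourceType).toBe(SourceType.MACHINE_LEARNING);
  });
});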