Mirror of https://github.com/immich-app/immich.git (synced 2024-12-25 10:43:13 +02:00)
feat(server): Google Pixel motion photos (#3175)
* feat(server): Google Pixel motion photos

Add support for motion photos taken on Pixel phones. They have the exif property
'MotionPhoto' set to 1 and an embedded MP4 file appended to the JPEG file.

The implementation works like this:
- on metadata extraction, if a live photo is detected, examine the metadata to
  determine where in the file the embedded MP4 is.
- extract this MP4 and write it next to the JPEG.
- link it using the existing mechanism for live photos.

There is a "MotionPhotoPresentationTimestampUs" exif property, which we don't do
anything with - presumably it refers to the point in the video at which the photo
was taken, but it warrants more investigation.

* fix format

* fix test

---------

Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
This commit is contained in:
parent 82a5d54d2c
commit 4f59e6c7ab
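Before the diff itself, a minimal standalone sketch of the approach described in the commit message: read the Motion Photo "Directory" entries from the file's metadata, work out how far from the end of the JPEG the embedded MP4 starts, and carve it out into a sibling file. This is illustrative only; the helper name extractPixelMotionVideo and the assumed shape of the Directory tag are not part of the commit, and the real implementation below wires the result into immich's asset repository instead of writing next to the original.

// Standalone sketch (not the immich code below): carve the trailing MP4 out of a
// Pixel motion photo. The assumed shape of the XMP "Directory" tag (an array of
// { Item: { Length, Padding, Semantic } }) mirrors what the commit relies on, but
// treat it as an assumption rather than a documented API.
import { exiftool } from 'exiftool-vendored';
import fs from 'node:fs';
import path from 'node:path';

interface DirectoryItem {
  Length?: number;
  Padding?: number;
  Semantic?: string;
}

async function extractPixelMotionVideo(jpegPath: string): Promise<string | null> {
  // exiftool-vendored types these tags loosely, so cast to the shape seen on Pixel files.
  const tags = await exiftool.read(jpegPath);
  const directory = (tags as any).Directory as { Item: DirectoryItem }[] | undefined;
  if (!(tags as any).MotionPhoto || !Array.isArray(directory)) {
    return null; // not a new-style Pixel motion photo
  }

  // The entry with Semantic "MotionPhoto" is the embedded video; it and everything
  // after it sit at the tail of the JPEG, so summing their lengths gives the
  // distance from the end of the file back to the start of the video.
  let found = false;
  let offsetFromEnd = 0;
  let videoLength = 0;
  for (const entry of directory) {
    if (entry.Item.Semantic === 'MotionPhoto') {
      found = true;
      videoLength = entry.Item.Length ?? 0;
    }
    if (found) {
      offsetFromEnd += (entry.Item.Length ?? 0) + (entry.Item.Padding ?? 0);
    }
  }
  if (!found || videoLength === 0) {
    return null;
  }

  // Slice the MP4 off the end of the JPEG and write it as a sibling file.
  const buffer = await fs.promises.readFile(jpegPath);
  const start = buffer.length - offsetFromEnd;
  const videoPath = path.join(path.dirname(jpegPath), path.parse(jpegPath).name + '.mp4');
  await fs.promises.writeFile(videoPath, buffer.subarray(start, start + videoLength));
  return videoPath;
}

// Example usage from a script (remember to shut down the exiftool child process):
//   const video = await extractPixelMotionVideo('/photos/PXL_20230101_000000000.MP.jpg');
//   await exiftool.end();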
@@ -1,12 +1,16 @@
import {
  IAssetRepository,
  IBaseJob,
  ICryptoRepository,
  IEntityJob,
  IGeocodingRepository,
  IJobRepository,
  IStorageRepository,
  JobName,
  JOBS_ASSET_PAGINATION_SIZE,
  QueueName,
  StorageCore,
  StorageFolder,
  usePagination,
  WithoutProperty,
} from '@app/domain';
@@ -19,6 +23,7 @@ import { exiftool, Tags } from 'exiftool-vendored';
import ffmpeg, { FfprobeData } from 'fluent-ffmpeg';
import { Duration } from 'luxon';
import fs from 'node:fs';
import path from 'node:path';
import sharp from 'sharp';
import { Repository } from 'typeorm/repository/Repository';
import { promisify } from 'util';
@@ -29,18 +34,35 @@ import { toNumberOrNull } from '../utils/numbers';

const ffprobe = promisify<string, FfprobeData>(ffmpeg.ffprobe);

interface DirectoryItem {
  Length?: number;
  Mime: string;
  Padding?: number;
  Semantic?: string;
}

interface DirectoryEntry {
  Item: DirectoryItem;
}

interface ImmichTags extends Tags {
  ContentIdentifier?: string;
  MotionPhoto?: number;
  MotionPhotoVersion?: number;
  MotionPhotoPresentationTimestampUs?: number;
}

export class MetadataExtractionProcessor {
  private logger = new Logger(MetadataExtractionProcessor.name);
  private reverseGeocodingEnabled: boolean;
  private storageCore = new StorageCore();

  constructor(
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(IGeocodingRepository) private geocodingRepository: IGeocodingRepository,
    @Inject(ICryptoRepository) private cryptoRepository: ICryptoRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,

    configService: ConfigService,
@@ -100,6 +122,131 @@ export class MetadataExtractionProcessor {
    }
  }

  async addExtractedLivePhoto(sourceAsset: AssetEntity, video: string, created: Date | null): Promise<AssetEntity> {
    if (sourceAsset.livePhotoVideoId) {
      const [liveAsset] = await this.assetRepository.getByIds([sourceAsset.livePhotoVideoId]);
      // already exists so no need to generate ID.
      if (liveAsset.originalPath == video) {
        return liveAsset;
      }
      liveAsset.originalPath = video;
      return this.assetRepository.save(liveAsset);
    }
    const liveAsset = await this.assetRepository.save({
      ownerId: sourceAsset.ownerId,
      owner: sourceAsset.owner,

      checksum: await this.cryptoRepository.hashFile(video),
      originalPath: video,

      fileCreatedAt: created ?? sourceAsset.fileCreatedAt,
      fileModifiedAt: sourceAsset.fileModifiedAt,

      deviceAssetId: 'NONE',
      deviceId: 'NONE',

      type: AssetType.VIDEO,
      isFavorite: false,
      isArchived: sourceAsset.isArchived,
      duration: null,
      isVisible: false,
      livePhotoVideo: null,
      resizePath: null,
      webpPath: null,
      thumbhash: null,
      encodedVideoPath: null,
      tags: [],
      sharedLinks: [],
      originalFileName: path.parse(video).name,
      faces: [],
      sidecarPath: null,
      isReadOnly: sourceAsset.isReadOnly,
    });

    sourceAsset.livePhotoVideoId = liveAsset.id;
    await this.assetRepository.save(sourceAsset);
    return liveAsset;
  }

  private async extractNewPixelLivePhoto(
    asset: AssetEntity,
    directory: DirectoryEntry[],
    fileCreatedAt: Date | null,
  ): Promise<AssetEntity | null> {
    if (asset.livePhotoVideoId) {
      // Already extracted, don't try again.
      const [ret] = await this.assetRepository.getByIds([asset.livePhotoVideoId]);
      this.logger.log(`Already extracted asset ${ret.originalPath}.`);
      return ret;
    }
    let foundMotionPhoto = false;
    let motionPhotoOffsetFromEnd = 0;
    let motionPhotoLength = 0;

    // Look for the directory entry with semantic label "MotionPhoto", which is the embedded video.
    // Then, determine the length from the end of the file to the start of the embedded video.
    for (const entry of directory) {
      if (entry.Item.Semantic == 'MotionPhoto') {
        if (foundMotionPhoto) {
          this.logger.error(`Asset ${asset.originalPath} has more than one motion photo.`);
          continue;
        }
        foundMotionPhoto = true;
        motionPhotoLength = entry.Item.Length ?? 0;
      }
      if (foundMotionPhoto) {
        motionPhotoOffsetFromEnd += entry.Item.Length ?? 0;
        motionPhotoOffsetFromEnd += entry.Item.Padding ?? 0;
      }
    }

    if (!foundMotionPhoto || motionPhotoLength == 0) {
      return null;
    }
    return this.extractEmbeddedVideo(asset, motionPhotoOffsetFromEnd, motionPhotoLength, fileCreatedAt);
  }

  private async extractEmbeddedVideo(
    asset: AssetEntity,
    offsetFromEnd: number,
    length: number | null,
    fileCreatedAt: Date | null,
  ) {
    let file = null;
    try {
      file = await fs.promises.open(asset.originalPath);
      let extracted = null;
      // Read in embedded video.
      const stat = await file.stat();
      if (length == null) {
        length = offsetFromEnd;
      }
      const offset = stat.size - offsetFromEnd;
      extracted = await file.read({
        buffer: Buffer.alloc(length),
        position: offset,
        length: length,
      });

      // Write out extracted video, and add it to the asset repository.
      const encodedVideoFolder = this.storageCore.getFolderLocation(StorageFolder.ENCODED_VIDEO, asset.ownerId);
      this.storageRepository.mkdirSync(encodedVideoFolder);
      const livePhotoPath = path.join(encodedVideoFolder, path.parse(asset.originalPath).name + '.mp4');
      await fs.promises.writeFile(livePhotoPath, extracted.buffer);

      const result = await this.addExtractedLivePhoto(asset, livePhotoPath, fileCreatedAt);
      await this.handleMetadataExtraction({ id: result.id });
      return result;
    } catch (e) {
      this.logger.error(`Failed to extract live photo ${asset.originalPath}: ${e}`);
      return null;
    } finally {
      if (file) {
        await file.close();
      }
    }
  }

  private async handlePhotoMetadataExtraction(asset: AssetEntity) {
    const mediaExifData = await exiftool.read<ImmichTags>(asset.originalPath).catch((error: any) => {
      this.logger.warn(
@@ -163,7 +310,32 @@ export class MetadataExtractionProcessor {
    const longitude = getExifProperty('GPSLongitude');
    newExif.latitude = latitude !== null ? parseLatitude(latitude) : null;
    newExif.longitude = longitude !== null ? parseLongitude(longitude) : null;

    if (getExifProperty('MotionPhoto')) {
      // Seen on more recent Pixel phones: starting as early as Pixel 4a, possibly earlier.
      const rawDirectory = getExifProperty('Directory');
      if (Array.isArray(rawDirectory)) {
        // exiftool-vendor thinks directory is a string, but actually it's an array of DirectoryEntry.
        const directory = rawDirectory as DirectoryEntry[];
        await this.extractNewPixelLivePhoto(asset, directory, fileCreatedAt);
      } else {
        this.logger.warn(`Failed to get Pixel motionPhoto information: directory: ${JSON.stringify(rawDirectory)}`);
      }
    } else if (getExifProperty('MicroVideo')) {
      // Seen on earlier Pixel phones - Pixel 2 and earlier, possibly Pixel 3.
      let offset = getExifProperty('MicroVideoOffset'); // offset from end of file.
      if (typeof offset == 'string') {
        offset = parseInt(offset);
      }
      if (Number.isNaN(offset) || offset == null) {
        this.logger.warn(
          `Failed to get MicroVideo information for ${asset.originalPath}, offset=${getExifProperty(
            'MicroVideoOffset',
          )}`,
        );
      } else {
        await this.extractEmbeddedVideo(asset, offset, null, fileCreatedAt);
      }
    }
    newExif.livePhotoCID = getExifProperty('MediaGroupUUID');
    if (newExif.livePhotoCID && !asset.livePhotoVideoId) {
      const motionAsset = await this.assetRepository.findLivePhotoMatch({
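As a closing note on the diff above, here is a worked example of the offset arithmetic in extractNewPixelLivePhoto, using made-up Directory entry sizes. The snippet is illustrative only and not part of the commit.

// Illustrative only: hypothetical Directory entries for a Pixel motion photo,
// showing how the values consumed by extractEmbeddedVideo are derived.
const directory = [
  { Item: { Semantic: 'Primary', Length: 4_000_000, Padding: 0 } }, // the JPEG itself
  { Item: { Semantic: 'MotionPhoto', Length: 2_500_000, Padding: 0 } }, // the trailing MP4
];

let foundMotionPhoto = false;
let motionPhotoOffsetFromEnd = 0;
let motionPhotoLength = 0;
for (const entry of directory) {
  if (entry.Item.Semantic === 'MotionPhoto') {
    foundMotionPhoto = true;
    motionPhotoLength = entry.Item.Length;
  }
  if (foundMotionPhoto) {
    motionPhotoOffsetFromEnd += entry.Item.Length + entry.Item.Padding;
  }
}
// motionPhotoLength === 2_500_000 and motionPhotoOffsetFromEnd === 2_500_000: the
// video is read from position fileSize - 2_500_000 for 2_500_000 bytes, i.e. the
// last 2.5 MB of the file. Any Padding on the MotionPhoto entry (or on entries
// after it) is added to offsetFromEnd, moving the start of the read further from
// the end of the file. The older MicroVideo format has no Directory:
// MicroVideoOffset is already the distance from the end of the file, so it is
// passed as offsetFromEnd with length = null, which extractEmbeddedVideo treats
// as "read from that offset to the end of the file".
console.log({ motionPhotoLength, motionPhotoOffsetFromEnd });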