Mirror of https://github.com/immich-app/immich.git, synced 2025-03-11 15:09:45 +02:00

Merge branch 'main' of https://github.com/immich-app/immich into feat/sidecar-asset-files

Commit 08b8793aab

mobile/openapi/README.md (generated): 3 lines changed
@@ -424,6 +424,9 @@ Class | Method | HTTP request | Description
- [SyncAckDeleteDto](doc//SyncAckDeleteDto.md)
- [SyncAckDto](doc//SyncAckDto.md)
- [SyncAckSetDto](doc//SyncAckSetDto.md)
- [SyncAssetDeleteV1](doc//SyncAssetDeleteV1.md)
- [SyncAssetExifV1](doc//SyncAssetExifV1.md)
- [SyncAssetV1](doc//SyncAssetV1.md)
- [SyncEntityType](doc//SyncEntityType.md)
- [SyncPartnerDeleteV1](doc//SyncPartnerDeleteV1.md)
- [SyncPartnerV1](doc//SyncPartnerV1.md)
mobile/openapi/lib/api.dart (generated): 3 lines changed
@@ -231,6 +231,9 @@ part 'model/stack_update_dto.dart';
part 'model/sync_ack_delete_dto.dart';
part 'model/sync_ack_dto.dart';
part 'model/sync_ack_set_dto.dart';
part 'model/sync_asset_delete_v1.dart';
part 'model/sync_asset_exif_v1.dart';
part 'model/sync_asset_v1.dart';
part 'model/sync_entity_type.dart';
part 'model/sync_partner_delete_v1.dart';
part 'model/sync_partner_v1.dart';
mobile/openapi/lib/api_client.dart (generated): 6 lines changed
@@ -518,6 +518,12 @@ class ApiClient {
        return SyncAckDto.fromJson(value);
      case 'SyncAckSetDto':
        return SyncAckSetDto.fromJson(value);
      case 'SyncAssetDeleteV1':
        return SyncAssetDeleteV1.fromJson(value);
      case 'SyncAssetExifV1':
        return SyncAssetExifV1.fromJson(value);
      case 'SyncAssetV1':
        return SyncAssetV1.fromJson(value);
      case 'SyncEntityType':
        return SyncEntityTypeTypeTransformer().decode(value);
      case 'SyncPartnerDeleteV1':
mobile/openapi/lib/model/sync_asset_delete_v1.dart (generated, new file): 99 lines
@ -0,0 +1,99 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class SyncAssetDeleteV1 {
|
||||
/// Returns a new [SyncAssetDeleteV1] instance.
|
||||
SyncAssetDeleteV1({
|
||||
required this.assetId,
|
||||
});
|
||||
|
||||
String assetId;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is SyncAssetDeleteV1 &&
|
||||
other.assetId == assetId;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(assetId.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'SyncAssetDeleteV1[assetId=$assetId]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'assetId'] = this.assetId;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [SyncAssetDeleteV1] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static SyncAssetDeleteV1? fromJson(dynamic value) {
|
||||
upgradeDto(value, "SyncAssetDeleteV1");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return SyncAssetDeleteV1(
|
||||
assetId: mapValueOfType<String>(json, r'assetId')!,
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<SyncAssetDeleteV1> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <SyncAssetDeleteV1>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = SyncAssetDeleteV1.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, SyncAssetDeleteV1> mapFromJson(dynamic json) {
|
||||
final map = <String, SyncAssetDeleteV1>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = SyncAssetDeleteV1.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of SyncAssetDeleteV1-objects as value to a dart map
|
||||
static Map<String, List<SyncAssetDeleteV1>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<SyncAssetDeleteV1>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = SyncAssetDeleteV1.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'assetId',
|
||||
};
|
||||
}
|
||||
|
mobile/openapi/lib/model/sync_asset_exif_v1.dart (generated, new file): 387 lines
@ -0,0 +1,387 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class SyncAssetExifV1 {
|
||||
/// Returns a new [SyncAssetExifV1] instance.
|
||||
SyncAssetExifV1({
|
||||
required this.assetId,
|
||||
required this.city,
|
||||
required this.country,
|
||||
required this.dateTimeOriginal,
|
||||
required this.description,
|
||||
required this.exifImageHeight,
|
||||
required this.exifImageWidth,
|
||||
required this.exposureTime,
|
||||
required this.fNumber,
|
||||
required this.fileSizeInByte,
|
||||
required this.focalLength,
|
||||
required this.fps,
|
||||
required this.iso,
|
||||
required this.latitude,
|
||||
required this.lensModel,
|
||||
required this.longitude,
|
||||
required this.make,
|
||||
required this.model,
|
||||
required this.modifyDate,
|
||||
required this.orientation,
|
||||
required this.profileDescription,
|
||||
required this.projectionType,
|
||||
required this.rating,
|
||||
required this.state,
|
||||
required this.timeZone,
|
||||
});
|
||||
|
||||
String assetId;
|
||||
|
||||
String? city;
|
||||
|
||||
String? country;
|
||||
|
||||
DateTime? dateTimeOriginal;
|
||||
|
||||
String? description;
|
||||
|
||||
int? exifImageHeight;
|
||||
|
||||
int? exifImageWidth;
|
||||
|
||||
String? exposureTime;
|
||||
|
||||
int? fNumber;
|
||||
|
||||
int? fileSizeInByte;
|
||||
|
||||
int? focalLength;
|
||||
|
||||
int? fps;
|
||||
|
||||
int? iso;
|
||||
|
||||
int? latitude;
|
||||
|
||||
String? lensModel;
|
||||
|
||||
int? longitude;
|
||||
|
||||
String? make;
|
||||
|
||||
String? model;
|
||||
|
||||
DateTime? modifyDate;
|
||||
|
||||
String? orientation;
|
||||
|
||||
String? profileDescription;
|
||||
|
||||
String? projectionType;
|
||||
|
||||
int? rating;
|
||||
|
||||
String? state;
|
||||
|
||||
String? timeZone;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is SyncAssetExifV1 &&
|
||||
other.assetId == assetId &&
|
||||
other.city == city &&
|
||||
other.country == country &&
|
||||
other.dateTimeOriginal == dateTimeOriginal &&
|
||||
other.description == description &&
|
||||
other.exifImageHeight == exifImageHeight &&
|
||||
other.exifImageWidth == exifImageWidth &&
|
||||
other.exposureTime == exposureTime &&
|
||||
other.fNumber == fNumber &&
|
||||
other.fileSizeInByte == fileSizeInByte &&
|
||||
other.focalLength == focalLength &&
|
||||
other.fps == fps &&
|
||||
other.iso == iso &&
|
||||
other.latitude == latitude &&
|
||||
other.lensModel == lensModel &&
|
||||
other.longitude == longitude &&
|
||||
other.make == make &&
|
||||
other.model == model &&
|
||||
other.modifyDate == modifyDate &&
|
||||
other.orientation == orientation &&
|
||||
other.profileDescription == profileDescription &&
|
||||
other.projectionType == projectionType &&
|
||||
other.rating == rating &&
|
||||
other.state == state &&
|
||||
other.timeZone == timeZone;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(assetId.hashCode) +
|
||||
(city == null ? 0 : city!.hashCode) +
|
||||
(country == null ? 0 : country!.hashCode) +
|
||||
(dateTimeOriginal == null ? 0 : dateTimeOriginal!.hashCode) +
|
||||
(description == null ? 0 : description!.hashCode) +
|
||||
(exifImageHeight == null ? 0 : exifImageHeight!.hashCode) +
|
||||
(exifImageWidth == null ? 0 : exifImageWidth!.hashCode) +
|
||||
(exposureTime == null ? 0 : exposureTime!.hashCode) +
|
||||
(fNumber == null ? 0 : fNumber!.hashCode) +
|
||||
(fileSizeInByte == null ? 0 : fileSizeInByte!.hashCode) +
|
||||
(focalLength == null ? 0 : focalLength!.hashCode) +
|
||||
(fps == null ? 0 : fps!.hashCode) +
|
||||
(iso == null ? 0 : iso!.hashCode) +
|
||||
(latitude == null ? 0 : latitude!.hashCode) +
|
||||
(lensModel == null ? 0 : lensModel!.hashCode) +
|
||||
(longitude == null ? 0 : longitude!.hashCode) +
|
||||
(make == null ? 0 : make!.hashCode) +
|
||||
(model == null ? 0 : model!.hashCode) +
|
||||
(modifyDate == null ? 0 : modifyDate!.hashCode) +
|
||||
(orientation == null ? 0 : orientation!.hashCode) +
|
||||
(profileDescription == null ? 0 : profileDescription!.hashCode) +
|
||||
(projectionType == null ? 0 : projectionType!.hashCode) +
|
||||
(rating == null ? 0 : rating!.hashCode) +
|
||||
(state == null ? 0 : state!.hashCode) +
|
||||
(timeZone == null ? 0 : timeZone!.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'SyncAssetExifV1[assetId=$assetId, city=$city, country=$country, dateTimeOriginal=$dateTimeOriginal, description=$description, exifImageHeight=$exifImageHeight, exifImageWidth=$exifImageWidth, exposureTime=$exposureTime, fNumber=$fNumber, fileSizeInByte=$fileSizeInByte, focalLength=$focalLength, fps=$fps, iso=$iso, latitude=$latitude, lensModel=$lensModel, longitude=$longitude, make=$make, model=$model, modifyDate=$modifyDate, orientation=$orientation, profileDescription=$profileDescription, projectionType=$projectionType, rating=$rating, state=$state, timeZone=$timeZone]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'assetId'] = this.assetId;
|
||||
if (this.city != null) {
|
||||
json[r'city'] = this.city;
|
||||
} else {
|
||||
// json[r'city'] = null;
|
||||
}
|
||||
if (this.country != null) {
|
||||
json[r'country'] = this.country;
|
||||
} else {
|
||||
// json[r'country'] = null;
|
||||
}
|
||||
if (this.dateTimeOriginal != null) {
|
||||
json[r'dateTimeOriginal'] = this.dateTimeOriginal!.toUtc().toIso8601String();
|
||||
} else {
|
||||
// json[r'dateTimeOriginal'] = null;
|
||||
}
|
||||
if (this.description != null) {
|
||||
json[r'description'] = this.description;
|
||||
} else {
|
||||
// json[r'description'] = null;
|
||||
}
|
||||
if (this.exifImageHeight != null) {
|
||||
json[r'exifImageHeight'] = this.exifImageHeight;
|
||||
} else {
|
||||
// json[r'exifImageHeight'] = null;
|
||||
}
|
||||
if (this.exifImageWidth != null) {
|
||||
json[r'exifImageWidth'] = this.exifImageWidth;
|
||||
} else {
|
||||
// json[r'exifImageWidth'] = null;
|
||||
}
|
||||
if (this.exposureTime != null) {
|
||||
json[r'exposureTime'] = this.exposureTime;
|
||||
} else {
|
||||
// json[r'exposureTime'] = null;
|
||||
}
|
||||
if (this.fNumber != null) {
|
||||
json[r'fNumber'] = this.fNumber;
|
||||
} else {
|
||||
// json[r'fNumber'] = null;
|
||||
}
|
||||
if (this.fileSizeInByte != null) {
|
||||
json[r'fileSizeInByte'] = this.fileSizeInByte;
|
||||
} else {
|
||||
// json[r'fileSizeInByte'] = null;
|
||||
}
|
||||
if (this.focalLength != null) {
|
||||
json[r'focalLength'] = this.focalLength;
|
||||
} else {
|
||||
// json[r'focalLength'] = null;
|
||||
}
|
||||
if (this.fps != null) {
|
||||
json[r'fps'] = this.fps;
|
||||
} else {
|
||||
// json[r'fps'] = null;
|
||||
}
|
||||
if (this.iso != null) {
|
||||
json[r'iso'] = this.iso;
|
||||
} else {
|
||||
// json[r'iso'] = null;
|
||||
}
|
||||
if (this.latitude != null) {
|
||||
json[r'latitude'] = this.latitude;
|
||||
} else {
|
||||
// json[r'latitude'] = null;
|
||||
}
|
||||
if (this.lensModel != null) {
|
||||
json[r'lensModel'] = this.lensModel;
|
||||
} else {
|
||||
// json[r'lensModel'] = null;
|
||||
}
|
||||
if (this.longitude != null) {
|
||||
json[r'longitude'] = this.longitude;
|
||||
} else {
|
||||
// json[r'longitude'] = null;
|
||||
}
|
||||
if (this.make != null) {
|
||||
json[r'make'] = this.make;
|
||||
} else {
|
||||
// json[r'make'] = null;
|
||||
}
|
||||
if (this.model != null) {
|
||||
json[r'model'] = this.model;
|
||||
} else {
|
||||
// json[r'model'] = null;
|
||||
}
|
||||
if (this.modifyDate != null) {
|
||||
json[r'modifyDate'] = this.modifyDate!.toUtc().toIso8601String();
|
||||
} else {
|
||||
// json[r'modifyDate'] = null;
|
||||
}
|
||||
if (this.orientation != null) {
|
||||
json[r'orientation'] = this.orientation;
|
||||
} else {
|
||||
// json[r'orientation'] = null;
|
||||
}
|
||||
if (this.profileDescription != null) {
|
||||
json[r'profileDescription'] = this.profileDescription;
|
||||
} else {
|
||||
// json[r'profileDescription'] = null;
|
||||
}
|
||||
if (this.projectionType != null) {
|
||||
json[r'projectionType'] = this.projectionType;
|
||||
} else {
|
||||
// json[r'projectionType'] = null;
|
||||
}
|
||||
if (this.rating != null) {
|
||||
json[r'rating'] = this.rating;
|
||||
} else {
|
||||
// json[r'rating'] = null;
|
||||
}
|
||||
if (this.state != null) {
|
||||
json[r'state'] = this.state;
|
||||
} else {
|
||||
// json[r'state'] = null;
|
||||
}
|
||||
if (this.timeZone != null) {
|
||||
json[r'timeZone'] = this.timeZone;
|
||||
} else {
|
||||
// json[r'timeZone'] = null;
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [SyncAssetExifV1] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static SyncAssetExifV1? fromJson(dynamic value) {
|
||||
upgradeDto(value, "SyncAssetExifV1");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return SyncAssetExifV1(
|
||||
assetId: mapValueOfType<String>(json, r'assetId')!,
|
||||
city: mapValueOfType<String>(json, r'city'),
|
||||
country: mapValueOfType<String>(json, r'country'),
|
||||
dateTimeOriginal: mapDateTime(json, r'dateTimeOriginal', r''),
|
||||
description: mapValueOfType<String>(json, r'description'),
|
||||
exifImageHeight: mapValueOfType<int>(json, r'exifImageHeight'),
|
||||
exifImageWidth: mapValueOfType<int>(json, r'exifImageWidth'),
|
||||
exposureTime: mapValueOfType<String>(json, r'exposureTime'),
|
||||
fNumber: mapValueOfType<int>(json, r'fNumber'),
|
||||
fileSizeInByte: mapValueOfType<int>(json, r'fileSizeInByte'),
|
||||
focalLength: mapValueOfType<int>(json, r'focalLength'),
|
||||
fps: mapValueOfType<int>(json, r'fps'),
|
||||
iso: mapValueOfType<int>(json, r'iso'),
|
||||
latitude: mapValueOfType<int>(json, r'latitude'),
|
||||
lensModel: mapValueOfType<String>(json, r'lensModel'),
|
||||
longitude: mapValueOfType<int>(json, r'longitude'),
|
||||
make: mapValueOfType<String>(json, r'make'),
|
||||
model: mapValueOfType<String>(json, r'model'),
|
||||
modifyDate: mapDateTime(json, r'modifyDate', r''),
|
||||
orientation: mapValueOfType<String>(json, r'orientation'),
|
||||
profileDescription: mapValueOfType<String>(json, r'profileDescription'),
|
||||
projectionType: mapValueOfType<String>(json, r'projectionType'),
|
||||
rating: mapValueOfType<int>(json, r'rating'),
|
||||
state: mapValueOfType<String>(json, r'state'),
|
||||
timeZone: mapValueOfType<String>(json, r'timeZone'),
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<SyncAssetExifV1> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <SyncAssetExifV1>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = SyncAssetExifV1.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, SyncAssetExifV1> mapFromJson(dynamic json) {
|
||||
final map = <String, SyncAssetExifV1>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = SyncAssetExifV1.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of SyncAssetExifV1-objects as value to a dart map
|
||||
static Map<String, List<SyncAssetExifV1>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<SyncAssetExifV1>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = SyncAssetExifV1.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'assetId',
|
||||
'city',
|
||||
'country',
|
||||
'dateTimeOriginal',
|
||||
'description',
|
||||
'exifImageHeight',
|
||||
'exifImageWidth',
|
||||
'exposureTime',
|
||||
'fNumber',
|
||||
'fileSizeInByte',
|
||||
'focalLength',
|
||||
'fps',
|
||||
'iso',
|
||||
'latitude',
|
||||
'lensModel',
|
||||
'longitude',
|
||||
'make',
|
||||
'model',
|
||||
'modifyDate',
|
||||
'orientation',
|
||||
'profileDescription',
|
||||
'projectionType',
|
||||
'rating',
|
||||
'state',
|
||||
'timeZone',
|
||||
};
|
||||
}
|
||||
|
mobile/openapi/lib/model/sync_asset_v1.dart (generated, new file): 279 lines
@ -0,0 +1,279 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.18
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class SyncAssetV1 {
|
||||
/// Returns a new [SyncAssetV1] instance.
|
||||
SyncAssetV1({
|
||||
required this.checksum,
|
||||
required this.deletedAt,
|
||||
required this.fileCreatedAt,
|
||||
required this.fileModifiedAt,
|
||||
required this.id,
|
||||
required this.isFavorite,
|
||||
required this.isVisible,
|
||||
required this.localDateTime,
|
||||
required this.ownerId,
|
||||
required this.thumbhash,
|
||||
required this.type,
|
||||
});
|
||||
|
||||
String checksum;
|
||||
|
||||
DateTime? deletedAt;
|
||||
|
||||
DateTime? fileCreatedAt;
|
||||
|
||||
DateTime? fileModifiedAt;
|
||||
|
||||
String id;
|
||||
|
||||
bool isFavorite;
|
||||
|
||||
bool isVisible;
|
||||
|
||||
DateTime? localDateTime;
|
||||
|
||||
String ownerId;
|
||||
|
||||
String? thumbhash;
|
||||
|
||||
SyncAssetV1TypeEnum type;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is SyncAssetV1 &&
|
||||
other.checksum == checksum &&
|
||||
other.deletedAt == deletedAt &&
|
||||
other.fileCreatedAt == fileCreatedAt &&
|
||||
other.fileModifiedAt == fileModifiedAt &&
|
||||
other.id == id &&
|
||||
other.isFavorite == isFavorite &&
|
||||
other.isVisible == isVisible &&
|
||||
other.localDateTime == localDateTime &&
|
||||
other.ownerId == ownerId &&
|
||||
other.thumbhash == thumbhash &&
|
||||
other.type == type;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(checksum.hashCode) +
|
||||
(deletedAt == null ? 0 : deletedAt!.hashCode) +
|
||||
(fileCreatedAt == null ? 0 : fileCreatedAt!.hashCode) +
|
||||
(fileModifiedAt == null ? 0 : fileModifiedAt!.hashCode) +
|
||||
(id.hashCode) +
|
||||
(isFavorite.hashCode) +
|
||||
(isVisible.hashCode) +
|
||||
(localDateTime == null ? 0 : localDateTime!.hashCode) +
|
||||
(ownerId.hashCode) +
|
||||
(thumbhash == null ? 0 : thumbhash!.hashCode) +
|
||||
(type.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'SyncAssetV1[checksum=$checksum, deletedAt=$deletedAt, fileCreatedAt=$fileCreatedAt, fileModifiedAt=$fileModifiedAt, id=$id, isFavorite=$isFavorite, isVisible=$isVisible, localDateTime=$localDateTime, ownerId=$ownerId, thumbhash=$thumbhash, type=$type]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'checksum'] = this.checksum;
|
||||
if (this.deletedAt != null) {
|
||||
json[r'deletedAt'] = this.deletedAt!.toUtc().toIso8601String();
|
||||
} else {
|
||||
// json[r'deletedAt'] = null;
|
||||
}
|
||||
if (this.fileCreatedAt != null) {
|
||||
json[r'fileCreatedAt'] = this.fileCreatedAt!.toUtc().toIso8601String();
|
||||
} else {
|
||||
// json[r'fileCreatedAt'] = null;
|
||||
}
|
||||
if (this.fileModifiedAt != null) {
|
||||
json[r'fileModifiedAt'] = this.fileModifiedAt!.toUtc().toIso8601String();
|
||||
} else {
|
||||
// json[r'fileModifiedAt'] = null;
|
||||
}
|
||||
json[r'id'] = this.id;
|
||||
json[r'isFavorite'] = this.isFavorite;
|
||||
json[r'isVisible'] = this.isVisible;
|
||||
if (this.localDateTime != null) {
|
||||
json[r'localDateTime'] = this.localDateTime!.toUtc().toIso8601String();
|
||||
} else {
|
||||
// json[r'localDateTime'] = null;
|
||||
}
|
||||
json[r'ownerId'] = this.ownerId;
|
||||
if (this.thumbhash != null) {
|
||||
json[r'thumbhash'] = this.thumbhash;
|
||||
} else {
|
||||
// json[r'thumbhash'] = null;
|
||||
}
|
||||
json[r'type'] = this.type;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [SyncAssetV1] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static SyncAssetV1? fromJson(dynamic value) {
|
||||
upgradeDto(value, "SyncAssetV1");
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return SyncAssetV1(
|
||||
checksum: mapValueOfType<String>(json, r'checksum')!,
|
||||
deletedAt: mapDateTime(json, r'deletedAt', r''),
|
||||
fileCreatedAt: mapDateTime(json, r'fileCreatedAt', r''),
|
||||
fileModifiedAt: mapDateTime(json, r'fileModifiedAt', r''),
|
||||
id: mapValueOfType<String>(json, r'id')!,
|
||||
isFavorite: mapValueOfType<bool>(json, r'isFavorite')!,
|
||||
isVisible: mapValueOfType<bool>(json, r'isVisible')!,
|
||||
localDateTime: mapDateTime(json, r'localDateTime', r''),
|
||||
ownerId: mapValueOfType<String>(json, r'ownerId')!,
|
||||
thumbhash: mapValueOfType<String>(json, r'thumbhash'),
|
||||
type: SyncAssetV1TypeEnum.fromJson(json[r'type'])!,
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<SyncAssetV1> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <SyncAssetV1>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = SyncAssetV1.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, SyncAssetV1> mapFromJson(dynamic json) {
|
||||
final map = <String, SyncAssetV1>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = SyncAssetV1.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of SyncAssetV1-objects as value to a dart map
|
||||
static Map<String, List<SyncAssetV1>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<SyncAssetV1>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = SyncAssetV1.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'checksum',
|
||||
'deletedAt',
|
||||
'fileCreatedAt',
|
||||
'fileModifiedAt',
|
||||
'id',
|
||||
'isFavorite',
|
||||
'isVisible',
|
||||
'localDateTime',
|
||||
'ownerId',
|
||||
'thumbhash',
|
||||
'type',
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
class SyncAssetV1TypeEnum {
|
||||
/// Instantiate a new enum with the provided [value].
|
||||
const SyncAssetV1TypeEnum._(this.value);
|
||||
|
||||
/// The underlying value of this enum member.
|
||||
final String value;
|
||||
|
||||
@override
|
||||
String toString() => value;
|
||||
|
||||
String toJson() => value;
|
||||
|
||||
static const IMAGE = SyncAssetV1TypeEnum._(r'IMAGE');
|
||||
static const VIDEO = SyncAssetV1TypeEnum._(r'VIDEO');
|
||||
static const AUDIO = SyncAssetV1TypeEnum._(r'AUDIO');
|
||||
static const OTHER = SyncAssetV1TypeEnum._(r'OTHER');
|
||||
|
||||
/// List of all possible values in this [enum][SyncAssetV1TypeEnum].
|
||||
static const values = <SyncAssetV1TypeEnum>[
|
||||
IMAGE,
|
||||
VIDEO,
|
||||
AUDIO,
|
||||
OTHER,
|
||||
];
|
||||
|
||||
static SyncAssetV1TypeEnum? fromJson(dynamic value) => SyncAssetV1TypeEnumTypeTransformer().decode(value);
|
||||
|
||||
static List<SyncAssetV1TypeEnum> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <SyncAssetV1TypeEnum>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = SyncAssetV1TypeEnum.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
}
|
||||
|
||||
/// Transformation class that can [encode] an instance of [SyncAssetV1TypeEnum] to String,
|
||||
/// and [decode] dynamic data back to [SyncAssetV1TypeEnum].
|
||||
class SyncAssetV1TypeEnumTypeTransformer {
|
||||
factory SyncAssetV1TypeEnumTypeTransformer() => _instance ??= const SyncAssetV1TypeEnumTypeTransformer._();
|
||||
|
||||
const SyncAssetV1TypeEnumTypeTransformer._();
|
||||
|
||||
String encode(SyncAssetV1TypeEnum data) => data.value;
|
||||
|
||||
/// Decodes a [dynamic value][data] to a SyncAssetV1TypeEnum.
|
||||
///
|
||||
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
|
||||
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
|
||||
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
|
||||
///
|
||||
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
|
||||
/// and users are still using an old app with the old code.
|
||||
SyncAssetV1TypeEnum? decode(dynamic data, {bool allowNull = true}) {
|
||||
if (data != null) {
|
||||
switch (data) {
|
||||
case r'IMAGE': return SyncAssetV1TypeEnum.IMAGE;
|
||||
case r'VIDEO': return SyncAssetV1TypeEnum.VIDEO;
|
||||
case r'AUDIO': return SyncAssetV1TypeEnum.AUDIO;
|
||||
case r'OTHER': return SyncAssetV1TypeEnum.OTHER;
|
||||
default:
|
||||
if (!allowNull) {
|
||||
throw ArgumentError('Unknown enum value to decode: $data');
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Singleton [SyncAssetV1TypeEnumTypeTransformer] instance.
|
||||
static SyncAssetV1TypeEnumTypeTransformer? _instance;
|
||||
}
|
||||
|
||||
|
mobile/openapi/lib/model/sync_entity_type.dart (generated): 18 lines changed
@ -27,6 +27,12 @@ class SyncEntityType {
|
||||
static const userDeleteV1 = SyncEntityType._(r'UserDeleteV1');
|
||||
static const partnerV1 = SyncEntityType._(r'PartnerV1');
|
||||
static const partnerDeleteV1 = SyncEntityType._(r'PartnerDeleteV1');
|
||||
static const assetV1 = SyncEntityType._(r'AssetV1');
|
||||
static const assetDeleteV1 = SyncEntityType._(r'AssetDeleteV1');
|
||||
static const assetExifV1 = SyncEntityType._(r'AssetExifV1');
|
||||
static const partnerAssetV1 = SyncEntityType._(r'PartnerAssetV1');
|
||||
static const partnerAssetDeleteV1 = SyncEntityType._(r'PartnerAssetDeleteV1');
|
||||
static const partnerAssetExifV1 = SyncEntityType._(r'PartnerAssetExifV1');
|
||||
|
||||
/// List of all possible values in this [enum][SyncEntityType].
|
||||
static const values = <SyncEntityType>[
|
||||
@ -34,6 +40,12 @@ class SyncEntityType {
|
||||
userDeleteV1,
|
||||
partnerV1,
|
||||
partnerDeleteV1,
|
||||
assetV1,
|
||||
assetDeleteV1,
|
||||
assetExifV1,
|
||||
partnerAssetV1,
|
||||
partnerAssetDeleteV1,
|
||||
partnerAssetExifV1,
|
||||
];
|
||||
|
||||
static SyncEntityType? fromJson(dynamic value) => SyncEntityTypeTypeTransformer().decode(value);
|
||||
@ -76,6 +88,12 @@ class SyncEntityTypeTypeTransformer {
|
||||
case r'UserDeleteV1': return SyncEntityType.userDeleteV1;
|
||||
case r'PartnerV1': return SyncEntityType.partnerV1;
|
||||
case r'PartnerDeleteV1': return SyncEntityType.partnerDeleteV1;
|
||||
case r'AssetV1': return SyncEntityType.assetV1;
|
||||
case r'AssetDeleteV1': return SyncEntityType.assetDeleteV1;
|
||||
case r'AssetExifV1': return SyncEntityType.assetExifV1;
|
||||
case r'PartnerAssetV1': return SyncEntityType.partnerAssetV1;
|
||||
case r'PartnerAssetDeleteV1': return SyncEntityType.partnerAssetDeleteV1;
|
||||
case r'PartnerAssetExifV1': return SyncEntityType.partnerAssetExifV1;
|
||||
default:
|
||||
if (!allowNull) {
|
||||
throw ArgumentError('Unknown enum value to decode: $data');
|
||||
|
mobile/openapi/lib/model/sync_request_type.dart (generated): 12 lines changed
@ -25,11 +25,19 @@ class SyncRequestType {
|
||||
|
||||
static const usersV1 = SyncRequestType._(r'UsersV1');
|
||||
static const partnersV1 = SyncRequestType._(r'PartnersV1');
|
||||
static const assetsV1 = SyncRequestType._(r'AssetsV1');
|
||||
static const assetExifsV1 = SyncRequestType._(r'AssetExifsV1');
|
||||
static const partnerAssetsV1 = SyncRequestType._(r'PartnerAssetsV1');
|
||||
static const partnerAssetExifsV1 = SyncRequestType._(r'PartnerAssetExifsV1');
|
||||
|
||||
/// List of all possible values in this [enum][SyncRequestType].
|
||||
static const values = <SyncRequestType>[
|
||||
usersV1,
|
||||
partnersV1,
|
||||
assetsV1,
|
||||
assetExifsV1,
|
||||
partnerAssetsV1,
|
||||
partnerAssetExifsV1,
|
||||
];
|
||||
|
||||
static SyncRequestType? fromJson(dynamic value) => SyncRequestTypeTypeTransformer().decode(value);
|
||||
@ -70,6 +78,10 @@ class SyncRequestTypeTypeTransformer {
|
||||
switch (data) {
|
||||
case r'UsersV1': return SyncRequestType.usersV1;
|
||||
case r'PartnersV1': return SyncRequestType.partnersV1;
|
||||
case r'AssetsV1': return SyncRequestType.assetsV1;
|
||||
case r'AssetExifsV1': return SyncRequestType.assetExifsV1;
|
||||
case r'PartnerAssetsV1': return SyncRequestType.partnerAssetsV1;
|
||||
case r'PartnerAssetExifsV1': return SyncRequestType.partnerAssetExifsV1;
|
||||
default:
|
||||
if (!allowNull) {
|
||||
throw ArgumentError('Unknown enum value to decode: $data');
|
||||
|
@ -12049,12 +12049,228 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SyncAssetDeleteV1": {
|
||||
"properties": {
|
||||
"assetId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"assetId"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SyncAssetExifV1": {
|
||||
"properties": {
|
||||
"assetId": {
|
||||
"type": "string"
|
||||
},
|
||||
"city": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"country": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"dateTimeOriginal": {
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"exifImageHeight": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"exifImageWidth": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"exposureTime": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"fNumber": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"fileSizeInByte": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"focalLength": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"fps": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"iso": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"latitude": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"lensModel": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"longitude": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"make": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"model": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"modifyDate": {
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"orientation": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"profileDescription": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"projectionType": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"rating": {
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
},
|
||||
"state": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"timeZone": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"assetId",
|
||||
"city",
|
||||
"country",
|
||||
"dateTimeOriginal",
|
||||
"description",
|
||||
"exifImageHeight",
|
||||
"exifImageWidth",
|
||||
"exposureTime",
|
||||
"fNumber",
|
||||
"fileSizeInByte",
|
||||
"focalLength",
|
||||
"fps",
|
||||
"iso",
|
||||
"latitude",
|
||||
"lensModel",
|
||||
"longitude",
|
||||
"make",
|
||||
"model",
|
||||
"modifyDate",
|
||||
"orientation",
|
||||
"profileDescription",
|
||||
"projectionType",
|
||||
"rating",
|
||||
"state",
|
||||
"timeZone"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SyncAssetV1": {
|
||||
"properties": {
|
||||
"checksum": {
|
||||
"type": "string"
|
||||
},
|
||||
"deletedAt": {
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"fileCreatedAt": {
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"fileModifiedAt": {
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"isFavorite": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"isVisible": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"localDateTime": {
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"ownerId": {
|
||||
"type": "string"
|
||||
},
|
||||
"thumbhash": {
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"IMAGE",
|
||||
"VIDEO",
|
||||
"AUDIO",
|
||||
"OTHER"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"checksum",
|
||||
"deletedAt",
|
||||
"fileCreatedAt",
|
||||
"fileModifiedAt",
|
||||
"id",
|
||||
"isFavorite",
|
||||
"isVisible",
|
||||
"localDateTime",
|
||||
"ownerId",
|
||||
"thumbhash",
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SyncEntityType": {
|
||||
"enum": [
|
||||
"UserV1",
|
||||
"UserDeleteV1",
|
||||
"PartnerV1",
|
||||
"PartnerDeleteV1"
|
||||
"PartnerDeleteV1",
|
||||
"AssetV1",
|
||||
"AssetDeleteV1",
|
||||
"AssetExifV1",
|
||||
"PartnerAssetV1",
|
||||
"PartnerAssetDeleteV1",
|
||||
"PartnerAssetExifV1"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
@ -12095,7 +12311,11 @@
|
||||
"SyncRequestType": {
|
||||
"enum": [
|
||||
"UsersV1",
|
||||
"PartnersV1"
|
||||
"PartnersV1",
|
||||
"AssetsV1",
|
||||
"AssetExifsV1",
|
||||
"PartnerAssetsV1",
|
||||
"PartnerAssetExifsV1"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
|
@@ -3647,11 +3647,21 @@ export enum SyncEntityType {
  UserV1 = "UserV1",
  UserDeleteV1 = "UserDeleteV1",
  PartnerV1 = "PartnerV1",
  PartnerDeleteV1 = "PartnerDeleteV1"
  PartnerDeleteV1 = "PartnerDeleteV1",
  AssetV1 = "AssetV1",
  AssetDeleteV1 = "AssetDeleteV1",
  AssetExifV1 = "AssetExifV1",
  PartnerAssetV1 = "PartnerAssetV1",
  PartnerAssetDeleteV1 = "PartnerAssetDeleteV1",
  PartnerAssetExifV1 = "PartnerAssetExifV1"
}
export enum SyncRequestType {
  UsersV1 = "UsersV1",
  PartnersV1 = "PartnersV1"
  PartnersV1 = "PartnersV1",
  AssetsV1 = "AssetsV1",
  AssetExifsV1 = "AssetExifsV1",
  PartnerAssetsV1 = "PartnerAssetsV1",
  PartnerAssetExifsV1 = "PartnerAssetExifsV1"
}
export enum TranscodeHWAccel {
  Nvenc = "nvenc",
server/package-lock.json (generated): 14 lines changed
@ -17,7 +17,6 @@
|
||||
"@nestjs/platform-socket.io": "^11.0.4",
|
||||
"@nestjs/schedule": "^5.0.0",
|
||||
"@nestjs/swagger": "^11.0.2",
|
||||
"@nestjs/typeorm": "^11.0.0",
|
||||
"@nestjs/websockets": "^11.0.4",
|
||||
"@opentelemetry/auto-instrumentations-node": "^0.56.0",
|
||||
"@opentelemetry/context-async-hooks": "^1.24.0",
|
||||
@ -2920,19 +2919,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@nestjs/typeorm": {
|
||||
"version": "11.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@nestjs/typeorm/-/typeorm-11.0.0.tgz",
|
||||
"integrity": "sha512-SOeUQl70Lb2OfhGkvnh4KXWlsd+zA08RuuQgT7kKbzivngxzSo1Oc7Usu5VxCxACQC9wc2l9esOHILSJeK7rJA==",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"@nestjs/common": "^10.0.0 || ^11.0.0",
|
||||
"@nestjs/core": "^10.0.0 || ^11.0.0",
|
||||
"reflect-metadata": "^0.1.13 || ^0.2.0",
|
||||
"rxjs": "^7.2.0",
|
||||
"typeorm": "^0.3.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@nestjs/websockets": {
|
||||
"version": "11.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@nestjs/websockets/-/websockets-11.0.11.tgz",
|
||||
|
@@ -43,7 +43,6 @@
    "@nestjs/platform-socket.io": "^11.0.4",
    "@nestjs/schedule": "^5.0.0",
    "@nestjs/swagger": "^11.0.2",
    "@nestjs/typeorm": "^11.0.0",
    "@nestjs/websockets": "^11.0.4",
    "@opentelemetry/auto-instrumentations-node": "^0.56.0",
    "@opentelemetry/context-async-hooks": "^1.24.0",
@@ -4,13 +4,13 @@ import { APIKeyCreateDto, APIKeyCreateResponseDto, APIKeyResponseDto, APIKeyUpda
import { AuthDto } from 'src/dtos/auth.dto';
import { Permission } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { APIKeyService } from 'src/services/api-key.service';
import { ApiKeyService } from 'src/services/api-key.service';
import { UUIDParamDto } from 'src/validation';

@ApiTags('API Keys')
@Controller('api-keys')
export class APIKeyController {
  constructor(private service: APIKeyService) {}
  constructor(private service: ApiKeyService) {}

  @Post()
  @Authenticated({ permission: Permission.API_KEY_CREATE })
@@ -29,6 +29,15 @@ export type AuthApiKey = {
  permissions: Permission[];
};

export type ApiKey = {
  id: string;
  name: string;
  userId: string;
  createdAt: Date;
  updatedAt: Date;
  permissions: Permission[];
};

export type User = {
  id: string;
  name: string;
@@ -108,4 +117,46 @@ export const columns = {
  userDto: ['id', 'name', 'email', 'profileImagePath', 'profileChangedAt'],
  tagDto: ['id', 'value', 'createdAt', 'updatedAt', 'color', 'parentId'],
  apiKey: ['id', 'name', 'userId', 'createdAt', 'updatedAt', 'permissions'],
  syncAsset: [
    'id',
    'ownerId',
    'thumbhash',
    'checksum',
    'fileCreatedAt',
    'fileModifiedAt',
    'localDateTime',
    'type',
    'deletedAt',
    'isFavorite',
    'isVisible',
    'updateId',
  ],
  syncAssetExif: [
    'exif.assetId',
    'exif.description',
    'exif.exifImageWidth',
    'exif.exifImageHeight',
    'exif.fileSizeInByte',
    'exif.orientation',
    'exif.dateTimeOriginal',
    'exif.modifyDate',
    'exif.timeZone',
    'exif.latitude',
    'exif.longitude',
    'exif.projectionType',
    'exif.city',
    'exif.state',
    'exif.country',
    'exif.make',
    'exif.model',
    'exif.lensModel',
    'exif.fNumber',
    'exif.focalLength',
    'exif.iso',
    'exif.exposureTime',
    'exif.profileDescription',
    'exif.rating',
    'exif.fps',
    'exif.updateId',
  ],
} as const;
server/src/db.d.ts (vendored): 10 lines changed
@@ -119,6 +119,13 @@ export interface AssetJobStatus {
  thumbnailAt: Timestamp | null;
}

export interface AssetsAudit {
  deletedAt: Generated<Timestamp>;
  id: Generated<string>;
  assetId: string;
  ownerId: string;
}

export interface Assets {
  checksum: Buffer;
  createdAt: Generated<Timestamp>;
@@ -168,6 +175,8 @@ export interface Audit {

export interface Exif {
  assetId: string;
  updateId: Generated<string>;
  updatedAt: Generated<Timestamp>;
  autoStackId: string | null;
  bitsPerSample: number | null;
  city: string | null;
@@ -459,6 +468,7 @@ export interface DB {
  asset_job_status: AssetJobStatus;
  asset_stack: AssetStack;
  assets: Assets;
  assets_audit: AssetsAudit;
  audit: Audit;
  exif: Exif;
  face_search: FaceSearch;
@@ -102,7 +102,7 @@ const mapStack = (entity: AssetEntity) => {
};

// if an asset is jsonified in the DB before being returned, its buffer fields will be hex-encoded strings
const hexOrBufferToBase64 = (encoded: string | Buffer) => {
export const hexOrBufferToBase64 = (encoded: string | Buffer) => {
  if (typeof encoded === 'string') {
    return Buffer.from(encoded.slice(2), 'hex').toString('base64');
  }
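Exporting hexOrBufferToBase64 makes the hex-vs-Buffer handling reusable elsewhere (the sync path presumably needs it for asset checksums). A minimal usage sketch with example values only; the Buffer branch sits outside this hunk, so its behaviour is an assumption:

declare const hexOrBufferToBase64: (encoded: string | Buffer) => string;

// A bytea column that was jsonified inside Postgres arrives as '\x…' hex text;
// the helper drops the two-character prefix and re-encodes the bytes as base64.
hexOrBufferToBase64(String.raw`\xdeadbeef`); // => '3q2+7w=='

// A column selected directly comes back as a Buffer; the branch not shown here
// presumably returns encoded.toString('base64'), giving the same value.
hexOrBufferToBase64(Buffer.from('deadbeef', 'hex'));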
@@ -1,7 +1,7 @@
import { ApiProperty } from '@nestjs/swagger';
import { IsEnum, IsInt, IsPositive, IsString } from 'class-validator';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { AssetType, SyncEntityType, SyncRequestType } from 'src/enum';
import { Optional, ValidateDate, ValidateUUID } from 'src/validation';

export class AssetFullSyncDto {
@@ -56,11 +56,73 @@ export class SyncPartnerDeleteV1 {
  sharedWithId!: string;
}

export class SyncAssetV1 {
  id!: string;
  ownerId!: string;
  thumbhash!: string | null;
  checksum!: string;
  fileCreatedAt!: Date | null;
  fileModifiedAt!: Date | null;
  localDateTime!: Date | null;
  type!: AssetType;
  deletedAt!: Date | null;
  isFavorite!: boolean;
  isVisible!: boolean;
}

export class SyncAssetDeleteV1 {
  assetId!: string;
}

export class SyncAssetExifV1 {
  assetId!: string;
  description!: string | null;
  @ApiProperty({ type: 'integer' })
  exifImageWidth!: number | null;
  @ApiProperty({ type: 'integer' })
  exifImageHeight!: number | null;
  @ApiProperty({ type: 'integer' })
  fileSizeInByte!: number | null;
  orientation!: string | null;
  dateTimeOriginal!: Date | null;
  modifyDate!: Date | null;
  timeZone!: string | null;
  @ApiProperty({ type: 'integer' })
  latitude!: number | null;
  @ApiProperty({ type: 'integer' })
  longitude!: number | null;
  projectionType!: string | null;
  city!: string | null;
  state!: string | null;
  country!: string | null;
  make!: string | null;
  model!: string | null;
  lensModel!: string | null;
  @ApiProperty({ type: 'integer' })
  fNumber!: number | null;
  @ApiProperty({ type: 'integer' })
  focalLength!: number | null;
  @ApiProperty({ type: 'integer' })
  iso!: number | null;
  exposureTime!: string | null;
  profileDescription!: string | null;
  @ApiProperty({ type: 'integer' })
  rating!: number | null;
  @ApiProperty({ type: 'integer' })
  fps!: number | null;
}

export type SyncItem = {
  [SyncEntityType.UserV1]: SyncUserV1;
  [SyncEntityType.UserDeleteV1]: SyncUserDeleteV1;
  [SyncEntityType.PartnerV1]: SyncPartnerV1;
  [SyncEntityType.PartnerDeleteV1]: SyncPartnerDeleteV1;
  [SyncEntityType.AssetV1]: SyncAssetV1;
  [SyncEntityType.AssetDeleteV1]: SyncAssetDeleteV1;
  [SyncEntityType.AssetExifV1]: SyncAssetExifV1;
  [SyncEntityType.PartnerAssetV1]: SyncAssetV1;
  [SyncEntityType.PartnerAssetDeleteV1]: SyncAssetDeleteV1;
  [SyncEntityType.PartnerAssetExifV1]: SyncAssetExifV1;
};

const responseDtos = [
@@ -69,6 +131,9 @@ const responseDtos = [
  SyncUserDeleteV1,
  SyncPartnerV1,
  SyncPartnerDeleteV1,
  SyncAssetV1,
  SyncAssetDeleteV1,
  SyncAssetExifV1,
];

export const extraSyncModels = responseDtos;
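SyncItem ties each SyncEntityType key to its payload DTO, which is what lets stream consumers stay typed. A sketch of turning that map into a discriminated union; the envelope shape and module path are assumptions for illustration, not part of this commit:

import { SyncEntityType } from 'src/enum';
import { SyncItem } from 'src/dtos/sync.dto';

// One line of the sync stream: the entity type plus its matching payload.
type SyncStreamItem = {
  [K in keyof SyncItem]: { type: K; data: SyncItem[K]; ack: string };
}[keyof SyncItem];

const handle = (item: SyncStreamItem) => {
  if (item.type === SyncEntityType.AssetDeleteV1) {
    // data is narrowed to SyncAssetDeleteV1 here
    console.log(`asset ${item.data.assetId} was deleted`);
  }
};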
server/src/entities/asset-audit.entity.ts (new file): 19 lines
@@ -0,0 +1,19 @@
import { Column, CreateDateColumn, Entity, Index, PrimaryColumn } from 'typeorm';

@Entity('assets_audit')
export class AssetAuditEntity {
  @PrimaryColumn({ type: 'uuid', nullable: false, default: () => 'immich_uuid_v7()' })
  id!: string;

  @Index('IDX_assets_audit_asset_id')
  @Column({ type: 'uuid' })
  assetId!: string;

  @Index('IDX_assets_audit_owner_id')
  @Column({ type: 'uuid' })
  ownerId!: string;

  @Index('IDX_assets_audit_deleted_at')
  @CreateDateColumn({ type: 'timestamptz', default: () => 'clock_timestamp()' })
  deletedAt!: Date;
}
@@ -1,5 +1,5 @@
import { AssetEntity } from 'src/entities/asset.entity';
import { Index, JoinColumn, OneToOne, PrimaryColumn } from 'typeorm';
import { Index, JoinColumn, OneToOne, PrimaryColumn, UpdateDateColumn } from 'typeorm';
import { Column } from 'typeorm/decorator/columns/Column.js';
import { Entity } from 'typeorm/decorator/entity/Entity.js';

@@ -12,6 +12,13 @@ export class ExifEntity {
  @PrimaryColumn()
  assetId!: string;

  @UpdateDateColumn({ type: 'timestamptz', default: () => 'clock_timestamp()' })
  updatedAt?: Date;

  @Index('IDX_asset_exif_update_id')
  @Column({ type: 'uuid', nullable: false, default: () => 'immich_uuid_v7()' })
  updateId?: string;

  /* General info */
  @Column({ type: 'text', default: '' })
  description!: string; // or caption
@@ -557,11 +557,24 @@ export enum DatabaseLock {
export enum SyncRequestType {
  UsersV1 = 'UsersV1',
  PartnersV1 = 'PartnersV1',
  AssetsV1 = 'AssetsV1',
  AssetExifsV1 = 'AssetExifsV1',
  PartnerAssetsV1 = 'PartnerAssetsV1',
  PartnerAssetExifsV1 = 'PartnerAssetExifsV1',
}

export enum SyncEntityType {
  UserV1 = 'UserV1',
  UserDeleteV1 = 'UserDeleteV1',

  PartnerV1 = 'PartnerV1',
  PartnerDeleteV1 = 'PartnerDeleteV1',

  AssetV1 = 'AssetV1',
  AssetDeleteV1 = 'AssetDeleteV1',
  AssetExifV1 = 'AssetExifV1',

  PartnerAssetV1 = 'PartnerAssetV1',
  PartnerAssetDeleteV1 = 'PartnerAssetDeleteV1',
  PartnerAssetExifV1 = 'PartnerAssetExifV1',
}
server/src/migrations/1741191762113-AssetAuditTable.ts (new file): 37 lines
@@ -0,0 +1,37 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class AssetAuditTable1741191762113 implements MigrationInterface {
  name = 'AssetAuditTable1741191762113'

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`CREATE TABLE "assets_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "assetId" uuid NOT NULL, "ownerId" uuid NOT NULL, "deletedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT clock_timestamp(), CONSTRAINT "PK_99bd5c015f81a641927a32b4212" PRIMARY KEY ("id"))`);
    await queryRunner.query(`CREATE INDEX "IDX_assets_audit_asset_id" ON "assets_audit" ("assetId") `);
    await queryRunner.query(`CREATE INDEX "IDX_assets_audit_owner_id" ON "assets_audit" ("ownerId") `);
    await queryRunner.query(`CREATE INDEX "IDX_assets_audit_deleted_at" ON "assets_audit" ("deletedAt") `);
    await queryRunner.query(`CREATE OR REPLACE FUNCTION assets_delete_audit() RETURNS TRIGGER AS
      $$
      BEGIN
        INSERT INTO assets_audit ("assetId", "ownerId")
        SELECT "id", "ownerId"
        FROM OLD;
        RETURN NULL;
      END;
      $$ LANGUAGE plpgsql`
    );
    await queryRunner.query(`CREATE OR REPLACE TRIGGER assets_delete_audit
      AFTER DELETE ON assets
      REFERENCING OLD TABLE AS OLD
      FOR EACH STATEMENT
      EXECUTE FUNCTION assets_delete_audit();
    `);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP TRIGGER assets_delete_audit`);
    await queryRunner.query(`DROP FUNCTION assets_delete_audit`);
    await queryRunner.query(`DROP INDEX "IDX_assets_audit_deleted_at"`);
    await queryRunner.query(`DROP INDEX "IDX_assets_audit_owner_id"`);
    await queryRunner.query(`DROP INDEX "IDX_assets_audit_asset_id"`);
    await queryRunner.query(`DROP TABLE "assets_audit"`);
  }
}
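Because the trigger is FOR EACH STATEMENT and reads the OLD transition table, a bulk delete produces all of its audit rows in one insert. A quick way to sanity-check the flow against a migrated scratch database; the asset id below is a placeholder:

// Sketch only: run from any QueryRunner (or the equivalent psql statements).
await queryRunner.query(`DELETE FROM assets WHERE "id" = '00000000-0000-7000-8000-000000000000'`);

// One audit row per deleted asset, stamped by clock_timestamp() at delete time.
const rows = await queryRunner.query(
  `SELECT "assetId", "ownerId", "deletedAt" FROM assets_audit ORDER BY "deletedAt" DESC LIMIT 5`,
);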
@@ -0,0 +1,50 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class FixAssetAndUserCascadeConditions1741280328985 implements MigrationInterface {
  name = 'FixAssetAndUserCascadeConditions1741280328985';

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`
      CREATE OR REPLACE TRIGGER assets_delete_audit
      AFTER DELETE ON assets
      REFERENCING OLD TABLE AS OLD
      FOR EACH STATEMENT
      WHEN (pg_trigger_depth() = 0)
      EXECUTE FUNCTION assets_delete_audit();`);
    await queryRunner.query(`
      CREATE OR REPLACE TRIGGER users_delete_audit
      AFTER DELETE ON users
      REFERENCING OLD TABLE AS OLD
      FOR EACH STATEMENT
      WHEN (pg_trigger_depth() = 0)
      EXECUTE FUNCTION users_delete_audit();`);
    await queryRunner.query(`
      CREATE OR REPLACE TRIGGER partners_delete_audit
      AFTER DELETE ON partners
      REFERENCING OLD TABLE AS OLD
      FOR EACH STATEMENT
      WHEN (pg_trigger_depth() = 0)
      EXECUTE FUNCTION partners_delete_audit();`);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`
      CREATE OR REPLACE TRIGGER assets_delete_audit
      AFTER DELETE ON assets
      REFERENCING OLD TABLE AS OLD
      FOR EACH STATEMENT
      EXECUTE FUNCTION assets_delete_audit();`);
    await queryRunner.query(`
      CREATE OR REPLACE TRIGGER users_delete_audit
      AFTER DELETE ON users
      REFERENCING OLD TABLE AS OLD
      FOR EACH STATEMENT
      EXECUTE FUNCTION users_delete_audit();`);
    await queryRunner.query(`
      CREATE OR REPLACE TRIGGER partners_delete_audit
      AFTER DELETE ON partners
      REFERENCING OLD TABLE AS OLD
      FOR EACH STATEMENT
      EXECUTE FUNCTION partners_delete_audit();`);
  }
}
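The added WHEN (pg_trigger_depth() = 0) guard limits the audit triggers to deletes issued directly by the application. Deletes that arrive through a foreign-key cascade, or from inside another trigger, run at trigger depth greater than zero and are skipped, which is presumably the cascade problem the migration name refers to. An illustration using the same Kysely calls as the repositories below; ids are placeholders and the ON DELETE CASCADE relationship is assumed from the migration name:

// Direct delete: depth 0, so assets_delete_audit writes a row to assets_audit.
await db.deleteFrom('assets').where('id', '=', asUuid(assetId)).execute();

// Deleting a user removes that user's assets via the FK cascade; those deletes fire
// inside trigger context, so only users_audit records the event and assets_audit
// is not flooded with one row per cascaded asset.
await db.deleteFrom('users').where('id', '=', asUuid(userId)).execute();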
server/src/migrations/1741281344519-AddExifUpdateId.ts (new file): 25 lines
@@ -0,0 +1,25 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class AddExifUpdateId1741281344519 implements MigrationInterface {
  name = 'AddExifUpdateId1741281344519';

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `ALTER TABLE "exif" ADD "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT clock_timestamp()`,
    );
    await queryRunner.query(`ALTER TABLE "exif" ADD "updateId" uuid NOT NULL DEFAULT immich_uuid_v7()`);
    await queryRunner.query(`CREATE INDEX "IDX_asset_exif_update_id" ON "exif" ("updateId") `);
    await queryRunner.query(`
      create trigger asset_exif_updated_at
      before update on exif
      for each row execute procedure updated_at()
    `);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP INDEX "public"."IDX_asset_exif_update_id"`);
    await queryRunner.query(`ALTER TABLE "exif" DROP COLUMN "updateId"`);
    await queryRunner.query(`ALTER TABLE "exif" DROP COLUMN "updatedAt"`);
    await queryRunner.query(`DROP TRIGGER asset_exif_updated_at on exif`);
  }
}
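The asset_exif_updated_at trigger reuses a pre-existing updated_at() procedure that is not part of this diff. For the new IDX_asset_exif_update_id index to drive sync paging, that helper would have to refresh both columns on every update; a rough guess at its shape, for orientation only and not the repository's actual definition:

// Assumed helper, shown only to explain what the trigger relies on.
await queryRunner.query(`
  CREATE OR REPLACE FUNCTION updated_at() RETURNS TRIGGER AS $$
  BEGIN
    NEW."updatedAt" = clock_timestamp();
    NEW."updateId" = immich_uuid_v7();
    RETURN NEW;
  END;
  $$ LANGUAGE plpgsql`);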
@@ -438,8 +438,8 @@ from
  ) as "stacked_assets" on "asset_stack"."id" is not null
where
  "assets"."ownerId" = $1::uuid
  and "isVisible" = $2
  and "updatedAt" <= $3
  and "assets"."isVisible" = $2
  and "assets"."updatedAt" <= $3
  and "assets"."id" > $4
order by
  "assets"."id"
@@ -468,7 +468,7 @@ from
  ) as "stacked_assets" on "asset_stack"."id" is not null
where
  "assets"."ownerId" = any ($1::uuid[])
  and "isVisible" = $2
  and "updatedAt" > $3
  and "assets"."isVisible" = $2
  and "assets"."updatedAt" > $3
limit
  $4
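The only change in these generated query snapshots is that isVisible and updatedAt are now qualified with the assets table. Since exif gains its own updatedAt and updateId columns in this commit, an unqualified reference would become ambiguous in any variant of these queries that also joins exif; the qualified form stays valid either way. A reduced illustration with placeholder setup, simplified from the AssetRepository change below:

import { Kysely } from 'kysely';
import { DB } from 'src/db';

declare const db: Kysely<DB>;
declare const updatedAfter: Date;

// Unqualified: once exif (which now also has "updatedAt") is part of the join,
// Postgres rejects this at runtime: column reference "updatedAt" is ambiguous.
db.selectFrom('assets')
  .innerJoin('exif', 'exif.assetId', 'assets.id')
  .where('updatedAt', '>', updatedAfter)
  .selectAll('assets');

// Qualified, matching the repository change.
db.selectFrom('assets')
  .innerJoin('exif', 'exif.assetId', 'assets.id')
  .where('assets.updatedAt', '>', updatedAfter)
  .selectAll('assets');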
@@ -12,7 +12,7 @@ export class ApiKeyRepository {
  constructor(@InjectKysely() private db: Kysely<DB>) {}

  create(dto: Insertable<ApiKeys>) {
    return this.db.insertInto('api_keys').values(dto).returningAll().executeTakeFirstOrThrow();
    return this.db.insertInto('api_keys').values(dto).returning(columns.apiKey).executeTakeFirstOrThrow();
  }

  async update(userId: string, id: string, dto: Updateable<ApiKeys>) {
@@ -21,7 +21,7 @@ export class ApiKeyRepository {
      .set(dto)
      .where('api_keys.userId', '=', userId)
      .where('id', '=', asUuid(id))
      .returningAll()
      .returning(columns.apiKey)
      .executeTakeFirstOrThrow();
  }
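With returning(columns.apiKey), the RETURNING clause lists only the whitelisted columns from src/database.ts (id, name, userId, createdAt, updatedAt, permissions), so whatever secret or hash column the api_keys table stores is no longer echoed back. A hypothetical call; the values and the key column name are assumptions rather than taken from this diff:

// Sketch: apiKeyRepository, userId and hashedSecret are placeholders.
const created = await apiKeyRepository.create({
  name: 'cli token',
  userId,
  key: hashedSecret, // written to the table, but absent from the returned row
  permissions: [Permission.API_KEY_CREATE],
});
// created has exactly: id, name, userId, createdAt, updatedAt, permissions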
@@ -583,7 +583,7 @@ export class AssetRepository {
    return this.getById(asset.id, { exifInfo: true, faces: { person: true } }) as Promise<AssetEntity>;
  }

  async remove(asset: AssetEntity): Promise<void> {
  async remove(asset: { id: string }): Promise<void> {
    await this.db.deleteFrom('assets').where('id', '=', asUuid(asset.id)).execute();
  }

@@ -1000,8 +1000,8 @@ export class AssetRepository {
      )
      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
      .where('assets.ownerId', '=', asUuid(ownerId))
      .where('isVisible', '=', true)
      .where('updatedAt', '<=', updatedUntil)
      .where('assets.isVisible', '=', true)
      .where('assets.updatedAt', '<=', updatedUntil)
      .$if(!!lastId, (qb) => qb.where('assets.id', '>', lastId!))
      .orderBy('assets.id')
      .limit(limit)
@@ -1028,8 +1028,8 @@ export class AssetRepository {
      )
      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
      .where('assets.ownerId', '=', anyUuid(options.userIds))
      .where('isVisible', '=', true)
      .where('updatedAt', '>', options.updatedAfter)
      .where('assets.isVisible', '=', true)
      .where('assets.updatedAt', '>', options.updatedAfter)
      .limit(options.limit)
      .execute() as any as Promise<AssetEntity[]>;
  }
@ -1,11 +1,12 @@
import { LoggingRepository } from 'src/repositories/logging.repository';
import { EmailRenderRequest, EmailTemplate, NotificationRepository } from 'src/repositories/notification.repository';
import { newFakeLoggingRepository } from 'test/repositories/logger.repository.mock';
import { automock } from 'test/utils';

describe(NotificationRepository.name, () => {
  let sut: NotificationRepository;

  beforeEach(() => {
    sut = new NotificationRepository(newFakeLoggingRepository());
    sut = new NotificationRepository(automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }));
  });

  describe('renderEmail', () => {
@ -1,7 +1,8 @@
import mockfs from 'mock-fs';
import { CrawlOptionsDto } from 'src/dtos/library.dto';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { newFakeLoggingRepository } from 'test/repositories/logger.repository.mock';
import { automock } from 'test/utils';

interface Test {
  test: string;
@ -182,7 +183,7 @@ describe(StorageRepository.name, () => {
  let sut: StorageRepository;

  beforeEach(() => {
    sut = new StorageRepository(newFakeLoggingRepository());
    sut = new StorageRepository(automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }));
  });

  afterEach(() => {
@ -1,10 +1,14 @@
import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, sql } from 'kysely';
import { Insertable, Kysely, SelectQueryBuilder, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { DB, SessionSyncCheckpoints } from 'src/db';
import { SyncEntityType } from 'src/enum';
import { SyncAck } from 'src/types';

type auditTables = 'users_audit' | 'partners_audit' | 'assets_audit';
type upsertTables = 'users' | 'partners' | 'assets' | 'exif';

@Injectable()
export class SyncRepository {
  constructor(@InjectKysely() private db: Kysely<DB>) {}
@ -41,9 +45,7 @@ export class SyncRepository {
    return this.db
      .selectFrom('users')
      .select(['id', 'name', 'email', 'deletedAt', 'updateId'])
      .$if(!!ack, (qb) => qb.where('updateId', '>', ack!.updateId))
      .where('updatedAt', '<', sql.raw<Date>("now() - interval '1 millisecond'"))
      .orderBy(['updateId asc'])
      .$call((qb) => this.upsertTableFilters(qb, ack))
      .stream();
  }

@ -51,9 +53,7 @@ export class SyncRepository {
    return this.db
      .selectFrom('users_audit')
      .select(['id', 'userId'])
      .$if(!!ack, (qb) => qb.where('id', '>', ack!.updateId))
      .where('deletedAt', '<', sql.raw<Date>("now() - interval '1 millisecond'"))
      .orderBy(['id asc'])
      .$call((qb) => this.auditTableFilters(qb, ack))
      .stream();
  }

@ -61,10 +61,8 @@ export class SyncRepository {
    return this.db
      .selectFrom('partners')
      .select(['sharedById', 'sharedWithId', 'inTimeline', 'updateId'])
      .$if(!!ack, (qb) => qb.where('updateId', '>', ack!.updateId))
      .where((eb) => eb.or([eb('sharedById', '=', userId), eb('sharedWithId', '=', userId)]))
      .where('updatedAt', '<', sql.raw<Date>("now() - interval '1 millisecond'"))
      .orderBy(['updateId asc'])
      .$call((qb) => this.upsertTableFilters(qb, ack))
      .stream();
  }

@ -72,10 +70,93 @@ export class SyncRepository {
    return this.db
      .selectFrom('partners_audit')
      .select(['id', 'sharedById', 'sharedWithId'])
      .$if(!!ack, (qb) => qb.where('id', '>', ack!.updateId))
      .where((eb) => eb.or([eb('sharedById', '=', userId), eb('sharedWithId', '=', userId)]))
      .where('deletedAt', '<', sql.raw<Date>("now() - interval '1 millisecond'"))
      .orderBy(['id asc'])
      .$call((qb) => this.auditTableFilters(qb, ack))
      .stream();
  }

  getAssetUpserts(userId: string, ack?: SyncAck) {
    return this.db
      .selectFrom('assets')
      .select(columns.syncAsset)
      .where('ownerId', '=', userId)
      .$call((qb) => this.upsertTableFilters(qb, ack))
      .stream();
  }

  getPartnerAssetsUpserts(userId: string, ack?: SyncAck) {
    return this.db
      .selectFrom('assets')
      .select(columns.syncAsset)
      .where('ownerId', 'in', (eb) =>
        eb.selectFrom('partners').select(['sharedById']).where('sharedWithId', '=', userId),
      )
      .$call((qb) => this.upsertTableFilters(qb, ack))
      .stream();
  }

  getAssetDeletes(userId: string, ack?: SyncAck) {
    return this.db
      .selectFrom('assets_audit')
      .select(['id', 'assetId'])
      .where('ownerId', '=', userId)
      .$if(!!ack, (qb) => qb.where('id', '>', ack!.updateId))
      .$call((qb) => this.auditTableFilters(qb, ack))
      .stream();
  }

  getPartnerAssetDeletes(userId: string, ack?: SyncAck) {
    return this.db
      .selectFrom('assets_audit')
      .select(['id', 'assetId'])
      .where('ownerId', 'in', (eb) =>
        eb.selectFrom('partners').select(['sharedById']).where('sharedWithId', '=', userId),
      )
      .$call((qb) => this.auditTableFilters(qb, ack))
      .stream();
  }

  getAssetExifsUpserts(userId: string, ack?: SyncAck) {
    return this.db
      .selectFrom('exif')
      .select(columns.syncAssetExif)
      .where('assetId', 'in', (eb) => eb.selectFrom('assets').select('id').where('ownerId', '=', userId))
      .$call((qb) => this.upsertTableFilters(qb, ack))
      .stream();
  }

  getPartnerAssetExifsUpserts(userId: string, ack?: SyncAck) {
    return this.db
      .selectFrom('exif')
      .select(columns.syncAssetExif)
      .where('assetId', 'in', (eb) =>
        eb
          .selectFrom('assets')
          .select('id')
          .where('ownerId', 'in', (eb) =>
            eb.selectFrom('partners').select(['sharedById']).where('sharedWithId', '=', userId),
          ),
      )
      .$call((qb) => this.upsertTableFilters(qb, ack))
      .stream();
  }

  private auditTableFilters<T extends keyof Pick<DB, auditTables>, D>(qb: SelectQueryBuilder<DB, T, D>, ack?: SyncAck) {
    const builder = qb as SelectQueryBuilder<DB, auditTables, D>;
    return builder
      .where('deletedAt', '<', sql.raw<Date>("now() - interval '1 millisecond'"))
      .$if(!!ack, (qb) => qb.where('id', '>', ack!.updateId))
      .orderBy(['id asc']) as SelectQueryBuilder<DB, T, D>;
  }

  private upsertTableFilters<T extends keyof Pick<DB, upsertTables>, D>(
    qb: SelectQueryBuilder<DB, T, D>,
    ack?: SyncAck,
  ) {
    const builder = qb as SelectQueryBuilder<DB, upsertTables, D>;
    return builder
      .where('updatedAt', '<', sql.raw<Date>("now() - interval '1 millisecond'"))
      .$if(!!ack, (qb) => qb.where('updateId', '>', ack!.updateId))
      .orderBy(['updateId asc']) as SelectQueryBuilder<DB, T, D>;
  }
}
@ -146,6 +146,7 @@ describe(ActivityService.name, () => {
      const activity = factory.activity();

      mocks.access.activity.checkOwnerAccess.mockResolvedValue(new Set([activity.id]));
      mocks.activity.delete.mockResolvedValue();

      await sut.delete(factory.auth(), activity.id);

@ -156,6 +157,7 @@ describe(ActivityService.name, () => {
      const activity = factory.activity();

      mocks.access.activity.checkAlbumOwnerAccess.mockResolvedValue(new Set([activity.id]));
      mocks.activity.delete.mockResolvedValue();

      await sut.delete(factory.auth(), activity.id);

@ -347,6 +347,7 @@ describe(AlbumService.name, () => {
    it('should remove a shared user from an owned album', async () => {
      mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.sharedWithUser.id]));
      mocks.album.getById.mockResolvedValue(albumStub.sharedWithUser);
      mocks.albumUser.delete.mockResolvedValue();

      await expect(
        sut.removeUser(authStub.admin, albumStub.sharedWithUser.id, userStub.user1.id),
@ -376,6 +377,7 @@ describe(AlbumService.name, () => {

    it('should allow a shared user to remove themselves', async () => {
      mocks.album.getById.mockResolvedValue(albumStub.sharedWithUser);
      mocks.albumUser.delete.mockResolvedValue();

      await sut.removeUser(authStub.user1, albumStub.sharedWithUser.id, authStub.user1.user.id);

@ -388,6 +390,7 @@ describe(AlbumService.name, () => {

    it('should allow a shared user to remove themselves using "me"', async () => {
      mocks.album.getById.mockResolvedValue(albumStub.sharedWithUser);
      mocks.albumUser.delete.mockResolvedValue();

      await sut.removeUser(authStub.user1, albumStub.sharedWithUser.id, 'me');

@ -422,6 +425,8 @@ describe(AlbumService.name, () => {
  describe('updateUser', () => {
    it('should update user role', async () => {
      mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.sharedWithAdmin.id]));
      mocks.albumUser.update.mockResolvedValue(null as any);

      await sut.updateUser(authStub.user1, albumStub.sharedWithAdmin.id, userStub.admin.id, {
        role: AlbumUserRole.EDITOR,
      });

@ -1,112 +1,152 @@
import { BadRequestException } from '@nestjs/common';
import { Permission } from 'src/enum';
import { APIKeyService } from 'src/services/api-key.service';
import { keyStub } from 'test/fixtures/api-key.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { ApiKeyService } from 'src/services/api-key.service';
import { factory, newUuid } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';

describe(APIKeyService.name, () => {
  let sut: APIKeyService;
describe(ApiKeyService.name, () => {
  let sut: ApiKeyService;
  let mocks: ServiceMocks;

  beforeEach(() => {
    ({ sut, mocks } = newTestService(APIKeyService));
    ({ sut, mocks } = newTestService(ApiKeyService));
  });

  describe('create', () => {
    it('should create a new key', async () => {
      mocks.apiKey.create.mockResolvedValue(keyStub.admin);
      await sut.create(authStub.admin, { name: 'Test Key', permissions: [Permission.ALL] });
      const auth = factory.auth();
      const apiKey = factory.apiKey({ userId: auth.user.id, permissions: [Permission.ALL] });
      const key = 'super-secret';

      mocks.crypto.newPassword.mockReturnValue(key);
      mocks.apiKey.create.mockResolvedValue(apiKey);

      await sut.create(auth, { name: apiKey.name, permissions: apiKey.permissions });

      expect(mocks.apiKey.create).toHaveBeenCalledWith({
        key: 'cmFuZG9tLWJ5dGVz (hashed)',
        name: 'Test Key',
        permissions: [Permission.ALL],
        userId: authStub.admin.user.id,
        key: 'super-secret (hashed)',
        name: apiKey.name,
        permissions: apiKey.permissions,
        userId: apiKey.userId,
      });
      expect(mocks.crypto.newPassword).toHaveBeenCalled();
      expect(mocks.crypto.hashSha256).toHaveBeenCalled();
    });

    it('should not require a name', async () => {
      mocks.apiKey.create.mockResolvedValue(keyStub.admin);
      const auth = factory.auth();
      const apiKey = factory.apiKey({ userId: auth.user.id });
      const key = 'super-secret';

      await sut.create(authStub.admin, { permissions: [Permission.ALL] });
      mocks.crypto.newPassword.mockReturnValue(key);
      mocks.apiKey.create.mockResolvedValue(apiKey);

      await sut.create(auth, { permissions: [Permission.ALL] });

      expect(mocks.apiKey.create).toHaveBeenCalledWith({
        key: 'cmFuZG9tLWJ5dGVz (hashed)',
        key: 'super-secret (hashed)',
        name: 'API Key',
        permissions: [Permission.ALL],
        userId: authStub.admin.user.id,
        userId: auth.user.id,
      });
      expect(mocks.crypto.newPassword).toHaveBeenCalled();
      expect(mocks.crypto.hashSha256).toHaveBeenCalled();
    });

    it('should throw an error if the api key does not have sufficient permissions', async () => {
      await expect(
        sut.create({ ...authStub.admin, apiKey: keyStub.authKey }, { permissions: [Permission.ASSET_READ] }),
      ).rejects.toBeInstanceOf(BadRequestException);
      const auth = factory.auth({ apiKey: factory.authApiKey({ permissions: [Permission.ASSET_READ] }) });

      await expect(sut.create(auth, { permissions: [Permission.ASSET_UPDATE] })).rejects.toBeInstanceOf(
        BadRequestException,
      );
    });
  });

  describe('update', () => {
    it('should throw an error if the key is not found', async () => {
      await expect(sut.update(authStub.admin, 'random-guid', { name: 'New Name' })).rejects.toBeInstanceOf(
        BadRequestException,
      );
      const id = newUuid();
      const auth = factory.auth();

      expect(mocks.apiKey.update).not.toHaveBeenCalledWith('random-guid');
      mocks.apiKey.getById.mockResolvedValue(void 0);

      await expect(sut.update(auth, id, { name: 'New Name' })).rejects.toBeInstanceOf(BadRequestException);

      expect(mocks.apiKey.update).not.toHaveBeenCalledWith(id);
    });

    it('should update a key', async () => {
      mocks.apiKey.getById.mockResolvedValue(keyStub.admin);
      mocks.apiKey.update.mockResolvedValue(keyStub.admin);
      const auth = factory.auth();
      const apiKey = factory.apiKey({ userId: auth.user.id });
      const newName = 'New name';

      await sut.update(authStub.admin, 'random-guid', { name: 'New Name' });
      mocks.apiKey.getById.mockResolvedValue(apiKey);
      mocks.apiKey.update.mockResolvedValue(apiKey);

      expect(mocks.apiKey.update).toHaveBeenCalledWith(authStub.admin.user.id, 'random-guid', { name: 'New Name' });
      await sut.update(auth, apiKey.id, { name: newName });

      expect(mocks.apiKey.update).toHaveBeenCalledWith(auth.user.id, apiKey.id, { name: newName });
    });
  });

  describe('delete', () => {
    it('should throw an error if the key is not found', async () => {
      await expect(sut.delete(authStub.admin, 'random-guid')).rejects.toBeInstanceOf(BadRequestException);
      const auth = factory.auth();
      const id = newUuid();

      expect(mocks.apiKey.delete).not.toHaveBeenCalledWith('random-guid');
      mocks.apiKey.getById.mockResolvedValue(void 0);

      await expect(sut.delete(auth, id)).rejects.toBeInstanceOf(BadRequestException);

      expect(mocks.apiKey.delete).not.toHaveBeenCalledWith(id);
    });

    it('should delete a key', async () => {
      mocks.apiKey.getById.mockResolvedValue(keyStub.admin);
      const auth = factory.auth();
      const apiKey = factory.apiKey({ userId: auth.user.id });

      await sut.delete(authStub.admin, 'random-guid');
      mocks.apiKey.getById.mockResolvedValue(apiKey);
      mocks.apiKey.delete.mockResolvedValue();

      expect(mocks.apiKey.delete).toHaveBeenCalledWith(authStub.admin.user.id, 'random-guid');
      await sut.delete(auth, apiKey.id);

      expect(mocks.apiKey.delete).toHaveBeenCalledWith(auth.user.id, apiKey.id);
    });
  });

  describe('getById', () => {
    it('should throw an error if the key is not found', async () => {
      await expect(sut.getById(authStub.admin, 'random-guid')).rejects.toBeInstanceOf(BadRequestException);
      const auth = factory.auth();
      const id = newUuid();

      expect(mocks.apiKey.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'random-guid');
      mocks.apiKey.getById.mockResolvedValue(void 0);

      await expect(sut.getById(auth, id)).rejects.toBeInstanceOf(BadRequestException);

      expect(mocks.apiKey.getById).toHaveBeenCalledWith(auth.user.id, id);
    });

    it('should get a key by id', async () => {
      mocks.apiKey.getById.mockResolvedValue(keyStub.admin);
      const auth = factory.auth();
      const apiKey = factory.apiKey({ userId: auth.user.id });

      await sut.getById(authStub.admin, 'random-guid');
      mocks.apiKey.getById.mockResolvedValue(apiKey);

      expect(mocks.apiKey.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'random-guid');
      await sut.getById(auth, apiKey.id);

      expect(mocks.apiKey.getById).toHaveBeenCalledWith(auth.user.id, apiKey.id);
    });
  });

  describe('getAll', () => {
    it('should return all the keys for a user', async () => {
      mocks.apiKey.getByUserId.mockResolvedValue([keyStub.admin]);
      const auth = factory.auth();
      const apiKey = factory.apiKey({ userId: auth.user.id });

      await expect(sut.getAll(authStub.admin)).resolves.toHaveLength(1);
      mocks.apiKey.getByUserId.mockResolvedValue([apiKey]);

      expect(mocks.apiKey.getByUserId).toHaveBeenCalledWith(authStub.admin.user.id);
      await expect(sut.getAll(auth)).resolves.toHaveLength(1);

      expect(mocks.apiKey.getByUserId).toHaveBeenCalledWith(auth.user.id);
    });
  });
});
@ -7,7 +7,7 @@ import { ApiKeyItem } from 'src/types';
import { isGranted } from 'src/utils/access';

@Injectable()
export class APIKeyService extends BaseService {
export class ApiKeyService extends BaseService {
  async create(auth: AuthDto, dto: APIKeyCreateDto): Promise<APIKeyCreateResponseDto> {
    const secret = this.cryptoRepository.newPassword(32);

@ -127,8 +127,11 @@ describe(AssetService.name, () => {

  describe('getRandom', () => {
    it('should get own random assets', async () => {
      mocks.partner.getAll.mockResolvedValue([]);
      mocks.asset.getRandom.mockResolvedValue([assetStub.image]);

      await sut.getRandom(authStub.admin, 1);

      expect(mocks.asset.getRandom).toHaveBeenCalledWith([authStub.admin.user.id], 1);
    });

@ -531,6 +534,7 @@ describe(AssetService.name, () => {
    });

    it('should update stack primary asset if deleted asset was primary asset in a stack', async () => {
      mocks.stack.update.mockResolvedValue(factory.stack() as unknown as any);
      mocks.asset.getById.mockResolvedValue(assetStub.primaryImage as AssetEntity);

      await sut.handleAssetDeletion({ id: assetStub.primaryImage.id, deleteOnDisk: true });
@ -542,6 +546,7 @@ describe(AssetService.name, () => {
    });

    it('should delete the entire stack if deleted asset was the primary asset and the stack would only contain one asset afterwards', async () => {
      mocks.stack.delete.mockResolvedValue();
      mocks.asset.getById.mockResolvedValue({
        ...assetStub.primaryImage,
        stack: { ...assetStub.primaryImage.stack, assets: assetStub.primaryImage.stack!.assets.slice(0, 2) },
@ -18,7 +18,10 @@ describe(AuditService.name, () => {

  describe('handleCleanup', () => {
    it('should delete old audit entries', async () => {
      mocks.audit.removeBefore.mockResolvedValue();

      await expect(sut.handleCleanup()).resolves.toBe(JobStatus.SUCCESS);

      expect(mocks.audit.removeBefore).toHaveBeenCalledWith(expect.any(Date));
    });
  });

@ -4,12 +4,11 @@ import { UserMetadataEntity } from 'src/entities/user-metadata.entity';
import { UserEntity } from 'src/entities/user.entity';
import { AuthType, Permission } from 'src/enum';
import { AuthService } from 'src/services/auth.service';
import { keyStub } from 'test/fixtures/api-key.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { sessionStub } from 'test/fixtures/session.stub';
import { sharedLinkStub } from 'test/fixtures/shared-link.stub';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { userStub } from 'test/fixtures/user.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';

const oauthResponse = {
@ -66,7 +65,10 @@ describe('AuthService', () => {

  describe('onBootstrap', () => {
    it('should init the repo', () => {
      mocks.oauth.init.mockResolvedValue();

      sut.onBootstrap();

      expect(mocks.oauth.init).toHaveBeenCalled();
    });
  });
@ -74,24 +76,30 @@ describe('AuthService', () => {
  describe('login', () => {
    it('should throw an error if password login is disabled', async () => {
      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.disabled);

      await expect(sut.login(fixtures.login, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
    });

    it('should check the user exists', async () => {
      mocks.user.getByEmail.mockResolvedValue(void 0);

      await expect(sut.login(fixtures.login, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);

      expect(mocks.user.getByEmail).toHaveBeenCalledTimes(1);
    });

    it('should check the user has a password', async () => {
      mocks.user.getByEmail.mockResolvedValue({} as UserEntity);

      await expect(sut.login(fixtures.login, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);

      expect(mocks.user.getByEmail).toHaveBeenCalledTimes(1);
    });

    it('should successfully log the user in', async () => {
      mocks.user.getByEmail.mockResolvedValue(userStub.user1);
      mocks.session.create.mockResolvedValue(sessionStub.valid);

      await expect(sut.login(fixtures.login, loginDetails)).resolves.toEqual({
        accessToken: 'cmFuZG9tLWJ5dGVz',
        userId: 'user-id',
@ -101,6 +109,7 @@ describe('AuthService', () => {
        isAdmin: false,
        shouldChangePassword: false,
      });

      expect(mocks.user.getByEmail).toHaveBeenCalledTimes(1);
    });
  });
@ -160,8 +169,10 @@ describe('AuthService', () => {

  describe('logout', () => {
    it('should return the end session endpoint', async () => {
      const auth = factory.auth();

      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
      const auth = { user: { id: '123' } } as AuthDto;

      await expect(sut.logout(auth, AuthType.OAUTH)).resolves.toEqual({
        successful: true,
        redirectUri: 'http://end-session-endpoint',
@ -169,7 +180,7 @@ describe('AuthService', () => {
    });

    it('should return the default redirect', async () => {
      const auth = { user: { id: '123' } } as AuthDto;
      const auth = factory.auth();

      await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({
        successful: true,
@ -179,6 +190,7 @@ describe('AuthService', () => {

    it('should delete the access token', async () => {
      const auth = { user: { id: '123' }, session: { id: 'token123' } } as AuthDto;
      mocks.session.delete.mockResolvedValue();

      await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({
        successful: true,
@ -204,7 +216,9 @@ describe('AuthService', () => {

    it('should only allow one admin', async () => {
      mocks.user.getAdmin.mockResolvedValue({} as UserEntity);

      await expect(sut.adminSignUp(dto)).rejects.toBeInstanceOf(BadRequestException);

      expect(mocks.user.getAdmin).toHaveBeenCalled();
    });

@ -216,6 +230,7 @@ describe('AuthService', () => {
        createdAt: new Date('2021-01-01'),
        metadata: [] as UserMetadataEntity[],
      } as UserEntity);

      await expect(sut.adminSignUp(dto)).resolves.toMatchObject({
        avatarColor: expect.any(String),
        id: 'admin',
@ -223,6 +238,7 @@ describe('AuthService', () => {
        email: 'test@immich.com',
        name: 'immich admin',
      });

      expect(mocks.user.getAdmin).toHaveBeenCalled();
      expect(mocks.user.create).toHaveBeenCalled();
    });
@ -242,6 +258,7 @@ describe('AuthService', () => {
    it('should validate using authorization header', async () => {
      mocks.user.get.mockResolvedValue(userStub.user1);
      mocks.session.getByToken.mockResolvedValue(sessionStub.valid as any);

      await expect(
        sut.authenticate({
          headers: { authorization: 'Bearer auth_token' },
@ -257,6 +274,8 @@ describe('AuthService', () => {

  describe('validate - shared key', () => {
    it('should not accept a non-existent key', async () => {
      mocks.sharedLink.getByKey.mockResolvedValue(void 0);

      await expect(
        sut.authenticate({
          headers: { 'x-immich-share-key': 'key' },
@ -268,6 +287,7 @@ describe('AuthService', () => {

    it('should not accept an expired key', async () => {
      mocks.sharedLink.getByKey.mockResolvedValue(sharedLinkStub.expired);

      await expect(
        sut.authenticate({
          headers: { 'x-immich-share-key': 'key' },
@ -279,6 +299,7 @@ describe('AuthService', () => {

    it('should not accept a key on a non-shared route', async () => {
      mocks.sharedLink.getByKey.mockResolvedValue(sharedLinkStub.valid);

      await expect(
        sut.authenticate({
          headers: { 'x-immich-share-key': 'key' },
@ -291,6 +312,7 @@ describe('AuthService', () => {
    it('should not accept a key without a user', async () => {
      mocks.sharedLink.getByKey.mockResolvedValue(sharedLinkStub.expired);
      mocks.user.get.mockResolvedValue(void 0);

      await expect(
        sut.authenticate({
          headers: { 'x-immich-share-key': 'key' },
@ -303,6 +325,7 @@ describe('AuthService', () => {
    it('should accept a base64url key', async () => {
      mocks.sharedLink.getByKey.mockResolvedValue(sharedLinkStub.valid);
      mocks.user.get.mockResolvedValue(userStub.admin);

      await expect(
        sut.authenticate({
          headers: { 'x-immich-share-key': sharedLinkStub.valid.key.toString('base64url') },
@ -319,6 +342,7 @@ describe('AuthService', () => {
    it('should accept a hex key', async () => {
      mocks.sharedLink.getByKey.mockResolvedValue(sharedLinkStub.valid);
      mocks.user.get.mockResolvedValue(userStub.admin);

      await expect(
        sut.authenticate({
          headers: { 'x-immich-share-key': sharedLinkStub.valid.key.toString('hex') },
@ -336,6 +360,7 @@ describe('AuthService', () => {
  describe('validate - user token', () => {
    it('should throw if no token is found', async () => {
      mocks.session.getByToken.mockResolvedValue(void 0);

      await expect(
        sut.authenticate({
          headers: { 'x-immich-user-token': 'auth_token' },
@ -347,6 +372,7 @@ describe('AuthService', () => {

    it('should return an auth dto', async () => {
      mocks.session.getByToken.mockResolvedValue(sessionStub.valid as any);

      await expect(
        sut.authenticate({
          headers: { cookie: 'immich_access_token=auth_token' },
@ -361,6 +387,7 @@ describe('AuthService', () => {

    it('should throw if admin route and not an admin', async () => {
      mocks.session.getByToken.mockResolvedValue(sessionStub.valid as any);

      await expect(
        sut.authenticate({
          headers: { cookie: 'immich_access_token=auth_token' },
@ -373,6 +400,7 @@ describe('AuthService', () => {
    it('should update when access time exceeds an hour', async () => {
      mocks.session.getByToken.mockResolvedValue(sessionStub.inactive as any);
      mocks.session.update.mockResolvedValue(sessionStub.valid);

      await expect(
        sut.authenticate({
          headers: { cookie: 'immich_access_token=auth_token' },
@ -387,6 +415,7 @@ describe('AuthService', () => {
  describe('validate - api key', () => {
    it('should throw an error if no api key is found', async () => {
      mocks.apiKey.getKey.mockResolvedValue(void 0);

      await expect(
        sut.authenticate({
          headers: { 'x-api-key': 'auth_token' },
@ -398,7 +427,11 @@ describe('AuthService', () => {
    });

    it('should throw an error if api key has insufficient permissions', async () => {
      mocks.apiKey.getKey.mockResolvedValue(keyStub.authKey);
      const authUser = factory.authUser();
      const authApiKey = factory.authApiKey({ permissions: [] });

      mocks.apiKey.getKey.mockResolvedValue({ ...authApiKey, user: authUser });

      await expect(
        sut.authenticate({
          headers: { 'x-api-key': 'auth_token' },
@ -409,14 +442,18 @@ describe('AuthService', () => {
    });

    it('should return an auth dto', async () => {
      mocks.apiKey.getKey.mockResolvedValue(keyStub.authKey);
      const authUser = factory.authUser();
      const authApiKey = factory.authApiKey({ permissions: [] });

      mocks.apiKey.getKey.mockResolvedValue({ ...authApiKey, user: authUser });

      await expect(
        sut.authenticate({
          headers: { 'x-api-key': 'auth_token' },
          queryParams: {},
          metadata: { adminRoute: false, sharedLinkRoute: false, uri: 'test' },
        }),
      ).resolves.toEqual({ user: userStub.admin, apiKey: keyStub.authKey });
      ).resolves.toEqual({ user: authUser, apiKey: expect.objectContaining(authApiKey) });
      expect(mocks.apiKey.getKey).toHaveBeenCalledWith('auth_token (hashed)');
    });
  });
@ -436,6 +473,7 @@ describe('AuthService', () => {
  describe('authorize', () => {
    it('should fail if oauth is disabled', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ oauth: { enabled: false } });

      await expect(sut.authorize({ redirectUri: 'https://demo.immich.app' })).rejects.toBeInstanceOf(
        BadRequestException,
      );
@ -443,6 +481,7 @@ describe('AuthService', () => {

    it('should authorize the user', async () => {
      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithMobileOverride);

      await sut.authorize({ redirectUri: 'https://demo.immich.app' });
    });
  });
@ -455,9 +494,11 @@ describe('AuthService', () => {
    it('should not allow auto registering', async () => {
      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthEnabled);
      mocks.user.getByEmail.mockResolvedValue(void 0);

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).rejects.toBeInstanceOf(
        BadRequestException,
      );

      expect(mocks.user.getByEmail).toHaveBeenCalledTimes(1);
    });

@ -534,6 +575,7 @@ describe('AuthService', () => {
        mocks.session.create.mockResolvedValue(sessionStub.valid);

        await sut.callback({ url }, loginDetails);

        expect(mocks.oauth.getProfile).toHaveBeenCalledWith(expect.objectContaining({}), url, 'http://mobile-redirect');
      });
    }
@ -543,6 +585,7 @@ describe('AuthService', () => {
      mocks.user.getByEmail.mockResolvedValue(void 0);
      mocks.user.getAdmin.mockResolvedValue(userStub.user1);
      mocks.user.create.mockResolvedValue(userStub.user1);
      mocks.session.create.mockResolvedValue(factory.session());

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        oauthResponse,
@ -557,6 +600,7 @@ describe('AuthService', () => {
      mocks.user.getAdmin.mockResolvedValue(userStub.user1);
      mocks.user.create.mockResolvedValue(userStub.user1);
      mocks.oauth.getProfile.mockResolvedValue({ sub, email, immich_quota: 'abc' });
      mocks.session.create.mockResolvedValue(factory.session());

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        oauthResponse,
@ -571,6 +615,7 @@ describe('AuthService', () => {
      mocks.user.getAdmin.mockResolvedValue(userStub.user1);
      mocks.user.create.mockResolvedValue(userStub.user1);
      mocks.oauth.getProfile.mockResolvedValue({ sub, email, immich_quota: -5 });
      mocks.session.create.mockResolvedValue(factory.session());

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        oauthResponse,
@ -585,6 +630,7 @@ describe('AuthService', () => {
      mocks.user.getAdmin.mockResolvedValue(userStub.user1);
      mocks.user.create.mockResolvedValue(userStub.user1);
      mocks.oauth.getProfile.mockResolvedValue({ sub, email, immich_quota: 0 });
      mocks.session.create.mockResolvedValue(factory.session());

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        oauthResponse,
@ -605,6 +651,7 @@ describe('AuthService', () => {
      mocks.user.getAdmin.mockResolvedValue(userStub.user1);
      mocks.user.create.mockResolvedValue(userStub.user1);
      mocks.oauth.getProfile.mockResolvedValue({ sub, email, immich_quota: 5 });
      mocks.session.create.mockResolvedValue(factory.session());

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        oauthResponse,
@ -622,19 +669,27 @@ describe('AuthService', () => {

  describe('link', () => {
    it('should link an account', async () => {
      const authUser = factory.authUser();
      const authApiKey = factory.authApiKey({ permissions: [] });
      const auth = { user: authUser, apiKey: authApiKey };

      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
      mocks.user.update.mockResolvedValue(userStub.user1);

      await sut.link(authStub.user1, { url: 'http://immich/user-settings?code=abc123' });
      await sut.link(auth, { url: 'http://immich/user-settings?code=abc123' });

      expect(mocks.user.update).toHaveBeenCalledWith(authStub.user1.user.id, { oauthId: sub });
      expect(mocks.user.update).toHaveBeenCalledWith(auth.user.id, { oauthId: sub });
    });

    it('should not link an already linked oauth.sub', async () => {
      const authUser = factory.authUser();
      const authApiKey = factory.authApiKey({ permissions: [] });
      const auth = { user: authUser, apiKey: authApiKey };

      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
      mocks.user.getByOAuthId.mockResolvedValue({ id: 'other-user' } as UserEntity);

      await expect(sut.link(authStub.user1, { url: 'http://immich/user-settings?code=abc123' })).rejects.toBeInstanceOf(
      await expect(sut.link(auth, { url: 'http://immich/user-settings?code=abc123' })).rejects.toBeInstanceOf(
        BadRequestException,
      );

@ -644,12 +699,16 @@ describe('AuthService', () => {

  describe('unlink', () => {
    it('should unlink an account', async () => {
      const authUser = factory.authUser();
      const authApiKey = factory.authApiKey({ permissions: [] });
      const auth = { user: authUser, apiKey: authApiKey };

      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
      mocks.user.update.mockResolvedValue(userStub.user1);

      await sut.unlink(authStub.user1);
      await sut.unlink(auth);

      expect(mocks.user.update).toHaveBeenCalledWith(authStub.user1.user.id, { oauthId: '' });
      expect(mocks.user.update).toHaveBeenCalledWith(auth.user.id, { oauthId: '' });
    });
  });
});
@ -22,6 +22,7 @@ describe(BackupService.name, () => {
  describe('onBootstrapEvent', () => {
    it('should init cron job and handle config changes', async () => {
      mocks.database.tryLock.mockResolvedValue(true);
      mocks.cron.create.mockResolvedValue();

      await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });

@ -47,10 +48,14 @@ describe(BackupService.name, () => {
  describe('onConfigUpdateEvent', () => {
    beforeEach(async () => {
      mocks.database.tryLock.mockResolvedValue(true);
      mocks.cron.create.mockResolvedValue();

      await sut.onConfigInit({ newConfig: defaults });
    });

    it('should update cron job if backup is enabled', () => {
      mocks.cron.update.mockResolvedValue();

      sut.onConfigUpdate({
        oldConfig: defaults,
        newConfig: {
@ -31,6 +31,8 @@ describe(CliService.name, () => {

    it('should default to a random password', async () => {
      mocks.user.getAdmin.mockResolvedValue(userStub.admin);
      mocks.user.update.mockResolvedValue(userStub.admin);

      const ask = vitest.fn().mockImplementation(() => {});

      const response = await sut.resetAdminPassword(ask);
@ -45,6 +47,8 @@ describe(CliService.name, () => {

    it('should use the supplied password', async () => {
      mocks.user.getAdmin.mockResolvedValue(userStub.admin);
      mocks.user.update.mockResolvedValue(userStub.admin);

      const ask = vitest.fn().mockResolvedValue('new-password');

      const response = await sut.resetAdminPassword(ask);
@ -173,6 +173,7 @@ describe(DownloadService.name, () => {
    it('should return a list of archives (assetIds)', async () => {
      const assetIds = ['asset-1', 'asset-2'];

      mocks.user.getMetadata.mockResolvedValue([]);
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(assetIds));
      mocks.downloadRepository.downloadAssetIds.mockReturnValue(
        makeStream([
@ -187,6 +188,7 @@ describe(DownloadService.name, () => {
    });

    it('should return a list of archives (albumId)', async () => {
      mocks.user.getMetadata.mockResolvedValue([]);
      mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set(['album-1']));
      mocks.downloadRepository.downloadAlbumId.mockReturnValue(
        makeStream([
@ -202,6 +204,7 @@ describe(DownloadService.name, () => {
    });

    it('should return a list of archives (userId)', async () => {
      mocks.user.getMetadata.mockResolvedValue([]);
      mocks.downloadRepository.downloadUserId.mockReturnValue(
        makeStream([
          { id: 'asset-1', livePhotoVideoId: null, size: 100_000 },
@ -217,6 +220,7 @@ describe(DownloadService.name, () => {
    });

    it('should split archives by size', async () => {
      mocks.user.getMetadata.mockResolvedValue([]);
      mocks.downloadRepository.downloadUserId.mockReturnValue(
        makeStream([
          { id: 'asset-1', livePhotoVideoId: null, size: 5000 },
@ -244,13 +248,13 @@ describe(DownloadService.name, () => {
      const assetIds = ['asset-1', 'asset-2'];

      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(assetIds));
      mocks.user.getMetadata.mockResolvedValue([]);
      mocks.downloadRepository.downloadAssetIds.mockReturnValue(
        makeStream([
          { id: 'asset-1', livePhotoVideoId: 'asset-3', size: 5000 },
          { id: 'asset-2', livePhotoVideoId: 'asset-4', size: 100_000 },
        ]),
      );

      mocks.downloadRepository.downloadMotionAssetIds.mockReturnValue(
        makeStream([
          { id: 'asset-3', livePhotoVideoId: null, size: 23_456, originalPath: '/path/to/file.mp4' },
@ -271,11 +275,10 @@ describe(DownloadService.name, () => {
      const assetIds = ['asset-1'];

      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(assetIds));

      mocks.user.getMetadata.mockResolvedValue([]);
      mocks.downloadRepository.downloadAssetIds.mockReturnValue(
        makeStream([{ id: 'asset-1', livePhotoVideoId: 'asset-3', size: 5000 }]),
      );

      mocks.downloadRepository.downloadMotionAssetIds.mockReturnValue(
        makeStream([
          { id: 'asset-2', livePhotoVideoId: null, size: 23_456, originalPath: 'upload/encoded-video/uuid-MP.mp4' },
@ -1,6 +1,6 @@
import { ActivityService } from 'src/services/activity.service';
import { AlbumService } from 'src/services/album.service';
import { APIKeyService } from 'src/services/api-key.service';
import { ApiKeyService } from 'src/services/api-key.service';
import { ApiService } from 'src/services/api.service';
import { AssetMediaService } from 'src/services/asset-media.service';
import { AssetService } from 'src/services/asset.service';
@ -40,7 +40,7 @@ import { VersionService } from 'src/services/version.service';
import { ViewService } from 'src/services/view.service';

export const services = [
  APIKeyService,
  ApiKeyService,
  ActivityService,
  AlbumService,
  ApiService,
@ -36,6 +36,9 @@ describe(LibraryService.name, () => {

  describe('onConfigInit', () => {
    it('should init cron job and handle config changes', async () => {
      mocks.cron.create.mockResolvedValue();
      mocks.cron.update.mockResolvedValue();

      await sut.onConfigInit({ newConfig: defaults });

      expect(mocks.cron.create).toHaveBeenCalled();
@ -65,6 +68,7 @@ describe(LibraryService.name, () => {
      mocks.library.get.mockImplementation((id) =>
        Promise.resolve([library1, library2].find((library) => library.id === id)),
      );
      mocks.cron.create.mockResolvedValue();

      await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchEnabled as SystemConfig });

@ -74,6 +78,8 @@ describe(LibraryService.name, () => {
    });

    it('should not initialize watcher when watching is disabled', async () => {
      mocks.cron.create.mockResolvedValue();

      await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchDisabled as SystemConfig });

      expect(mocks.storage.watch).not.toHaveBeenCalled();
@ -99,6 +105,8 @@ describe(LibraryService.name, () => {
  describe('onConfigUpdateEvent', () => {
    beforeEach(async () => {
      mocks.database.tryLock.mockResolvedValue(true);
      mocks.cron.create.mockResolvedValue();

      await sut.onConfigInit({ newConfig: defaults });
    });

@ -111,6 +119,9 @@ describe(LibraryService.name, () => {

    it('should update cron job and enable watching', async () => {
      mocks.library.getAll.mockResolvedValue([]);
      mocks.cron.create.mockResolvedValue();
      mocks.cron.update.mockResolvedValue();

      await sut.onConfigUpdate({
        newConfig: systemConfigStub.libraryScanAndWatch as SystemConfig,
        oldConfig: defaults,
@ -125,6 +136,9 @@ describe(LibraryService.name, () => {

    it('should update cron job and disable watching', async () => {
      mocks.library.getAll.mockResolvedValue([]);
      mocks.cron.create.mockResolvedValue();
      mocks.cron.update.mockResolvedValue();

      await sut.onConfigUpdate({
        newConfig: systemConfigStub.libraryScanAndWatch as SystemConfig,
        oldConfig: defaults,
@ -640,6 +654,7 @@ describe(LibraryService.name, () => {

      const mockClose = vitest.fn();
      mocks.storage.watch.mockImplementation(makeMockWatcher({ close: mockClose }));
      mocks.cron.create.mockResolvedValue();

      await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchEnabled as SystemConfig });
      await sut.delete(library.id);
@ -785,6 +800,7 @@ describe(LibraryService.name, () => {
      mocks.library.create.mockResolvedValue(library);
      mocks.library.get.mockResolvedValue(library);
      mocks.library.getAll.mockResolvedValue([]);
      mocks.cron.create.mockResolvedValue();

      await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchEnabled as SystemConfig });
      await sut.create({ ownerId: authStub.admin.user.id, importPaths: library.importPaths });
@ -852,6 +868,7 @@ describe(LibraryService.name, () => {
  describe('update', () => {
    beforeEach(async () => {
      mocks.library.getAll.mockResolvedValue([]);
      mocks.cron.create.mockResolvedValue();

      await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchEnabled as SystemConfig });
    });
@ -898,6 +915,8 @@ describe(LibraryService.name, () => {

    describe('watching disabled', () => {
      beforeEach(async () => {
        mocks.cron.create.mockResolvedValue();

        await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchDisabled as SystemConfig });
      });

@ -915,6 +934,8 @@ describe(LibraryService.name, () => {
    describe('watching enabled', () => {
      beforeEach(async () => {
        mocks.library.getAll.mockResolvedValue([]);
        mocks.cron.create.mockResolvedValue();

        await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchEnabled as SystemConfig });
      });

@ -1087,6 +1108,7 @@ describe(LibraryService.name, () => {

      const mockClose = vitest.fn();
      mocks.storage.watch.mockImplementation(makeMockWatcher({ close: mockClose }));
      mocks.cron.create.mockResolvedValue();

      await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchEnabled as SystemConfig });
      await sut.onShutdown();
@ -33,6 +33,8 @@ describe(MemoryService.name, () => {
    });

    it('should map ', async () => {
      mocks.memory.search.mockResolvedValue([]);

      await expect(sut.search(factory.auth(), {})).resolves.toEqual([]);
    });
  });
@ -46,6 +48,7 @@ describe(MemoryService.name, () => {
      const [memoryId] = newUuids();

      mocks.access.memory.checkOwnerAccess.mockResolvedValue(new Set([memoryId]));
      mocks.memory.get.mockResolvedValue(void 0);

      await expect(sut.get(factory.auth(), memoryId)).rejects.toBeInstanceOf(BadRequestException);
    });
@ -159,6 +162,7 @@ describe(MemoryService.name, () => {
      const memoryId = newUuid();

      mocks.access.memory.checkOwnerAccess.mockResolvedValue(new Set([memoryId]));
      mocks.memory.delete.mockResolvedValue();

      await expect(sut.remove(factory.auth(), memoryId)).resolves.toBeUndefined();

@ -183,6 +187,7 @@ describe(MemoryService.name, () => {

      mocks.access.memory.checkOwnerAccess.mockResolvedValue(new Set([memory.id]));
      mocks.memory.get.mockResolvedValue(memory);
      mocks.memory.getAssetIds.mockResolvedValue(new Set());

      await expect(sut.addAssets(factory.auth(), memory.id, { ids: [assetId] })).resolves.toEqual([
        { error: 'no_permission', id: assetId, success: false },
@ -213,6 +218,9 @@ describe(MemoryService.name, () => {
      mocks.access.memory.checkOwnerAccess.mockResolvedValue(new Set([memory.id]));
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetId]));
      mocks.memory.get.mockResolvedValue(memory);
      mocks.memory.update.mockResolvedValue(memory);
      mocks.memory.getAssetIds.mockResolvedValue(new Set());
      mocks.memory.addAssetIds.mockResolvedValue();

      await expect(sut.addAssets(factory.auth(), memory.id, { ids: [assetId] })).resolves.toEqual([
        { id: assetId, success: true },
@ -233,6 +241,7 @@ describe(MemoryService.name, () => {

    it('should skip assets not in the memory', async () => {
      mocks.access.memory.checkOwnerAccess.mockResolvedValue(new Set(['memory1']));
      mocks.memory.getAssetIds.mockResolvedValue(new Set());

      await expect(sut.removeAssets(factory.auth(), 'memory1', { ids: ['not-found'] })).resolves.toEqual([
        { error: 'not_found', id: 'not-found', success: false },
@ -242,15 +251,20 @@ describe(MemoryService.name, () => {
    });

    it('should remove assets', async () => {
      mocks.access.memory.checkOwnerAccess.mockResolvedValue(new Set(['memory1']));
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset1']));
      mocks.memory.getAssetIds.mockResolvedValue(new Set(['asset1']));
      const memory = factory.memory();
      const asset = factory.asset();

      await expect(sut.removeAssets(factory.auth(), 'memory1', { ids: ['asset1'] })).resolves.toEqual([
        { id: 'asset1', success: true },
      mocks.access.memory.checkOwnerAccess.mockResolvedValue(new Set([memory.id]));
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([asset.id]));
      mocks.memory.getAssetIds.mockResolvedValue(new Set([asset.id]));
      mocks.memory.removeAssetIds.mockResolvedValue();
      mocks.memory.update.mockResolvedValue(memory);

      await expect(sut.removeAssets(factory.auth(), memory.id, { ids: [asset.id] })).resolves.toEqual([
        { id: asset.id, success: true },
      ]);

      expect(mocks.memory.removeAssetIds).toHaveBeenCalledWith('memory1', ['asset1']);
      expect(mocks.memory.removeAssetIds).toHaveBeenCalledWith(memory.id, [asset.id]);
    });
  });
});
@ -53,6 +53,10 @@ describe(MetadataService.name, () => {

  describe('onBootstrapEvent', () => {
    it('should pause and resume queue during init', async () => {
      mocks.job.pause.mockResolvedValue();
      mocks.map.init.mockResolvedValue();
      mocks.job.resume.mockResolvedValue();

      await sut.onBootstrap();

      expect(mocks.job.pause).toHaveBeenCalledTimes(1);
@ -260,6 +260,7 @@ describe(NotificationService.name, () => {
      mocks.user.get.mockResolvedValue(userStub.admin);
      mocks.notification.verifySmtp.mockResolvedValue(true);
      mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' });
      mocks.notification.sendEmail.mockResolvedValue({ messageId: 'message-1', response: '' });

      await expect(sut.sendTestEmail('', configs.smtpTransport.notifications.smtp)).resolves.not.toThrow();
      expect(mocks.notification.renderEmail).toHaveBeenCalledWith({
@ -279,6 +280,7 @@ describe(NotificationService.name, () => {
      mocks.notification.verifySmtp.mockResolvedValue(true);
      mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' });
      mocks.systemMetadata.get.mockResolvedValue({ server: { externalDomain: 'https://demo.immich.app' } });
      mocks.notification.sendEmail.mockResolvedValue({ messageId: 'message-1', response: '' });

      await expect(sut.sendTestEmail('', configs.smtpTransport.notifications.smtp)).resolves.not.toThrow();
      expect(mocks.notification.renderEmail).toHaveBeenCalledWith({
@ -297,6 +299,7 @@ describe(NotificationService.name, () => {
      mocks.user.get.mockResolvedValue(userStub.admin);
      mocks.notification.verifySmtp.mockResolvedValue(true);
      mocks.notification.renderEmail.mockResolvedValue({ html: '', text: '' });
      mocks.notification.sendEmail.mockResolvedValue({ messageId: 'message-1', response: '' });

      await expect(
        sut.sendTestEmail('', { ...configs.smtpTransport.notifications.smtp, replyTo: 'demo@immich.app' }),
@ -324,6 +324,10 @@ describe(PersonService.name, () => {
      mocks.person.getFacesByIds.mockResolvedValue([faceStub.face1]);
      mocks.person.reassignFace.mockResolvedValue(1);
      mocks.person.getRandomFace.mockResolvedValue(faceStub.primaryFace1);
      mocks.person.refreshFaces.mockResolvedValue();
      mocks.person.reassignFace.mockResolvedValue(5);
      mocks.person.update.mockResolvedValue(personStub.noName);

      await expect(
        sut.reassignFaces(authStub.admin, personStub.noName.id, {
          data: [{ personId: personStub.withName.id, assetId: assetStub.image.id }],
@ -515,6 +519,7 @@ describe(PersonService.name, () => {
        hasNextPage: false,
      });
      mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.randomPerson]);
      mocks.person.deleteFaces.mockResolvedValue();

      await sut.handleQueueDetectFaces({ force: true });

@ -633,6 +638,7 @@ describe(PersonService.name, () => {
      mocks.person.getAll.mockReturnValue(makeStream());
      mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
      mocks.person.getAllWithoutFaces.mockResolvedValue([]);
      mocks.person.unassignFaces.mockResolvedValue();

      await sut.handleQueueRecognizeFaces({ force: true, nightly: true });

@ -679,6 +685,7 @@ describe(PersonService.name, () => {
      mocks.person.getAll.mockReturnValue(makeStream([faceStub.face1.person, personStub.randomPerson]));
      mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
      mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.randomPerson]);
      mocks.person.unassignFaces.mockResolvedValue();

      await sut.handleQueueRecognizeFaces({ force: true });

@ -757,6 +764,7 @@ describe(PersonService.name, () => {
      mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock);
      mocks.search.searchFaces.mockResolvedValue([{ ...faceStub.face1, distance: 0.7 }]);
      mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
      mocks.person.refreshFaces.mockResolvedValue();

      await sut.handleDetectFaces({ id: assetStub.image.id });

@ -784,6 +792,7 @@ describe(PersonService.name, () => {
    it('should add new face and delete an existing face not among the new detected faces', async () => {
      mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock);
      mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.primaryFace1] }]);
      mocks.person.refreshFaces.mockResolvedValue();

      await sut.handleDetectFaces({ id: assetStub.image.id });

@ -799,6 +808,7 @@ describe(PersonService.name, () => {
    it('should add embedding to matching metadata face', async () => {
      mocks.machineLearning.detectFaces.mockResolvedValue(detectFaceMock);
      mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.image, faces: [faceStub.fromExif1] }]);
      mocks.person.refreshFaces.mockResolvedValue();

      await sut.handleDetectFaces({ id: assetStub.image.id });

@ -1006,6 +1016,7 @@ describe(PersonService.name, () => {
      mocks.person.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.middle.assetId });
      mocks.person.getFaceByIdWithAssets.mockResolvedValue(faceStub.middle);
      mocks.asset.getById.mockResolvedValue(assetStub.primaryImage);
      mocks.media.generateThumbnail.mockResolvedValue();

      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });

@ -1038,6 +1049,7 @@ describe(PersonService.name, () => {
      mocks.person.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.start.assetId });
      mocks.person.getFaceByIdWithAssets.mockResolvedValue(faceStub.start);
      mocks.asset.getById.mockResolvedValue(assetStub.image);
      mocks.media.generateThumbnail.mockResolvedValue();

      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });

@ -1063,7 +1075,9 @@ describe(PersonService.name, () => {
    it('should generate a thumbnail without overflowing', async () => {
      mocks.person.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.end.assetId });
      mocks.person.getFaceByIdWithAssets.mockResolvedValue(faceStub.end);
      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
      mocks.asset.getById.mockResolvedValue(assetStub.primaryImage);
      mocks.media.generateThumbnail.mockResolvedValue();

      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });

@ -57,6 +57,8 @@ describe(SearchService.name, () => {
|
||||
describe('getSearchSuggestions', () => {
|
||||
it('should return search suggestions for country', async () => {
|
||||
mocks.search.getCountries.mockResolvedValue(['USA']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: false, type: SearchSuggestionType.COUNTRY }),
|
||||
).resolves.toEqual(['USA']);
|
||||
@ -65,6 +67,8 @@ describe(SearchService.name, () => {
|
||||
|
||||
it('should return search suggestions for country (including null)', async () => {
|
||||
mocks.search.getCountries.mockResolvedValue(['USA']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: true, type: SearchSuggestionType.COUNTRY }),
|
||||
).resolves.toEqual(['USA', null]);
|
||||
@ -73,6 +77,8 @@ describe(SearchService.name, () => {
|
||||
|
||||
it('should return search suggestions for state', async () => {
|
||||
mocks.search.getStates.mockResolvedValue(['California']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: false, type: SearchSuggestionType.STATE }),
|
||||
).resolves.toEqual(['California']);
|
||||
@ -81,6 +87,8 @@ describe(SearchService.name, () => {
|
||||
|
||||
it('should return search suggestions for state (including null)', async () => {
|
||||
mocks.search.getStates.mockResolvedValue(['California']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: true, type: SearchSuggestionType.STATE }),
|
||||
).resolves.toEqual(['California', null]);
|
||||
@ -89,6 +97,8 @@ describe(SearchService.name, () => {
|
||||
|
||||
it('should return search suggestions for city', async () => {
|
||||
mocks.search.getCities.mockResolvedValue(['Denver']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: false, type: SearchSuggestionType.CITY }),
|
||||
).resolves.toEqual(['Denver']);
|
||||
@ -97,6 +107,8 @@ describe(SearchService.name, () => {
|
||||
|
||||
it('should return search suggestions for city (including null)', async () => {
|
||||
mocks.search.getCities.mockResolvedValue(['Denver']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: true, type: SearchSuggestionType.CITY }),
|
||||
).resolves.toEqual(['Denver', null]);
|
||||
@ -105,6 +117,8 @@ describe(SearchService.name, () => {
|
||||
|
||||
it('should return search suggestions for camera make', async () => {
|
||||
mocks.search.getCameraMakes.mockResolvedValue(['Nikon']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: false, type: SearchSuggestionType.CAMERA_MAKE }),
|
||||
).resolves.toEqual(['Nikon']);
|
||||
@ -113,6 +127,8 @@ describe(SearchService.name, () => {
|
||||
|
||||
it('should return search suggestions for camera make (including null)', async () => {
|
||||
mocks.search.getCameraMakes.mockResolvedValue(['Nikon']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: true, type: SearchSuggestionType.CAMERA_MAKE }),
|
||||
).resolves.toEqual(['Nikon', null]);
|
||||
@ -121,6 +137,8 @@ describe(SearchService.name, () => {
|
||||
|
||||
it('should return search suggestions for camera model', async () => {
|
||||
mocks.search.getCameraModels.mockResolvedValue(['Fujifilm X100VI']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: false, type: SearchSuggestionType.CAMERA_MODEL }),
|
||||
).resolves.toEqual(['Fujifilm X100VI']);
|
||||
@ -129,6 +147,8 @@ describe(SearchService.name, () => {
|
||||
|
||||
it('should return search suggestions for camera model (including null)', async () => {
|
||||
mocks.search.getCameraModels.mockResolvedValue(['Fujifilm X100VI']);
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
sut.getSearchSuggestions(authStub.user1, { includeNull: true, type: SearchSuggestionType.CAMERA_MODEL }),
|
||||
).resolves.toEqual(['Fujifilm X100VI', null]);
|
||||
|
@ -36,6 +36,7 @@ describe('SessionService', () => {
|
||||
updateId: 'uuid-v7',
|
||||
},
|
||||
]);
|
||||
mocks.session.delete.mockResolvedValue();
|
||||
|
||||
await expect(sut.handleCleanup()).resolves.toEqual(JobStatus.SUCCESS);
|
||||
expect(mocks.session.delete).toHaveBeenCalledWith('123');
|
||||
@ -71,6 +72,7 @@ describe('SessionService', () => {
|
||||
describe('logoutDevices', () => {
|
||||
it('should logout all devices', async () => {
|
||||
mocks.session.getByUserId.mockResolvedValue([sessionStub.inactive, sessionStub.valid] as any[]);
|
||||
mocks.session.delete.mockResolvedValue();
|
||||
|
||||
await sut.deleteAll(authStub.user1);
|
||||
|
||||
@ -83,6 +85,7 @@ describe('SessionService', () => {
|
||||
describe('logoutDevice', () => {
|
||||
it('should logout the device', async () => {
|
||||
mocks.access.authDevice.checkOwnerAccess.mockResolvedValue(new Set(['token-1']));
|
||||
mocks.session.delete.mockResolvedValue();
|
||||
|
||||
await sut.delete(authStub.user1, 'token-1');
|
||||
|
||||
|
@ -71,7 +71,10 @@ describe(SharedLinkService.name, () => {
|
||||
|
||||
describe('get', () => {
|
||||
it('should throw an error for an invalid shared link', async () => {
|
||||
mocks.sharedLink.get.mockResolvedValue(void 0);
|
||||
|
||||
await expect(sut.get(authStub.user1, 'missing-id')).rejects.toBeInstanceOf(BadRequestException);
|
||||
|
||||
expect(mocks.sharedLink.get).toHaveBeenCalledWith(authStub.user1.user.id, 'missing-id');
|
||||
expect(mocks.sharedLink.update).not.toHaveBeenCalled();
|
||||
});
|
||||
@ -194,7 +197,10 @@ describe(SharedLinkService.name, () => {
|
||||
|
||||
describe('update', () => {
|
||||
it('should throw an error for an invalid shared link', async () => {
|
||||
mocks.sharedLink.get.mockResolvedValue(void 0);
|
||||
|
||||
await expect(sut.update(authStub.user1, 'missing-id', {})).rejects.toBeInstanceOf(BadRequestException);
|
||||
|
||||
expect(mocks.sharedLink.get).toHaveBeenCalledWith(authStub.user1.user.id, 'missing-id');
|
||||
expect(mocks.sharedLink.update).not.toHaveBeenCalled();
|
||||
});
|
||||
@ -214,14 +220,20 @@ describe(SharedLinkService.name, () => {
|
||||
|
||||
describe('remove', () => {
|
||||
it('should throw an error for an invalid shared link', async () => {
|
||||
mocks.sharedLink.get.mockResolvedValue(void 0);
|
||||
|
||||
await expect(sut.remove(authStub.user1, 'missing-id')).rejects.toBeInstanceOf(BadRequestException);
|
||||
|
||||
expect(mocks.sharedLink.get).toHaveBeenCalledWith(authStub.user1.user.id, 'missing-id');
|
||||
expect(mocks.sharedLink.update).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should remove a key', async () => {
|
||||
mocks.sharedLink.get.mockResolvedValue(sharedLinkStub.valid);
|
||||
mocks.sharedLink.remove.mockResolvedValue();
|
||||
|
||||
await sut.remove(authStub.user1, sharedLinkStub.valid.id);
|
||||
|
||||
expect(mocks.sharedLink.get).toHaveBeenCalledWith(authStub.user1.user.id, sharedLinkStub.valid.id);
|
||||
expect(mocks.sharedLink.remove).toHaveBeenCalledWith(sharedLinkStub.valid);
|
||||
});
|
||||
@ -238,6 +250,7 @@ describe(SharedLinkService.name, () => {
|
||||
it('should add assets to a shared link', async () => {
|
||||
mocks.sharedLink.get.mockResolvedValue(_.cloneDeep(sharedLinkStub.individual));
|
||||
mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.individual);
|
||||
mocks.sharedLink.update.mockResolvedValue(sharedLinkStub.individual);
|
||||
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-3']));
|
||||
|
||||
await expect(
|
||||
@ -268,6 +281,7 @@ describe(SharedLinkService.name, () => {
|
||||
it('should remove assets from a shared link', async () => {
|
||||
mocks.sharedLink.get.mockResolvedValue(_.cloneDeep(sharedLinkStub.individual));
|
||||
mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.individual);
|
||||
mocks.sharedLink.update.mockResolvedValue(sharedLinkStub.individual);
|
||||
|
||||
await expect(
|
||||
sut.removeAssets(authStub.admin, 'link-1', { assetIds: [assetStub.image.id, 'asset-2'] }),
|
||||
|
@ -155,6 +155,7 @@ describe(StackService.name, () => {
|
||||
|
||||
it('should delete stack', async () => {
|
||||
mocks.access.stack.checkOwnerAccess.mockResolvedValue(new Set(['stack-id']));
|
||||
mocks.stack.delete.mockResolvedValue();
|
||||
|
||||
await sut.delete(authStub.admin, 'stack-id');
|
||||
|
||||
@ -176,6 +177,7 @@ describe(StackService.name, () => {
|
||||
|
||||
it('should delete all stacks', async () => {
|
||||
mocks.access.stack.checkOwnerAccess.mockResolvedValue(new Set(['stack-id']));
|
||||
mocks.stack.deleteAll.mockResolvedValue();
|
||||
|
||||
await sut.deleteAll(authStub.admin, { ids: ['stack-id'] });
|
||||
|
||||
|
@ -93,7 +93,9 @@ describe(StorageTemplateService.name, () => {
  describe('handleMigrationSingle', () => {
    it('should skip when storage template is disabled', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ storageTemplate: { enabled: false } });

      await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.SKIPPED);

      expect(mocks.asset.getByIds).not.toHaveBeenCalled();
      expect(mocks.storage.checkFileExists).not.toHaveBeenCalled();
      expect(mocks.storage.rename).not.toHaveBeenCalled();
|
@ -4,7 +4,7 @@ import { DateTime } from 'luxon';
import { Writable } from 'node:stream';
import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
import { SessionSyncCheckpoints } from 'src/db';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AssetResponseDto, hexOrBufferToBase64, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
  AssetDeltaSyncDto,
@ -22,10 +22,14 @@ import { setIsEqual } from 'src/utils/set';
import { fromAck, serialize } from 'src/utils/sync';

const FULL_SYNC = { needsFullSync: true, deleted: [], upserted: [] };
const SYNC_TYPES_ORDER = [
export const SYNC_TYPES_ORDER = [
  //
  SyncRequestType.UsersV1,
  SyncRequestType.PartnersV1,
  SyncRequestType.AssetsV1,
  SyncRequestType.AssetExifsV1,
  SyncRequestType.PartnerAssetsV1,
  SyncRequestType.PartnerAssetExifsV1,
];

const throwSessionRequired = () => {
@ -49,17 +53,22 @@ export class SyncService extends BaseService {
      return throwSessionRequired();
    }

    const checkpoints: Insertable<SessionSyncCheckpoints>[] = [];
    const checkpoints: Record<string, Insertable<SessionSyncCheckpoints>> = {};
    for (const ack of dto.acks) {
      const { type } = fromAck(ack);
      // TODO proper ack validation via class validator
      if (!Object.values(SyncEntityType).includes(type)) {
        throw new BadRequestException(`Invalid ack type: ${type}`);
      }
      checkpoints.push({ sessionId, type, ack });

      if (checkpoints[type]) {
        throw new BadRequestException('Only one ack per type is allowed');
      }

      checkpoints[type] = { sessionId, type, ack };
    }

    await this.syncRepository.upsertCheckpoints(checkpoints);
    await this.syncRepository.upsertCheckpoints(Object.values(checkpoints));
  }

  async deleteAcks(auth: AuthDto, dto: SyncAckDeleteDto) {
@ -115,6 +124,87 @@ export class SyncService extends BaseService {
          break;
        }

        case SyncRequestType.AssetsV1: {
          const deletes = this.syncRepository.getAssetDeletes(
            auth.user.id,
            checkpointMap[SyncEntityType.AssetDeleteV1],
          );
          for await (const { id, ...data } of deletes) {
            response.write(serialize({ type: SyncEntityType.AssetDeleteV1, updateId: id, data }));
          }

          const upserts = this.syncRepository.getAssetUpserts(auth.user.id, checkpointMap[SyncEntityType.AssetV1]);
          for await (const { updateId, checksum, thumbhash, ...data } of upserts) {
            response.write(
              serialize({
                type: SyncEntityType.AssetV1,
                updateId,
                data: {
                  ...data,
                  checksum: hexOrBufferToBase64(checksum),
                  thumbhash: thumbhash ? hexOrBufferToBase64(thumbhash) : null,
                },
              }),
            );
          }

          break;
        }

        case SyncRequestType.PartnerAssetsV1: {
          const deletes = this.syncRepository.getPartnerAssetDeletes(
            auth.user.id,
            checkpointMap[SyncEntityType.PartnerAssetDeleteV1],
          );
          for await (const { id, ...data } of deletes) {
            response.write(serialize({ type: SyncEntityType.PartnerAssetDeleteV1, updateId: id, data }));
          }

          const upserts = this.syncRepository.getPartnerAssetsUpserts(
            auth.user.id,
            checkpointMap[SyncEntityType.PartnerAssetV1],
          );
          for await (const { updateId, checksum, thumbhash, ...data } of upserts) {
            response.write(
              serialize({
                type: SyncEntityType.PartnerAssetV1,
                updateId,
                data: {
                  ...data,
                  checksum: hexOrBufferToBase64(checksum),
                  thumbhash: thumbhash ? hexOrBufferToBase64(thumbhash) : null,
                },
              }),
            );
          }

          break;
        }

        case SyncRequestType.AssetExifsV1: {
          const upserts = this.syncRepository.getAssetExifsUpserts(
            auth.user.id,
            checkpointMap[SyncEntityType.AssetExifV1],
          );
          for await (const { updateId, ...data } of upserts) {
            response.write(serialize({ type: SyncEntityType.AssetExifV1, updateId, data }));
          }

          break;
        }

        case SyncRequestType.PartnerAssetExifsV1: {
          const upserts = this.syncRepository.getPartnerAssetExifsUpserts(
            auth.user.id,
            checkpointMap[SyncEntityType.PartnerAssetExifV1],
          );
          for await (const { updateId, ...data } of upserts) {
            response.write(serialize({ type: SyncEntityType.PartnerAssetExifV1, updateId, data }));
          }

          break;
        }

        default: {
          this.logger.warn(`Unsupported sync type: ${type}`);
          break;
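The hunks above change two things in `SyncService`: `setAcks` now keys pending checkpoints by entity type and rejects duplicate acks, and the new asset handlers stream serialized lines whose `checksum`/`thumbhash` buffers are converted with `hexOrBufferToBase64`. Below is a minimal consumer-side sketch of how a client might fold such a stream into at most one ack per type before posting them back; the `SyncLine` shape and the `collectAcks` helper are illustrative assumptions and are not part of this change set.

```ts
// Hypothetical consumer-side helper (not part of this diff): keep only the
// newest ack per entity type, since setAcks now throws when a payload
// contains more than one ack for the same type.
type SyncLine = { type: string; ack: string; data: Record<string, unknown> };

export const collectAcks = (lines: SyncLine[]): string[] => {
  const latestByType: Record<string, string> = {};
  for (const line of lines) {
    // later lines win, so the checkpoint only ever moves forward
    latestByType[line.type] = line.ack;
  }
  return Object.values(latestByType);
};
```

Keeping a single, latest ack per type mirrors the new `Only one ack per type is allowed` guard and keeps the upserted checkpoints deterministic.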
|
@ -87,9 +87,12 @@ describe(TagService.name, () => {
|
||||
|
||||
it('should create a new tag with optional color', async () => {
|
||||
mocks.tag.create.mockResolvedValue(tagStub.colorCreate);
|
||||
mocks.tag.getByValue.mockResolvedValue(void 0);
|
||||
|
||||
await expect(sut.create(authStub.admin, { name: 'tag-1', color: '#000000' })).resolves.toEqual(
|
||||
tagResponseStub.color1,
|
||||
);
|
||||
|
||||
expect(mocks.tag.create).toHaveBeenCalledWith({
|
||||
userId: authStub.admin.user.id,
|
||||
value: 'tag-1',
|
||||
@ -168,6 +171,8 @@ describe(TagService.name, () => {
|
||||
|
||||
it('should remove a tag', async () => {
|
||||
mocks.tag.get.mockResolvedValue(tagStub.tag);
|
||||
mocks.tag.delete.mockResolvedValue();
|
||||
|
||||
await sut.remove(authStub.admin, 'tag-1');
|
||||
expect(mocks.tag.delete).toHaveBeenCalledWith('tag-1');
|
||||
});
|
||||
@ -223,6 +228,7 @@ describe(TagService.name, () => {
|
||||
it('should accept accept ids that are new and reject the rest', async () => {
|
||||
mocks.tag.get.mockResolvedValue(tagStub.tag);
|
||||
mocks.tag.getAssetIds.mockResolvedValue(new Set(['asset-1']));
|
||||
mocks.tag.addAssetIds.mockResolvedValue();
|
||||
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-2']));
|
||||
|
||||
await expect(
|
||||
@ -242,6 +248,8 @@ describe(TagService.name, () => {
|
||||
describe('removeAssets', () => {
|
||||
it('should throw an error for an invalid id', async () => {
|
||||
mocks.tag.getAssetIds.mockResolvedValue(new Set());
|
||||
mocks.tag.removeAssetIds.mockResolvedValue();
|
||||
|
||||
await expect(sut.removeAssets(authStub.admin, 'tag-1', { ids: ['asset-1'] })).resolves.toEqual([
|
||||
{ id: 'asset-1', success: false, error: 'not_found' },
|
||||
]);
|
||||
@ -250,6 +258,7 @@ describe(TagService.name, () => {
|
||||
it('should accept accept ids that are tagged and reject the rest', async () => {
|
||||
mocks.tag.get.mockResolvedValue(tagStub.tag);
|
||||
mocks.tag.getAssetIds.mockResolvedValue(new Set(['asset-1']));
|
||||
mocks.tag.removeAssetIds.mockResolvedValue();
|
||||
|
||||
await expect(
|
||||
sut.removeAssets(authStub.admin, 'tag-1', {
|
||||
@ -267,7 +276,10 @@ describe(TagService.name, () => {
|
||||
|
||||
describe('handleTagCleanup', () => {
|
||||
it('should delete empty tags', async () => {
|
||||
mocks.tag.deleteEmptyTags.mockResolvedValue();
|
||||
|
||||
await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.SUCCESS);
|
||||
|
||||
expect(mocks.tag.deleteEmptyTags).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
@ -70,6 +70,7 @@ describe(TimelineService.name, () => {

    it('should include partner shared assets', async () => {
      mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
      mocks.partner.getAll.mockResolvedValue([]);

      await expect(
        sut.getTimeBucket(authStub.admin, {
|
@ -39,6 +39,7 @@ describe(TrashService.name, () => {

    it('should restore a batch of assets', async () => {
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset1', 'asset2']));
      mocks.trash.restoreAll.mockResolvedValue(0);

      await sut.restoreAssets(authStub.user1, { ids: ['asset1', 'asset2'] });

|
@ -4,6 +4,7 @@ import { serverVersion } from 'src/constants';
import { ImmichEnvironment, JobName, JobStatus, SystemMetadataKey } from 'src/enum';
import { VersionService } from 'src/services/version.service';
import { mockEnvData } from 'test/repositories/config.repository.mock';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';

const mockRelease = (version: string) => ({
@ -30,7 +31,12 @@ describe(VersionService.name, () => {

  describe('onBootstrap', () => {
    it('should record a new version', async () => {
      mocks.versionHistory.getAll.mockResolvedValue([]);
      mocks.versionHistory.getLatest.mockResolvedValue(void 0);
      mocks.versionHistory.create.mockResolvedValue(factory.versionHistory());

      await expect(sut.onBootstrap()).resolves.toBeUndefined();

      expect(mocks.versionHistory.create).toHaveBeenCalledWith({ version: expect.any(String) });
    });

|
@ -34,9 +34,9 @@ import { TrashRepository } from 'src/repositories/trash.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { ViewRepository } from 'src/repositories/view-repository';
import { newLoggingRepositoryMock } from 'test/repositories/logger.repository.mock';
import { newTelemetryRepositoryMock } from 'test/repositories/telemetry.repository.mock';
import { newUuid } from 'test/small.factory';
import { automock } from 'test/utils';

class CustomWritable extends Writable {
  private data = '';
@ -55,7 +55,7 @@ class CustomWritable extends Writable {
  }
}

type Asset = Insertable<Assets>;
type Asset = Partial<Insertable<Assets>>;
type User = Partial<Insertable<Users>>;
type Library = Partial<Insertable<Libraries>>;
type Session = Omit<Insertable<Sessions>, 'token'> & { token?: string };
@ -176,10 +176,6 @@ export class TestFactory {
  }

  async create() {
    for (const asset of this.assets) {
      await this.context.createAsset(asset);
    }

    for (const user of this.users) {
      await this.context.createUser(user);
    }
@ -192,6 +188,10 @@ export class TestFactory {
      await this.context.createSession(session);
    }

    for (const asset of this.assets) {
      await this.context.createAsset(asset);
    }

    return this.context;
  }
}
@ -228,8 +228,8 @@ export class TestContext {
  versionHistory: VersionHistoryRepository;
  view: ViewRepository;

  private constructor(private db: Kysely<DB>) {
    const logger = newLoggingRepositoryMock() as unknown as LoggingRepository;
  private constructor(public db: Kysely<DB>) {
    const logger = automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false });
    const config = new ConfigRepository();

    this.access = new AccessRepository(this.db);
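The reorder in `TestFactory.create()` (assets are now inserted after users, libraries, and sessions) keeps the `ownerId` foreign key satisfied when an asset references a user queued in the same factory run. A hedged usage sketch follows, mirroring the builder chain that the new medium tests rely on; the `seedOwnerAndAsset` helper itself is illustrative only.

```ts
// Hedged usage sketch: withUser/withAsset only queue rows, and create() now
// flushes users first and assets last, so the asset's ownerId already exists.
import { TestContext, TestFactory } from 'test/factory';
import { getKyselyDB } from 'test/utils';

const seedOwnerAndAsset = async () => {
  const db = await getKyselyDB();
  const context = await TestContext.from(db).create();

  const owner = TestFactory.user();
  const asset = TestFactory.asset({ ownerId: owner.id });

  await context.getFactory().withUser(owner).withAsset(asset).create();

  return { context, owner, asset };
};
```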
|
20
server/test/fixtures/api-key.stub.ts
vendored
@ -1,20 +0,0 @@
|
||||
import { authStub } from 'test/fixtures/auth.stub';
|
||||
import { userStub } from 'test/fixtures/user.stub';
|
||||
|
||||
export const keyStub = {
|
||||
authKey: Object.freeze({
|
||||
id: 'my-random-guid',
|
||||
key: 'my-api-key (hashed)',
|
||||
user: userStub.admin,
|
||||
permissions: [],
|
||||
} as any),
|
||||
|
||||
admin: Object.freeze({
|
||||
id: 'my-random-guid',
|
||||
name: 'My Key',
|
||||
key: 'my-api-key (hashed)',
|
||||
userId: authStub.admin.user.id,
|
||||
user: userStub.admin,
|
||||
permissions: [],
|
||||
} as any),
|
||||
};
|
74
server/test/medium/specs/audit.database.spec.ts
Normal file
@ -0,0 +1,74 @@
import { TestContext, TestFactory } from 'test/factory';
import { getKyselyDB } from 'test/utils';

describe('audit', () => {
  let context: TestContext;

  beforeAll(async () => {
    const db = await getKyselyDB();
    context = await TestContext.from(db).create();
  });

  describe('partners_audit', () => {
    it('should not cascade user deletes to partners_audit', async () => {
      const user1 = TestFactory.user();
      const user2 = TestFactory.user();

      await context
        .getFactory()
        .withUser(user1)
        .withUser(user2)
        .withPartner({ sharedById: user1.id, sharedWithId: user2.id })
        .create();

      await context.user.delete(user1, true);

      await expect(
        context.db.selectFrom('partners_audit').select(['id']).where('sharedById', '=', user1.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });

  describe('assets_audit', () => {
    it('should not cascade user deletes to assets_audit', async () => {
      const user = TestFactory.user();
      const asset = TestFactory.asset({ ownerId: user.id });

      await context.getFactory().withUser(user).withAsset(asset).create();

      await context.user.delete(user, true);

      await expect(
        context.db.selectFrom('assets_audit').select(['id']).where('assetId', '=', asset.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });

  describe('exif', () => {
    it('should automatically set updatedAt and updateId when the row is updated', async () => {
      const user = TestFactory.user();
      const asset = TestFactory.asset({ ownerId: user.id });
      const exif = { assetId: asset.id, make: 'Canon' };

      await context.getFactory().withUser(user).withAsset(asset).create();
      await context.asset.upsertExif(exif);

      const before = await context.db
        .selectFrom('exif')
        .select(['updatedAt', 'updateId'])
        .where('assetId', '=', asset.id)
        .executeTakeFirstOrThrow();

      await context.asset.upsertExif({ assetId: asset.id, make: 'Canon 2' });

      const after = await context.db
        .selectFrom('exif')
        .select(['updatedAt', 'updateId'])
        .where('assetId', '=', asset.id)
        .executeTakeFirstOrThrow();

      expect(before.updateId).not.toEqual(after.updateId);
      expect(before.updatedAt).not.toEqual(after.updatedAt);
    });
  });
});
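The `exif` test above checks that `updatedAt` and `updateId` advance on every update, which is what lets the sync endpoints treat `updateId` as a monotonic checkpoint. A rough Kysely sketch of that kind of cursor query follows; only the table and column names come from the test, while the pagination shape (`orderBy`/`limit`) and the helper name are assumptions for illustration.

```ts
// Illustrative sketch only: page through exif rows changed after a checkpoint,
// using the trigger-maintained updateId column as the cursor.
import { Kysely } from 'kysely';

export const getExifUpdatesAfter = (db: Kysely<any>, checkpointUpdateId: string) =>
  db
    .selectFrom('exif')
    .selectAll()
    .where('updateId', '>', checkpointUpdateId)
    .orderBy('updateId', 'asc')
    .limit(1000)
    .execute();
```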
|
@ -3,12 +3,14 @@ import { writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { AssetEntity } from 'src/entities/asset.entity';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MetadataService } from 'src/services/metadata.service';
import { newFakeLoggingRepository } from 'test/repositories/logger.repository.mock';
import { newRandomImage, newTestService, ServiceMocks } from 'test/utils';
import { automock, newRandomImage, newTestService, ServiceMocks } from 'test/utils';

const metadataRepository = new MetadataRepository(newFakeLoggingRepository());
const metadataRepository = new MetadataRepository(
  automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }),
);

const createTestFile = async (exifData: Record<string, any>) => {
  const data = newRandomImage();
|
@ -1,6 +1,6 @@
import { AuthDto } from 'src/dtos/auth.dto';
import { SyncRequestType } from 'src/enum';
import { SyncService } from 'src/services/sync.service';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { SYNC_TYPES_ORDER, SyncService } from 'src/services/sync.service';
import { TestContext, TestFactory } from 'test/factory';
import { getKyselyDB, newTestService } from 'test/utils';

@ -33,7 +33,15 @@ const setup = async () => {
};

describe(SyncService.name, () => {
  describe.concurrent('users', () => {
  it('should have all the types in the ordering variable', () => {
    for (const key in SyncRequestType) {
      expect(SYNC_TYPES_ORDER).includes(key);
    }

    expect(SYNC_TYPES_ORDER.length).toBe(Object.keys(SyncRequestType).length);
  });

  describe.concurrent(SyncEntityType.UserV1, () => {
    it('should detect and sync the first user', async () => {
      const { context, auth, sut, testSync } = await setup();

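The new ordering test in the hunk above enforces at runtime that `SYNC_TYPES_ORDER` stays in lockstep with `SyncRequestType`. A compile-time variant is sketched below purely for illustration; it assumes `SYNC_TYPES_ORDER` is declared `as const`, which this diff does not show, so the runtime test remains the check that actually ships.

```ts
// Illustrative compile-time exhaustiveness sketch (assumes `as const` on the array).
import { SyncRequestType } from 'src/enum';
import { SYNC_TYPES_ORDER } from 'src/services/sync.service';

// Union of enum members that appear in the ordering array.
type Covered = (typeof SYNC_TYPES_ORDER)[number];
// Any enum member missing from the array shows up here instead of `never`.
type Missing = Exclude<SyncRequestType, Covered>;

// Fails to compile if a SyncRequestType was added without extending SYNC_TYPES_ORDER.
const assertAllSyncTypesOrdered: [Missing] extends [never] ? true : Missing = true;
```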
@ -189,7 +197,7 @@ describe(SyncService.name, () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe.concurrent('partners', () => {
|
||||
describe.concurrent(SyncEntityType.PartnerV1, () => {
|
||||
it('should detect and sync the first partner', async () => {
|
||||
const { auth, context, sut, testSync } = await setup();
|
||||
|
||||
@ -349,7 +357,7 @@ describe(SyncService.name, () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should not sync a partner for an unrelated user', async () => {
|
||||
it('should not sync a partner or partner delete for an unrelated user', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
@ -357,9 +365,436 @@ describe(SyncService.name, () => {
|
||||
|
||||
await context.createPartner({ sharedById: user2.id, sharedWithId: user3.id });
|
||||
|
||||
const response = await testSync(auth, [SyncRequestType.PartnersV1]);
|
||||
expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0);
|
||||
|
||||
await context.partner.remove({ sharedById: user2.id, sharedWithId: user3.id });
|
||||
|
||||
expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not sync a partner delete after a user is deleted', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
await context.createPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
await context.user.delete({ id: user2.id }, true);
|
||||
|
||||
expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe.concurrent(SyncEntityType.AssetV1, () => {
|
||||
it('should detect and sync the first asset', async () => {
|
||||
const { auth, context, sut, testSync } = await setup();
|
||||
|
||||
const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
|
||||
const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
|
||||
const date = new Date().toISOString();
|
||||
|
||||
const asset = TestFactory.asset({
|
||||
ownerId: auth.user.id,
|
||||
checksum: Buffer.from(checksum, 'base64'),
|
||||
thumbhash: Buffer.from(thumbhash, 'base64'),
|
||||
fileCreatedAt: date,
|
||||
fileModifiedAt: date,
|
||||
deletedAt: null,
|
||||
});
|
||||
await context.createAsset(asset);
|
||||
|
||||
const initialSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]);
|
||||
|
||||
expect(initialSyncResponse).toHaveLength(1);
|
||||
expect(initialSyncResponse).toEqual(
|
||||
expect.arrayContaining([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
id: asset.id,
|
||||
ownerId: asset.ownerId,
|
||||
thumbhash,
|
||||
checksum,
|
||||
deletedAt: null,
|
||||
fileCreatedAt: date,
|
||||
fileModifiedAt: date,
|
||||
isFavorite: false,
|
||||
isVisible: true,
|
||||
localDateTime: null,
|
||||
type: asset.type,
|
||||
},
|
||||
type: 'AssetV1',
|
||||
},
|
||||
]),
|
||||
);
|
||||
|
||||
const acks = [initialSyncResponse[0].ack];
|
||||
await sut.setAcks(auth, { acks });
|
||||
|
||||
const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]);
|
||||
|
||||
expect(ackSyncResponse).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted asset', async () => {
|
||||
const { auth, context, sut, testSync } = await setup();
|
||||
|
||||
const asset = TestFactory.asset({ ownerId: auth.user.id });
|
||||
await context.createAsset(asset);
|
||||
await context.asset.remove(asset);
|
||||
|
||||
const response = await testSync(auth, [SyncRequestType.AssetsV1]);
|
||||
|
||||
expect(response).toHaveLength(1);
|
||||
expect(response).toEqual(
|
||||
expect.arrayContaining([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetId: asset.id,
|
||||
},
|
||||
type: 'AssetDeleteV1',
|
||||
},
|
||||
]),
|
||||
);
|
||||
|
||||
const acks = response.map(({ ack }) => ack);
|
||||
await sut.setAcks(auth, { acks });
|
||||
|
||||
const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]);
|
||||
|
||||
expect(ackSyncResponse).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not sync an asset or asset delete for an unrelated user', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
const session = TestFactory.session({ userId: user2.id });
|
||||
const auth2 = TestFactory.auth({ session, user: user2 });
|
||||
|
||||
const asset = TestFactory.asset({ ownerId: user2.id });
|
||||
await context.createAsset(asset);
|
||||
|
||||
expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
|
||||
expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);
|
||||
|
||||
await context.asset.remove(asset);
|
||||
expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
|
||||
expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe.concurrent(SyncRequestType.PartnerAssetsV1, () => {
|
||||
it('should detect and sync the first partner asset', async () => {
|
||||
const { auth, context, sut, testSync } = await setup();
|
||||
|
||||
const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
|
||||
const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
|
||||
const date = new Date().toISOString();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
|
||||
const asset = TestFactory.asset({
|
||||
ownerId: user2.id,
|
||||
checksum: Buffer.from(checksum, 'base64'),
|
||||
thumbhash: Buffer.from(thumbhash, 'base64'),
|
||||
fileCreatedAt: date,
|
||||
fileModifiedAt: date,
|
||||
deletedAt: null,
|
||||
});
|
||||
await context.createAsset(asset);
|
||||
await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
|
||||
expect(initialSyncResponse).toHaveLength(1);
|
||||
expect(initialSyncResponse).toEqual(
|
||||
expect.arrayContaining([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
id: asset.id,
|
||||
ownerId: asset.ownerId,
|
||||
thumbhash,
|
||||
checksum,
|
||||
deletedAt: null,
|
||||
fileCreatedAt: date,
|
||||
fileModifiedAt: date,
|
||||
isFavorite: false,
|
||||
isVisible: true,
|
||||
localDateTime: null,
|
||||
type: asset.type,
|
||||
},
|
||||
type: SyncEntityType.PartnerAssetV1,
|
||||
},
|
||||
]),
|
||||
);
|
||||
|
||||
const acks = [initialSyncResponse[0].ack];
|
||||
await sut.setAcks(auth, { acks });
|
||||
|
||||
const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
|
||||
expect(ackSyncResponse).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted partner asset', async () => {
|
||||
const { auth, context, sut, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
const asset = TestFactory.asset({ ownerId: user2.id });
|
||||
await context.createAsset(asset);
|
||||
await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
await context.asset.remove(asset);
|
||||
|
||||
const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
|
||||
expect(response).toHaveLength(1);
|
||||
expect(response).toEqual(
|
||||
expect.arrayContaining([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetId: asset.id,
|
||||
},
|
||||
type: SyncEntityType.PartnerAssetDeleteV1,
|
||||
},
|
||||
]),
|
||||
);
|
||||
|
||||
const acks = response.map(({ ack }) => ack);
|
||||
await sut.setAcks(auth, { acks });
|
||||
|
||||
const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
|
||||
expect(ackSyncResponse).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not sync a deleted partner asset due to a user delete', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
await context.createAsset({ ownerId: user2.id });
|
||||
await context.user.delete({ id: user2.id }, true);
|
||||
|
||||
const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
|
||||
expect(response).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not sync a deleted partner asset due to a partner delete (unshare)', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
await context.createAsset({ ownerId: user2.id });
|
||||
const partner = { sharedById: user2.id, sharedWithId: auth.user.id };
|
||||
await context.partner.create(partner);
|
||||
|
||||
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(1);
|
||||
|
||||
await context.partner.remove(partner);
|
||||
|
||||
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not sync an asset or asset delete for own user', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
const asset = await context.createAsset({ ownerId: auth.user.id });
|
||||
const partner = { sharedById: user2.id, sharedWithId: auth.user.id };
|
||||
await context.partner.create(partner);
|
||||
|
||||
await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
|
||||
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
|
||||
|
||||
await context.asset.remove(asset);
|
||||
|
||||
await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
|
||||
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not sync an asset or asset delete for unrelated user', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
const session = TestFactory.session({ userId: user2.id });
|
||||
const auth2 = TestFactory.auth({ session, user: user2 });
|
||||
const asset = await context.createAsset({ ownerId: user2.id });
|
||||
|
||||
await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
|
||||
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
|
||||
|
||||
await context.asset.remove(asset);
|
||||
|
||||
await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
|
||||
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe.concurrent(SyncRequestType.AssetExifsV1, () => {
|
||||
it('should detect and sync the first asset exif', async () => {
|
||||
const { auth, context, sut, testSync } = await setup();
|
||||
|
||||
const asset = TestFactory.asset({ ownerId: auth.user.id });
|
||||
const exif = { assetId: asset.id, make: 'Canon' };
|
||||
|
||||
await context.createAsset(asset);
|
||||
await context.asset.upsertExif(exif);
|
||||
|
||||
const initialSyncResponse = await testSync(auth, [SyncRequestType.AssetExifsV1]);
|
||||
|
||||
expect(initialSyncResponse).toHaveLength(1);
|
||||
expect(initialSyncResponse).toEqual(
|
||||
expect.arrayContaining([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetId: asset.id,
|
||||
city: null,
|
||||
country: null,
|
||||
dateTimeOriginal: null,
|
||||
description: '',
|
||||
exifImageHeight: null,
|
||||
exifImageWidth: null,
|
||||
exposureTime: null,
|
||||
fNumber: null,
|
||||
fileSizeInByte: null,
|
||||
focalLength: null,
|
||||
fps: null,
|
||||
iso: null,
|
||||
latitude: null,
|
||||
lensModel: null,
|
||||
longitude: null,
|
||||
make: 'Canon',
|
||||
model: null,
|
||||
modifyDate: null,
|
||||
orientation: null,
|
||||
profileDescription: null,
|
||||
projectionType: null,
|
||||
rating: null,
|
||||
state: null,
|
||||
timeZone: null,
|
||||
},
|
||||
type: SyncEntityType.AssetExifV1,
|
||||
},
|
||||
]),
|
||||
);
|
||||
|
||||
const acks = [initialSyncResponse[0].ack];
|
||||
await sut.setAcks(auth, { acks });
|
||||
|
||||
const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetExifsV1]);
|
||||
|
||||
expect(ackSyncResponse).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should only sync asset exif for own user', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
const session = TestFactory.session({ userId: user2.id });
|
||||
const auth2 = TestFactory.auth({ session, user: user2 });
|
||||
|
||||
await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
const asset = TestFactory.asset({ ownerId: user2.id });
|
||||
const exif = { assetId: asset.id, make: 'Canon' };
|
||||
|
||||
await context.createAsset(asset);
|
||||
await context.asset.upsertExif(exif);
|
||||
|
||||
await expect(testSync(auth2, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
|
||||
await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe.concurrent(SyncRequestType.PartnerAssetExifsV1, () => {
|
||||
it('should detect and sync the first partner asset exif', async () => {
|
||||
const { auth, context, sut, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
const asset = TestFactory.asset({ ownerId: user2.id });
|
||||
await context.createAsset(asset);
|
||||
const exif = { assetId: asset.id, make: 'Canon' };
|
||||
await context.asset.upsertExif(exif);
|
||||
|
||||
const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
|
||||
expect(initialSyncResponse).toHaveLength(1);
|
||||
expect(initialSyncResponse).toEqual(
|
||||
expect.arrayContaining([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetId: asset.id,
|
||||
city: null,
|
||||
country: null,
|
||||
dateTimeOriginal: null,
|
||||
description: '',
|
||||
exifImageHeight: null,
|
||||
exifImageWidth: null,
|
||||
exposureTime: null,
|
||||
fNumber: null,
|
||||
fileSizeInByte: null,
|
||||
focalLength: null,
|
||||
fps: null,
|
||||
iso: null,
|
||||
latitude: null,
|
||||
lensModel: null,
|
||||
longitude: null,
|
||||
make: 'Canon',
|
||||
model: null,
|
||||
modifyDate: null,
|
||||
orientation: null,
|
||||
profileDescription: null,
|
||||
projectionType: null,
|
||||
rating: null,
|
||||
state: null,
|
||||
timeZone: null,
|
||||
},
|
||||
type: SyncEntityType.PartnerAssetExifV1,
|
||||
},
|
||||
]),
|
||||
);
|
||||
|
||||
const acks = [initialSyncResponse[0].ack];
|
||||
await sut.setAcks(auth, { acks });
|
||||
|
||||
const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
|
||||
expect(ackSyncResponse).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not sync partner asset exif for own user', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
const asset = TestFactory.asset({ ownerId: auth.user.id });
|
||||
const exif = { assetId: asset.id, make: 'Canon' };
|
||||
await context.createAsset(asset);
|
||||
await context.asset.upsertExif(exif);
|
||||
|
||||
await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
|
||||
await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not sync partner asset exif for unrelated user', async () => {
|
||||
const { auth, context, testSync } = await setup();
|
||||
|
||||
const user2 = await context.createUser();
|
||||
const user3 = await context.createUser();
|
||||
const session = TestFactory.session({ userId: user3.id });
|
||||
const authUser3 = TestFactory.auth({ session, user: user3 });
|
||||
await context.partner.create({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
const asset = TestFactory.asset({ ownerId: user3.id });
|
||||
const exif = { assetId: asset.id, make: 'Canon' };
|
||||
await context.createAsset(asset);
|
||||
await context.asset.upsertExif(exif);
|
||||
|
||||
await expect(testSync(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
|
||||
await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,12 +0,0 @@
|
||||
import { ActivityRepository } from 'src/repositories/activity.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newActivityRepositoryMock = (): Mocked<RepositoryInterface<ActivityRepository>> => {
|
||||
return {
|
||||
search: vitest.fn(),
|
||||
create: vitest.fn(),
|
||||
delete: vitest.fn(),
|
||||
getStatistics: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,11 +0,0 @@
|
||||
import { AlbumUserRepository } from 'src/repositories/album-user.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked } from 'vitest';
|
||||
|
||||
export const newAlbumUserRepositoryMock = (): Mocked<RepositoryInterface<AlbumUserRepository>> => {
|
||||
return {
|
||||
create: vitest.fn(),
|
||||
delete: vitest.fn(),
|
||||
update: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,25 +0,0 @@
|
||||
import { AlbumRepository } from 'src/repositories/album.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newAlbumRepositoryMock = (): Mocked<RepositoryInterface<AlbumRepository>> => {
|
||||
return {
|
||||
getById: vitest.fn(),
|
||||
getByAssetId: vitest.fn(),
|
||||
getMetadataForIds: vitest.fn(),
|
||||
getOwned: vitest.fn(),
|
||||
getShared: vitest.fn(),
|
||||
getNotShared: vitest.fn(),
|
||||
restoreAll: vitest.fn(),
|
||||
softDeleteAll: vitest.fn(),
|
||||
deleteAll: vitest.fn(),
|
||||
addAssetIds: vitest.fn(),
|
||||
removeAsset: vitest.fn(),
|
||||
removeAssetIds: vitest.fn(),
|
||||
getAssetIds: vitest.fn(),
|
||||
create: vitest.fn(),
|
||||
update: vitest.fn(),
|
||||
delete: vitest.fn(),
|
||||
updateThumbnails: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,14 +0,0 @@
|
||||
import { ApiKeyRepository } from 'src/repositories/api-key.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newKeyRepositoryMock = (): Mocked<RepositoryInterface<ApiKeyRepository>> => {
|
||||
return {
|
||||
create: vitest.fn(),
|
||||
update: vitest.fn(),
|
||||
delete: vitest.fn(),
|
||||
getKey: vitest.fn(),
|
||||
getById: vitest.fn(),
|
||||
getByUserId: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,10 +0,0 @@
|
||||
import { AuditRepository } from 'src/repositories/audit.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newAuditRepositoryMock = (): Mocked<RepositoryInterface<AuditRepository>> => {
|
||||
return {
|
||||
getAfter: vitest.fn(),
|
||||
removeBefore: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,10 +0,0 @@
|
||||
import { CronRepository } from 'src/repositories/cron.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newCronRepositoryMock = (): Mocked<RepositoryInterface<CronRepository>> => {
|
||||
return {
|
||||
create: vitest.fn(),
|
||||
update: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,12 +0,0 @@
|
||||
import { DownloadRepository } from 'src/repositories/download.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newDownloadRepositoryMock = (): Mocked<RepositoryInterface<DownloadRepository>> => {
|
||||
return {
|
||||
downloadAssetIds: vitest.fn(),
|
||||
downloadMotionAssetIds: vitest.fn(),
|
||||
downloadAlbumId: vitest.fn(),
|
||||
downloadUserId: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,17 +0,0 @@
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newEventRepositoryMock = (): Mocked<RepositoryInterface<EventRepository>> => {
|
||||
return {
|
||||
setup: vitest.fn(),
|
||||
emit: vitest.fn() as any,
|
||||
clientSend: vitest.fn() as any,
|
||||
clientBroadcast: vitest.fn() as any,
|
||||
serverSend: vitest.fn(),
|
||||
afterInit: vitest.fn(),
|
||||
handleConnection: vitest.fn(),
|
||||
handleDisconnect: vitest.fn(),
|
||||
setAuthFn: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,17 +0,0 @@
|
||||
import { LibraryRepository } from 'src/repositories/library.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newLibraryRepositoryMock = (): Mocked<RepositoryInterface<LibraryRepository>> => {
|
||||
return {
|
||||
get: vitest.fn(),
|
||||
create: vitest.fn(),
|
||||
delete: vitest.fn(),
|
||||
softDelete: vitest.fn(),
|
||||
update: vitest.fn(),
|
||||
getStatistics: vitest.fn(),
|
||||
getAllDeleted: vitest.fn(),
|
||||
getAll: vitest.fn(),
|
||||
streamAssetIds: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,23 +0,0 @@
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newLoggingRepositoryMock = (): Mocked<RepositoryInterface<LoggingRepository>> => {
|
||||
return {
|
||||
setLogLevel: vitest.fn(),
|
||||
setContext: vitest.fn(),
|
||||
setAppName: vitest.fn(),
|
||||
isLevelEnabled: vitest.fn(),
|
||||
verbose: vitest.fn(),
|
||||
verboseFn: vitest.fn(),
|
||||
debug: vitest.fn(),
|
||||
debugFn: vitest.fn(),
|
||||
log: vitest.fn(),
|
||||
warn: vitest.fn(),
|
||||
error: vitest.fn(),
|
||||
fatal: vitest.fn(),
|
||||
};
|
||||
};
|
||||
|
||||
export const newFakeLoggingRepository = () =>
|
||||
newLoggingRepositoryMock() as RepositoryInterface<LoggingRepository> as LoggingRepository;
|
@ -1,11 +0,0 @@
|
||||
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newMachineLearningRepositoryMock = (): Mocked<RepositoryInterface<MachineLearningRepository>> => {
|
||||
return {
|
||||
encodeImage: vitest.fn(),
|
||||
encodeText: vitest.fn(),
|
||||
detectFaces: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,11 +0,0 @@
|
||||
import { MapRepository } from 'src/repositories/map.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked } from 'vitest';
|
||||
|
||||
export const newMapRepositoryMock = (): Mocked<RepositoryInterface<MapRepository>> => {
|
||||
return {
|
||||
init: vitest.fn(),
|
||||
reverseGeocode: vitest.fn(),
|
||||
getMapMarkers: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,17 +0,0 @@
|
||||
import { MemoryRepository } from 'src/repositories/memory.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newMemoryRepositoryMock = (): Mocked<RepositoryInterface<MemoryRepository>> => {
|
||||
return {
|
||||
search: vitest.fn().mockResolvedValue([]),
|
||||
get: vitest.fn(),
|
||||
create: vitest.fn(),
|
||||
update: vitest.fn(),
|
||||
delete: vitest.fn(),
|
||||
getAssetIds: vitest.fn().mockResolvedValue(new Set()),
|
||||
addAssetIds: vitest.fn(),
|
||||
removeAssetIds: vitest.fn(),
|
||||
cleanup: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,14 +0,0 @@
|
||||
import { MoveRepository } from 'src/repositories/move.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newMoveRepositoryMock = (): Mocked<RepositoryInterface<MoveRepository>> => {
|
||||
return {
|
||||
create: vitest.fn(),
|
||||
getByEntity: vitest.fn(),
|
||||
update: vitest.fn(),
|
||||
delete: vitest.fn(),
|
||||
cleanMoveHistory: vitest.fn(),
|
||||
cleanMoveHistorySingle: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,11 +0,0 @@
|
||||
import { NotificationRepository } from 'src/repositories/notification.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked } from 'vitest';
|
||||
|
||||
export const newNotificationRepositoryMock = (): Mocked<RepositoryInterface<NotificationRepository>> => {
|
||||
return {
|
||||
renderEmail: vitest.fn(),
|
||||
sendEmail: vitest.fn().mockResolvedValue({ messageId: 'message-1' }),
|
||||
verifySmtp: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,12 +0,0 @@
|
||||
import { OAuthRepository } from 'src/repositories/oauth.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked } from 'vitest';
|
||||
|
||||
export const newOAuthRepositoryMock = (): Mocked<RepositoryInterface<OAuthRepository>> => {
|
||||
return {
|
||||
init: vitest.fn(),
|
||||
authorize: vitest.fn(),
|
||||
getLogoutEndpoint: vitest.fn(),
|
||||
getProfile: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,13 +0,0 @@
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newPartnerRepositoryMock = (): Mocked<RepositoryInterface<PartnerRepository>> => {
|
||||
return {
|
||||
create: vitest.fn(),
|
||||
remove: vitest.fn(),
|
||||
getAll: vitest.fn().mockResolvedValue([]),
|
||||
get: vitest.fn(),
|
||||
update: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,41 +0,0 @@
|
||||
import { PersonRepository } from 'src/repositories/person.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newPersonRepositoryMock = (): Mocked<RepositoryInterface<PersonRepository>> => {
|
||||
return {
|
||||
getById: vitest.fn(),
|
||||
getAll: vitest.fn(),
|
||||
getAllForUser: vitest.fn(),
|
||||
getAllWithoutFaces: vitest.fn(),
|
||||
|
||||
getByName: vitest.fn(),
|
||||
getDistinctNames: vitest.fn(),
|
||||
|
||||
create: vitest.fn(),
|
||||
createAll: vitest.fn(),
|
||||
update: vitest.fn(),
|
||||
updateAll: vitest.fn(),
|
||||
delete: vitest.fn(),
|
||||
deleteFaces: vitest.fn(),
|
||||
|
||||
getStatistics: vitest.fn(),
|
||||
getAllFaces: vitest.fn(),
|
||||
getFacesByIds: vitest.fn(),
|
||||
getRandomFace: vitest.fn(),
|
||||
|
||||
reassignFaces: vitest.fn(),
|
||||
unassignFaces: vitest.fn(),
|
||||
refreshFaces: vitest.fn(),
|
||||
getFaces: vitest.fn(),
|
||||
reassignFace: vitest.fn(),
|
||||
getFaceById: vitest.fn(),
|
||||
getFaceByIdWithAssets: vitest.fn(),
|
||||
getNumberOfPeople: vitest.fn(),
|
||||
getLatestFaceDate: vitest.fn(),
|
||||
|
||||
createAssetFace: vitest.fn(),
|
||||
deleteAssetFace: vitest.fn(),
|
||||
softDeleteAssetFaces: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,9 +0,0 @@
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newProcessRepositoryMock = (): Mocked<RepositoryInterface<ProcessRepository>> => {
|
||||
return {
|
||||
spawn: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,24 +0,0 @@
|
||||
import { SearchRepository } from 'src/repositories/search.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newSearchRepositoryMock = (): Mocked<RepositoryInterface<SearchRepository>> => {
|
||||
return {
|
||||
searchMetadata: vitest.fn(),
|
||||
searchSmart: vitest.fn(),
|
||||
searchDuplicates: vitest.fn(),
|
||||
searchFaces: vitest.fn(),
|
||||
searchRandom: vitest.fn(),
|
||||
upsert: vitest.fn(),
|
||||
searchPlaces: vitest.fn(),
|
||||
getAssetsByCity: vitest.fn(),
|
||||
deleteAllSearchEmbeddings: vitest.fn(),
|
||||
getDimensionSize: vitest.fn(),
|
||||
setDimensionSize: vitest.fn(),
|
||||
getCameraMakes: vitest.fn(),
|
||||
getCameraModels: vitest.fn(),
|
||||
getCities: vitest.fn(),
|
||||
getCountries: vitest.fn(),
|
||||
getStates: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,10 +0,0 @@
|
||||
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
|
||||
import { RepositoryInterface } from 'src/types';
|
||||
import { Mocked, vitest } from 'vitest';
|
||||
|
||||
export const newServerInfoRepositoryMock = (): Mocked<RepositoryInterface<ServerInfoRepository>> => {
|
||||
return {
|
||||
getGitHubRelease: vitest.fn(),
|
||||
getBuildVersions: vitest.fn(),
|
||||
};
|
||||
};
|
@ -1,14 +0,0 @@
import { SessionRepository } from 'src/repositories/session.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';

export const newSessionRepositoryMock = (): Mocked<RepositoryInterface<SessionRepository>> => {
  return {
    search: vitest.fn(),
    create: vitest.fn() as any,
    update: vitest.fn() as any,
    delete: vitest.fn(),
    getByToken: vitest.fn(),
    getByUserId: vitest.fn(),
  };
};
@ -1,14 +0,0 @@
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';

export const newSharedLinkRepositoryMock = (): Mocked<RepositoryInterface<SharedLinkRepository>> => {
  return {
    getAll: vitest.fn(),
    get: vitest.fn(),
    getByKey: vitest.fn(),
    create: vitest.fn(),
    remove: vitest.fn(),
    update: vitest.fn(),
  };
};
@ -1,14 +0,0 @@
import { StackRepository } from 'src/repositories/stack.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';

export const newStackRepositoryMock = (): Mocked<RepositoryInterface<StackRepository>> => {
  return {
    search: vitest.fn(),
    create: vitest.fn(),
    update: vitest.fn(),
    delete: vitest.fn(),
    getById: vitest.fn(),
    deleteAll: vitest.fn(),
  };
};
@ -1,15 +0,0 @@
import { SyncRepository } from 'src/repositories/sync.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';

export const newSyncRepositoryMock = (): Mocked<RepositoryInterface<SyncRepository>> => {
  return {
    getCheckpoints: vitest.fn(),
    upsertCheckpoints: vitest.fn(),
    deleteCheckpoints: vitest.fn(),
    getUserUpserts: vitest.fn(),
    getUserDeletes: vitest.fn(),
    getPartnerUpserts: vitest.fn(),
    getPartnerDeletes: vitest.fn(),
  };
};
@ -1,23 +0,0 @@
import { TagRepository } from 'src/repositories/tag.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';

export const newTagRepositoryMock = (): Mocked<RepositoryInterface<TagRepository>> => {
  return {
    getAll: vitest.fn(),
    getByValue: vitest.fn(),
    upsertValue: vitest.fn(),
    replaceAssetTags: vitest.fn(),

    get: vitest.fn(),
    create: vitest.fn(),
    update: vitest.fn(),
    delete: vitest.fn(),

    getAssetIds: vitest.fn(),
    addAssetIds: vitest.fn(),
    removeAssetIds: vitest.fn(),
    upsertAssetIds: vitest.fn(),
    deleteEmptyTags: vitest.fn(),
  };
};
@ -1,12 +0,0 @@
import { TrashRepository } from 'src/repositories/trash.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';

export const newTrashRepositoryMock = (): Mocked<RepositoryInterface<TrashRepository>> => {
  return {
    empty: vitest.fn(),
    restore: vitest.fn(),
    restoreAll: vitest.fn(),
    getDeletedIds: vitest.fn(),
  };
};
@ -1,26 +0,0 @@
import { UserRepository } from 'src/repositories/user.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';

export const newUserRepositoryMock = (): Mocked<RepositoryInterface<UserRepository>> => {
  return {
    get: vitest.fn(),
    getMetadata: vitest.fn().mockResolvedValue([]),
    getAdmin: vitest.fn(),
    getByEmail: vitest.fn(),
    getByStorageLabel: vitest.fn(),
    getByOAuthId: vitest.fn(),
    getUserStats: vitest.fn(),
    getList: vitest.fn(),
    create: vitest.fn(),
    update: vitest.fn(),
    delete: vitest.fn(),
    restore: vitest.fn(),
    getDeletedUsers: vitest.fn(),
    hasAdmin: vitest.fn(),
    updateUsage: vitest.fn(),
    syncUsage: vitest.fn(),
    upsertMetadata: vitest.fn(),
    deleteMetadata: vitest.fn(),
  };
};
@ -1,11 +0,0 @@
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';

export const newVersionHistoryRepositoryMock = (): Mocked<RepositoryInterface<VersionHistoryRepository>> => {
  return {
    getAll: vitest.fn().mockResolvedValue([]),
    getLatest: vitest.fn(),
    create: vitest.fn(),
  };
};
@ -1,10 +0,0 @@
import { ViewRepository } from 'src/repositories/view-repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';

export const newViewRepositoryMock = (): Mocked<RepositoryInterface<ViewRepository>> => {
  return {
    getAssetsByOriginalPath: vitest.fn(),
    getUniqueOriginalPaths: vitest.fn(),
  };
};
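Every hand-written new*RepositoryMock deleted above has the same shape: one vitest.fn() per repository method, typed as Mocked<RepositoryInterface<...>>. They are replaced by the automock helper introduced further down in this diff. A minimal before/after sketch for the session mock, illustrative only; the test/utils import path is an assumption based on the other test/ imports in this diff:

// Illustrative sketch, not part of this commit: a spec that previously built
// its mock with newSessionRepositoryMock() can derive the same
// Mocked<RepositoryInterface<SessionRepository>> shape from the class itself.
import { SessionRepository } from 'src/repositories/session.repository';
import { automock } from 'test/utils';

const sessionMock = automock(SessionRepository);
sessionMock.getByUserId.mockResolvedValue([]);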
@ -1,7 +1,8 @@
import { randomUUID } from 'node:crypto';
import { Asset, AuthUser, Library, User } from 'src/database';
import { ApiKey, Asset, AuthApiKey, AuthUser, Library, User } from 'src/database';
import { AuthDto } from 'src/dtos/auth.dto';
import { OnThisDayData } from 'src/entities/memory.entity';
import { AssetStatus, AssetType, MemoryType } from 'src/enum';
import { AssetStatus, AssetType, MemoryType, Permission } from 'src/enum';
import { ActivityItem, MemoryItem } from 'src/types';

export const newUuid = () => randomUUID() as string;
@ -13,11 +14,25 @@ export const newDate = () => new Date();
export const newUpdateId = () => 'uuid-v7';
export const newSha1 = () => Buffer.from('this is a fake hash');

const authFactory = (user: Partial<AuthUser> = {}) => ({
  user: authUserFactory(user),
const authFactory = ({ apiKey, ...user }: Partial<AuthUser> & { apiKey?: Partial<AuthApiKey> } = {}) => {
  const auth: AuthDto = {
    user: authUserFactory(user),
  };

  if (apiKey) {
    auth.apiKey = authApiKeyFactory(apiKey);
  }

  return auth;
};

const authApiKeyFactory = (apiKey: Partial<AuthApiKey> = {}) => ({
  id: newUuid(),
  permissions: [Permission.ALL],
  ...apiKey,
});

const authUserFactory = (authUser: Partial<AuthUser>) => ({
const authUserFactory = (authUser: Partial<AuthUser> = {}) => ({
  id: newUuid(),
  isAdmin: false,
  name: 'Test User',
@ -27,6 +42,23 @@ const authUserFactory = (authUser: Partial<AuthUser>) => ({
  ...authUser,
});

const sessionFactory = () => ({
  id: newUuid(),
  createdAt: newDate(),
  updatedAt: newDate(),
  updateId: newUpdateId(),
  deviceOS: 'android',
  deviceType: 'mobile',
  token: 'abc123',
  userId: newUuid(),
});

const stackFactory = () => ({
  id: newUuid(),
  ownerId: newUuid(),
  primaryAssetId: newUuid(),
});

const userFactory = (user: Partial<User> = {}) => ({
  id: newUuid(),
  name: 'Test User',
@ -86,6 +118,17 @@ const activityFactory = (activity: Partial<ActivityItem> = {}) => {
  };
};

const apiKeyFactory = (apiKey: Partial<ApiKey> = {}) => ({
  id: newUuid(),
  userId: newUuid(),
  createdAt: newDate(),
  updatedAt: newDate(),
  updateId: newUpdateId(),
  name: 'Api Key',
  permissions: [Permission.ALL],
  ...apiKey,
});

const libraryFactory = (library: Partial<Library> = {}) => ({
  id: newUuid(),
  createdAt: newDate(),
@ -119,12 +162,23 @@ const memoryFactory = (memory: Partial<MemoryItem> = {}) => ({
  ...memory,
});

const versionHistoryFactory = () => ({
  id: newUuid(),
  createdAt: newDate(),
  version: '1.123.45',
});

export const factory = {
  activity: activityFactory,
  apiKey: apiKeyFactory,
  asset: assetFactory,
  auth: authFactory,
  authApiKey: authApiKeyFactory,
  authUser: authUserFactory,
  library: libraryFactory,
  memory: memoryFactory,
  session: sessionFactory,
  stack: stackFactory,
  user: userFactory,
  versionHistory: versionHistoryFactory,
};
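With apiKey, session, stack, and versionHistory wired into the factory export above, API-key test setup can stay declarative. A small usage sketch follows; it is a hypothetical test snippet, and the test/small.factory module path is an assumption, since the diff does not show the file name:

// Hypothetical usage; only helpers visible in this diff are used.
import { Permission } from 'src/enum';
import { factory } from 'test/small.factory';

// Auth context for a request authenticated via an API key. Permission.ALL is
// the default from authApiKeyFactory above; pass a narrower list to exercise
// permission checks.
const auth = factory.auth({ apiKey: { permissions: [Permission.ALL] } });
const apiKey = factory.apiKey({ userId: auth.user.id });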
@ -1,3 +1,4 @@
import { ClassConstructor } from 'class-transformer';
import { Kysely, sql } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { ChildProcessWithoutNullStreams } from 'node:child_process';
@ -50,48 +51,59 @@ import { ViewRepository } from 'src/repositories/view-repository';
import { BaseService } from 'src/services/base.service';
import { RepositoryInterface } from 'src/types';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newActivityRepositoryMock } from 'test/repositories/activity.repository.mock';
import { newAlbumUserRepositoryMock } from 'test/repositories/album-user.repository.mock';
import { newAlbumRepositoryMock } from 'test/repositories/album.repository.mock';
import { newKeyRepositoryMock } from 'test/repositories/api-key.repository.mock';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newAuditRepositoryMock } from 'test/repositories/audit.repository.mock';
import { newConfigRepositoryMock } from 'test/repositories/config.repository.mock';
import { newCronRepositoryMock } from 'test/repositories/cron.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';
import { newDatabaseRepositoryMock } from 'test/repositories/database.repository.mock';
import { newDownloadRepositoryMock } from 'test/repositories/download.repository.mock';
import { newEventRepositoryMock } from 'test/repositories/event.repository.mock';
import { newJobRepositoryMock } from 'test/repositories/job.repository.mock';
import { newLibraryRepositoryMock } from 'test/repositories/library.repository.mock';
import { newLoggingRepositoryMock } from 'test/repositories/logger.repository.mock';
import { newMachineLearningRepositoryMock } from 'test/repositories/machine-learning.repository.mock';
import { newMapRepositoryMock } from 'test/repositories/map.repository.mock';
import { newMediaRepositoryMock } from 'test/repositories/media.repository.mock';
import { newMemoryRepositoryMock } from 'test/repositories/memory.repository.mock';
import { newMetadataRepositoryMock } from 'test/repositories/metadata.repository.mock';
import { newMoveRepositoryMock } from 'test/repositories/move.repository.mock';
import { newNotificationRepositoryMock } from 'test/repositories/notification.repository.mock';
import { newOAuthRepositoryMock } from 'test/repositories/oauth.repository.mock';
import { newPartnerRepositoryMock } from 'test/repositories/partner.repository.mock';
import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
import { newProcessRepositoryMock } from 'test/repositories/process.repository.mock';
import { newSearchRepositoryMock } from 'test/repositories/search.repository.mock';
import { newServerInfoRepositoryMock } from 'test/repositories/server-info.repository.mock';
import { newSessionRepositoryMock } from 'test/repositories/session.repository.mock';
import { newSharedLinkRepositoryMock } from 'test/repositories/shared-link.repository.mock';
import { newStackRepositoryMock } from 'test/repositories/stack.repository.mock';
import { newStorageRepositoryMock } from 'test/repositories/storage.repository.mock';
import { newSyncRepositoryMock } from 'test/repositories/sync.repository.mock';
import { newSystemMetadataRepositoryMock } from 'test/repositories/system-metadata.repository.mock';
import { newTagRepositoryMock } from 'test/repositories/tag.repository.mock';
import { ITelemetryRepositoryMock, newTelemetryRepositoryMock } from 'test/repositories/telemetry.repository.mock';
import { newTrashRepositoryMock } from 'test/repositories/trash.repository.mock';
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';
import { newVersionHistoryRepositoryMock } from 'test/repositories/version-history.repository.mock';
import { newViewRepositoryMock } from 'test/repositories/view.repository.mock';
import { Readable } from 'typeorm/platform/PlatformTools';
import { Mocked, vitest } from 'vitest';
import { assert, Mocked, vitest } from 'vitest';

const mockFn = (label: string, { strict }: { strict: boolean }) => {
  const message = `Called a mock function without a mock implementation (${label})`;
  return vitest.fn().mockImplementation(() => {
    if (strict) {
      assert.fail(message);
    } else {
      // console.warn(message);
    }
  });
};

export const automock = <T>(
  Dependency: ClassConstructor<T>,
  options?: {
    args?: ConstructorParameters<ClassConstructor<T>>;
    strict?: boolean;
  },
): Mocked<T> => {
  const mock: Record<string, unknown> = {};
  const strict = options?.strict ?? true;
  const args = options?.args ?? [];

  const instance = new Dependency(...args);
  for (const property of Object.getOwnPropertyNames(Dependency.prototype)) {
    if (property === 'constructor') {
      continue;
    }

    const label = `${Dependency.name}.${property}`;
    // console.log(`Automocking ${label}`);

    const target = instance[property as keyof T];
    if (typeof target === 'function') {
      mock[property] = mockFn(label, { strict });
      continue;
    }
  }

  return mock as Mocked<T>;
};

export type ServiceOverrides = {
  access: AccessRepository;
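The automock helper above instantiates the real repository class (optionally with constructor args) and replaces every prototype method with a vitest stub from mockFn: in strict mode an unprepared call fails the test through assert.fail, while strict: false lets it quietly return undefined. A short illustrative sketch using classes that appear in this diff; the test/utils import path is an assumption:

// Illustrative only, not part of the commit.
import { StackRepository } from 'src/repositories/stack.repository';
import { TagRepository } from 'src/repositories/tag.repository';
import { automock } from 'test/utils';

// Strict by default: prepare an implementation before the code under test
// calls the method, otherwise assert.fail(...) aborts the test.
const stackMock = automock(StackRepository);
stackMock.getById.mockResolvedValue(null as any); // cast only to keep the sketch short

// Constructor args are forwarded positionally; the leading hole in the array
// leaves the first dependency undefined, matching the calls in newTestService
// below, and strict: false swallows unprepared calls.
const loggerMock = { setContext: () => {} };
const tagMock = automock(TagRepository, { args: [, loggerMock], strict: false });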
@ -153,48 +165,52 @@ export const newTestService = <T extends BaseService>(
  Service: Constructor<T, BaseServiceArgs>,
  overrides: Partial<ServiceOverrides> = {},
) => {
  const loggerMock = { setContext: () => {} };
  const configMock = { getEnv: () => ({}) };

  const mocks: ServiceMocks = {
    access: newAccessRepositoryMock(),
    logger: newLoggingRepositoryMock(),
    cron: newCronRepositoryMock(),
    logger: automock(LoggingRepository, { args: [, configMock], strict: false }),
    cron: automock(CronRepository, { args: [, loggerMock] }),
    crypto: newCryptoRepositoryMock(),
    activity: newActivityRepositoryMock(),
    audit: newAuditRepositoryMock(),
    album: newAlbumRepositoryMock(),
    albumUser: newAlbumUserRepositoryMock(),
    activity: automock(ActivityRepository),
    audit: automock(AuditRepository),
    album: automock(AlbumRepository, { strict: false }),
    albumUser: automock(AlbumUserRepository),
    asset: newAssetRepositoryMock(),
    config: newConfigRepositoryMock(),
    database: newDatabaseRepositoryMock(),
    downloadRepository: newDownloadRepositoryMock(),
    event: newEventRepositoryMock(),
    downloadRepository: automock(DownloadRepository, { strict: false }),
    event: automock(EventRepository, { args: [, , loggerMock], strict: false }),
    job: newJobRepositoryMock(),
    apiKey: newKeyRepositoryMock(),
    library: newLibraryRepositoryMock(),
    machineLearning: newMachineLearningRepositoryMock(),
    map: newMapRepositoryMock(),
    apiKey: automock(ApiKeyRepository),
    library: automock(LibraryRepository, { strict: false }),
    machineLearning: automock(MachineLearningRepository, { args: [loggerMock], strict: false }),
    map: automock(MapRepository, { args: [undefined, undefined, { setContext: () => {} }] }),
    media: newMediaRepositoryMock(),
    memory: newMemoryRepositoryMock(),
    memory: automock(MemoryRepository),
    metadata: newMetadataRepositoryMock(),
    move: newMoveRepositoryMock(),
    notification: newNotificationRepositoryMock(),
    oauth: newOAuthRepositoryMock(),
    partner: newPartnerRepositoryMock(),
    person: newPersonRepositoryMock(),
    process: newProcessRepositoryMock(),
    search: newSearchRepositoryMock(),
    serverInfo: newServerInfoRepositoryMock(),
    session: newSessionRepositoryMock(),
    sharedLink: newSharedLinkRepositoryMock(),
    stack: newStackRepositoryMock(),
    move: automock(MoveRepository, { strict: false }),
    notification: automock(NotificationRepository, { args: [loggerMock] }),
    oauth: automock(OAuthRepository, { args: [loggerMock] }),
    partner: automock(PartnerRepository, { strict: false }),
    person: automock(PersonRepository, { strict: false }),
    process: automock(ProcessRepository, { args: [loggerMock] }),
    search: automock(SearchRepository, { args: [loggerMock], strict: false }),
    serverInfo: automock(ServerInfoRepository, { args: [, loggerMock], strict: false }),
    session: automock(SessionRepository),
    sharedLink: automock(SharedLinkRepository),
    stack: automock(StackRepository),
    storage: newStorageRepositoryMock(),
    sync: newSyncRepositoryMock(),
    sync: automock(SyncRepository),
    systemMetadata: newSystemMetadataRepositoryMock(),
    tag: newTagRepositoryMock(),
    // systemMetadata: automock(SystemMetadataRepository, { strict: false }),
    tag: automock(TagRepository, { args: [, loggerMock], strict: false }),
    telemetry: newTelemetryRepositoryMock(),
    trash: newTrashRepositoryMock(),
    user: newUserRepositoryMock(),
    versionHistory: newVersionHistoryRepositoryMock(),
    view: newViewRepositoryMock(),
    trash: automock(TrashRepository),
    user: automock(UserRepository, { strict: false }),
    versionHistory: automock(VersionHistoryRepository),
    view: automock(ViewRepository),
  };

  const sut = new Service(
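The hunk above ends mid-statement at const sut = new Service(. For orientation, a hypothetical spec built on the pieces shown here; the { sut, mocks } return shape, the SessionService import path, and the exported ServiceMocks type are assumptions not visible in this diff:

// Hypothetical spec, for illustration only.
import { SessionService } from 'src/services/session.service';
import { newTestService, ServiceMocks } from 'test/utils';
import { beforeEach, describe, expect, it } from 'vitest';

describe(SessionService.name, () => {
  let sut: SessionService;
  let mocks: ServiceMocks;

  beforeEach(() => {
    // Assumed return shape; every repository in mocks is an automock(...) now.
    ({ sut, mocks } = newTestService(SessionService));
  });

  it('constructs the service with automocked repositories', () => {
    mocks.session.getByUserId.mockResolvedValue([]);
    expect(sut).toBeDefined();
  });
});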