mirror of https://github.com/immich-app/immich.git synced 2024-12-25 10:43:13 +02:00

chore(web): update job dashboard (#5745)

* rename clip encoding to smart search

* update job subtitles

* update api

* update smart search job title and subtitle

* fix `getJobName`

* change smart search icon

* formatting

* wording

* update reference to clip

* formatting

* update reference to Encode CLIP
Authored by Mert on 2023-12-16 11:50:46 -05:00; committed by GitHub
parent a2deba4734
commit cb1201e690
22 changed files with 66 additions and 65 deletions
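
For consumers of the generated open-api client, the visible effect of this commit is that the `clipEncoding` key disappears from `AllJobStatusResponseDto`, `SystemConfigJobDto`, and the `JobName` enum, replaced by `smartSearch`. A hedged, self-contained TypeScript sketch of what that looks like on the consumer side (local stand-in types are used here; the real definitions are in the generated `api.ts` files changed below):

```typescript
// Sketch only: minimal local stand-ins for the generated open-api types
// changed in this diff (the real definitions live in the generated api.ts).
type JobStatusDto = Record<string, unknown>;

interface AllJobStatusResponseDto {
  backgroundTask: JobStatusDto;
  smartSearch: JobStatusDto; // replaces the removed 'clipEncoding' key
  // ...other queues omitted for brevity
}

// Mirrors the generated JobName const: ClipEncoding: 'clipEncoding' became SmartSearch: 'smartSearch'.
const JobName = { SmartSearch: 'smartSearch' } as const;

function getSmartSearchStatus(status: AllJobStatusResponseDto): JobStatusDto {
  return status[JobName.SmartSearch]; // previously status['clipEncoding'] via JobName.ClipEncoding
}
```

Note that only the queue key and user-facing labels change: the server-side job names `QUEUE_ENCODE_CLIP` and `ENCODE_CLIP` are kept and simply remapped to the renamed `SMART_SEARCH` queue in the hunks below.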


@@ -355,12 +355,6 @@ export interface AllJobStatusResponseDto {
* @memberof AllJobStatusResponseDto
*/
'backgroundTask': JobStatusDto;
/**
*
* @type {JobStatusDto}
* @memberof AllJobStatusResponseDto
*/
'clipEncoding': JobStatusDto;
/**
*
* @type {JobStatusDto}
@@ -403,6 +397,12 @@ export interface AllJobStatusResponseDto {
* @memberof AllJobStatusResponseDto
*/
'sidecar': JobStatusDto;
/**
*
* @type {JobStatusDto}
* @memberof AllJobStatusResponseDto
*/
'smartSearch': JobStatusDto;
/**
*
* @type {JobStatusDto}
@@ -2017,7 +2017,7 @@ export const JobName = {
VideoConversion: 'videoConversion',
ObjectTagging: 'objectTagging',
RecognizeFaces: 'recognizeFaces',
ClipEncoding: 'clipEncoding',
SmartSearch: 'smartSearch',
BackgroundTask: 'backgroundTask',
StorageTemplateMigration: 'storageTemplateMigration',
Migration: 'migration',
@@ -3785,12 +3785,6 @@ export interface SystemConfigJobDto {
* @memberof SystemConfigJobDto
*/
'backgroundTask': JobSettingsDto;
/**
*
* @type {JobSettingsDto}
* @memberof SystemConfigJobDto
*/
'clipEncoding': JobSettingsDto;
/**
*
* @type {JobSettingsDto}
@@ -3833,6 +3827,12 @@ export interface SystemConfigJobDto {
* @memberof SystemConfigJobDto
*/
'sidecar': JobSettingsDto;
/**
*
* @type {JobSettingsDto}
* @memberof SystemConfigJobDto
*/
'smartSearch': JobSettingsDto;
/**
*
* @type {JobSettingsDto}


@@ -26,7 +26,7 @@ Immich optionally uses machine learning for several features. However, it can be
### How can I lower Immich's CPU usage?
The initial backup is the most intensive due to the number of jobs running. The most CPU-intensive ones are transcoding and machine learning jobs (Tag Images, Encode CLIP, Recognize Faces), and to a lesser extent thumbnail generation. Here are some ways to lower their CPU usage:
The initial backup is the most intensive due to the number of jobs running. The most CPU-intensive ones are transcoding and machine learning jobs (Tag Images, Smart Search, Recognize Faces), and to a lesser extent thumbnail generation. Here are some ways to lower their CPU usage:
- Lower the job concurrency for these jobs to 1.
- Under Settings > Transcoding Settings > Threads, set the number of threads to a low number like 1 or 2.
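
For context (not part of the diff): the job concurrency mentioned above is configured per queue, and the queue keys are exactly the ones renamed in this commit (see the `SystemConfigJobDto` and `defaults` hunks further down). A hedged sketch of a low-CPU configuration, assuming the object shape shown in the `defaults` hunk; the values are illustrative, and these settings are normally edited through the admin job settings UI rather than in code:

```typescript
// Illustrative only: per-queue concurrency overrides using keys from this diff.
// 'smartSearch' is the queue formerly named 'clipEncoding'.
const lowCpuJobConcurrency: Record<string, { concurrency: number }> = {
  smartSearch: { concurrency: 1 },
  objectTagging: { concurrency: 1 },
  recognizeFaces: { concurrency: 1 },
  videoConversion: { concurrency: 1 },
};
```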

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.


@@ -6470,9 +6470,6 @@
"backgroundTask": {
"$ref": "#/components/schemas/JobStatusDto"
},
"clipEncoding": {
"$ref": "#/components/schemas/JobStatusDto"
},
"library": {
"$ref": "#/components/schemas/JobStatusDto"
},
@@ -6494,6 +6491,9 @@
"sidecar": {
"$ref": "#/components/schemas/JobStatusDto"
},
"smartSearch": {
"$ref": "#/components/schemas/JobStatusDto"
},
"storageTemplateMigration": {
"$ref": "#/components/schemas/JobStatusDto"
},
@@ -6509,7 +6509,7 @@
"metadataExtraction",
"videoConversion",
"objectTagging",
"clipEncoding",
"smartSearch",
"storageTemplateMigration",
"migration",
"backgroundTask",
@@ -7821,7 +7821,7 @@
"videoConversion",
"objectTagging",
"recognizeFaces",
"clipEncoding",
"smartSearch",
"backgroundTask",
"storageTemplateMigration",
"migration",
@@ -9182,9 +9182,6 @@
"backgroundTask": {
"$ref": "#/components/schemas/JobSettingsDto"
},
"clipEncoding": {
"$ref": "#/components/schemas/JobSettingsDto"
},
"library": {
"$ref": "#/components/schemas/JobSettingsDto"
},
@@ -9206,6 +9203,9 @@
"sidecar": {
"$ref": "#/components/schemas/JobSettingsDto"
},
"smartSearch": {
"$ref": "#/components/schemas/JobSettingsDto"
},
"storageTemplateMigration": {
"$ref": "#/components/schemas/JobSettingsDto"
},
@@ -9221,7 +9221,7 @@
"metadataExtraction",
"videoConversion",
"objectTagging",
"clipEncoding",
"smartSearch",
"storageTemplateMigration",
"migration",
"backgroundTask",


@@ -4,7 +4,7 @@ export enum QueueName {
VIDEO_CONVERSION = 'videoConversion',
OBJECT_TAGGING = 'objectTagging',
RECOGNIZE_FACES = 'recognizeFaces',
CLIP_ENCODING = 'clipEncoding',
SMART_SEARCH = 'smartSearch',
BACKGROUND_TASK = 'backgroundTask',
STORAGE_TEMPLATE_MIGRATION = 'storageTemplateMigration',
MIGRATION = 'migration',
@@ -135,8 +135,8 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.RECOGNIZE_FACES]: QueueName.RECOGNIZE_FACES,
// clip
[JobName.QUEUE_ENCODE_CLIP]: QueueName.CLIP_ENCODING,
[JobName.ENCODE_CLIP]: QueueName.CLIP_ENCODING,
[JobName.QUEUE_ENCODE_CLIP]: QueueName.SMART_SEARCH,
[JobName.ENCODE_CLIP]: QueueName.SMART_SEARCH,
// XMP sidecars
[JobName.QUEUE_SIDECAR]: QueueName.SIDECAR,


@@ -63,7 +63,7 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>
[QueueName.OBJECT_TAGGING]!: JobStatusDto;
@ApiProperty({ type: JobStatusDto })
[QueueName.CLIP_ENCODING]!: JobStatusDto;
[QueueName.SMART_SEARCH]!: JobStatusDto;
@ApiProperty({ type: JobStatusDto })
[QueueName.STORAGE_TEMPLATE_MIGRATION]!: JobStatusDto;


@@ -97,7 +97,7 @@ describe(JobService.name, () => {
await expect(sut.getAllJobsStatus()).resolves.toEqual({
[QueueName.BACKGROUND_TASK]: expectedJobStatus,
[QueueName.CLIP_ENCODING]: expectedJobStatus,
[QueueName.SMART_SEARCH]: expectedJobStatus,
[QueueName.METADATA_EXTRACTION]: expectedJobStatus,
[QueueName.OBJECT_TAGGING]: expectedJobStatus,
[QueueName.SEARCH]: expectedJobStatus,
@@ -171,7 +171,7 @@ describe(JobService.name, () => {
it('should handle a start clip encoding command', async () => {
jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.CLIP_ENCODING, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.SMART_SEARCH, { command: JobCommand.START, force: false });
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_ENCODE_CLIP, data: { force: false } });
});
@@ -232,7 +232,7 @@ describe(JobService.name, () => {
SystemConfigCore.create(newSystemConfigRepositoryMock(false)).config$.next({
job: {
[QueueName.BACKGROUND_TASK]: { concurrency: 10 },
[QueueName.CLIP_ENCODING]: { concurrency: 10 },
[QueueName.SMART_SEARCH]: { concurrency: 10 },
[QueueName.METADATA_EXTRACTION]: { concurrency: 10 },
[QueueName.OBJECT_TAGGING]: { concurrency: 10 },
[QueueName.RECOGNIZE_FACES]: { concurrency: 10 },
@@ -247,7 +247,7 @@ describe(JobService.name, () => {
} as SystemConfig);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.BACKGROUND_TASK, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.CLIP_ENCODING, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.SMART_SEARCH, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.OBJECT_TAGGING, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.RECOGNIZE_FACES, 10);
@@ -367,7 +367,7 @@ describe(JobService.name, () => {
const featureTests: Array<{ queue: QueueName; feature: FeatureFlag; configKey: SystemConfigKey }> = [
{
queue: QueueName.CLIP_ENCODING,
queue: QueueName.SMART_SEARCH,
feature: FeatureFlag.CLIP_ENCODE,
configKey: SystemConfigKey.MACHINE_LEARNING_CLIP_ENABLED,
},


@@ -98,7 +98,7 @@ export class JobService {
await this.configCore.requireFeature(FeatureFlag.TAG_IMAGE);
return this.jobRepository.queue({ name: JobName.QUEUE_OBJECT_TAGGING, data: { force } });
case QueueName.CLIP_ENCODING:
case QueueName.SMART_SEARCH:
await this.configCore.requireFeature(FeatureFlag.CLIP_ENCODE);
return this.jobRepository.queue({ name: JobName.QUEUE_ENCODE_CLIP, data: { force } });


@@ -29,13 +29,13 @@ export class SmartInfoService {
}
async init() {
await this.jobRepository.pause(QueueName.CLIP_ENCODING);
await this.jobRepository.pause(QueueName.SMART_SEARCH);
let { isActive } = await this.jobRepository.getQueueStatus(QueueName.CLIP_ENCODING);
let { isActive } = await this.jobRepository.getQueueStatus(QueueName.SMART_SEARCH);
while (isActive) {
this.logger.verbose('Waiting for CLIP encoding queue to stop...');
await setTimeout(1000).then(async () => {
({ isActive } = await this.jobRepository.getQueueStatus(QueueName.CLIP_ENCODING));
({ isActive } = await this.jobRepository.getQueueStatus(QueueName.SMART_SEARCH));
});
}
@@ -43,7 +43,7 @@ export class SmartInfoService {
await this.repository.init(machineLearning.clip.modelName);
await this.jobRepository.resume(QueueName.CLIP_ENCODING);
await this.jobRepository.resume(QueueName.SMART_SEARCH);
}
async handleQueueObjectTagging({ force }: IBaseJob) {


@@ -39,7 +39,7 @@ export class SystemConfigJobDto implements Record<QueueName, JobSettingsDto> {
@ValidateNested()
@IsObject()
@Type(() => JobSettingsDto)
[QueueName.CLIP_ENCODING]!: JobSettingsDto;
[QueueName.SMART_SEARCH]!: JobSettingsDto;
@ApiProperty({ type: JobSettingsDto })
@ValidateNested()


@@ -47,7 +47,7 @@ export const defaults = Object.freeze<SystemConfig>({
},
job: {
[QueueName.BACKGROUND_TASK]: { concurrency: 5 },
[QueueName.CLIP_ENCODING]: { concurrency: 2 },
[QueueName.SMART_SEARCH]: { concurrency: 2 },
[QueueName.METADATA_EXTRACTION]: { concurrency: 5 },
[QueueName.OBJECT_TAGGING]: { concurrency: 2 },
[QueueName.RECOGNIZE_FACES]: { concurrency: 2 },


@@ -27,7 +27,7 @@ const updates: SystemConfigEntity[] = [
const updatedConfig = Object.freeze<SystemConfig>({
job: {
[QueueName.BACKGROUND_TASK]: { concurrency: 5 },
[QueueName.CLIP_ENCODING]: { concurrency: 2 },
[QueueName.SMART_SEARCH]: { concurrency: 2 },
[QueueName.METADATA_EXTRACTION]: { concurrency: 5 },
[QueueName.OBJECT_TAGGING]: { concurrency: 2 },
[QueueName.RECOGNIZE_FACES]: { concurrency: 2 },


@@ -136,7 +136,7 @@ class ImmichApi {
[JobName.MetadataExtraction]: 'Extract Metadata',
[JobName.Sidecar]: 'Sidecar Metadata',
[JobName.ObjectTagging]: 'Tag Objects',
[JobName.ClipEncoding]: 'Encode Clip',
[JobName.SmartSearch]: 'Smart Search',
[JobName.RecognizeFaces]: 'Recognize Faces',
[JobName.VideoConversion]: 'Transcode Videos',
[JobName.StorageTemplateMigration]: 'Storage Template Migration',


@@ -355,12 +355,6 @@ export interface AllJobStatusResponseDto {
* @memberof AllJobStatusResponseDto
*/
'backgroundTask': JobStatusDto;
/**
*
* @type {JobStatusDto}
* @memberof AllJobStatusResponseDto
*/
'clipEncoding': JobStatusDto;
/**
*
* @type {JobStatusDto}
@@ -403,6 +397,12 @@ export interface AllJobStatusResponseDto {
* @memberof AllJobStatusResponseDto
*/
'sidecar': JobStatusDto;
/**
*
* @type {JobStatusDto}
* @memberof AllJobStatusResponseDto
*/
'smartSearch': JobStatusDto;
/**
*
* @type {JobStatusDto}
@@ -2017,7 +2017,7 @@ export const JobName = {
VideoConversion: 'videoConversion',
ObjectTagging: 'objectTagging',
RecognizeFaces: 'recognizeFaces',
ClipEncoding: 'clipEncoding',
SmartSearch: 'smartSearch',
BackgroundTask: 'backgroundTask',
StorageTemplateMigration: 'storageTemplateMigration',
Migration: 'migration',
@@ -3785,12 +3785,6 @@ export interface SystemConfigJobDto {
* @memberof SystemConfigJobDto
*/
'backgroundTask': JobSettingsDto;
/**
*
* @type {JobSettingsDto}
* @memberof SystemConfigJobDto
*/
'clipEncoding': JobSettingsDto;
/**
*
* @type {JobSettingsDto}
@@ -3833,6 +3827,12 @@ export interface SystemConfigJobDto {
* @memberof SystemConfigJobDto
*/
'sidecar': JobSettingsDto;
/**
*
* @type {JobSettingsDto}
* @memberof SystemConfigJobDto
*/
'smartSearch': JobSettingsDto;
/**
*
* @type {JobSettingsDto}


@@ -12,10 +12,10 @@
mdiFileJpgBox,
mdiFileXmlBox,
mdiFolderMove,
mdiImageSearch,
mdiLibraryShelves,
mdiTable,
mdiTagMultiple,
mdiVectorCircle,
mdiVideo,
} from '@mdi/js';
import ConfirmDialogue from '../../shared-components/confirm-dialogue.svelte';
@@ -56,12 +56,12 @@
[JobName.ThumbnailGeneration]: {
icon: mdiFileJpgBox,
title: api.getJobName(JobName.ThumbnailGeneration),
subtitle: 'Regenerate JPEG and WebP thumbnails',
subtitle: 'Generate large, small and blurred thumbnails for each asset, as well as thumbnails for each person',
},
[JobName.MetadataExtraction]: {
icon: mdiTable,
title: api.getJobName(JobName.MetadataExtraction),
subtitle: 'Extract metadata information i.e. GPS, resolution...etc',
subtitle: 'Extract metadata information from each asset, such as GPS and resolution',
},
[JobName.Library]: {
icon: mdiLibraryShelves,
@@ -81,26 +81,27 @@
[JobName.ObjectTagging]: {
icon: mdiTagMultiple,
title: api.getJobName(JobName.ObjectTagging),
subtitle: 'Run machine learning to tag objects\nNote that some assets may not have any objects detected',
subtitle:
'Run machine learning on assets to tag objects\nNote that some assets may not have any objects detected',
disabled: !$featureFlags.tagImage,
},
[JobName.ClipEncoding]: {
icon: mdiVectorCircle,
title: api.getJobName(JobName.ClipEncoding),
subtitle: 'Run machine learning to generate clip embeddings',
[JobName.SmartSearch]: {
icon: mdiImageSearch,
title: api.getJobName(JobName.SmartSearch),
subtitle: 'Run machine learning on assets to support smart search',
disabled: !$featureFlags.clipEncode,
},
[JobName.RecognizeFaces]: {
icon: mdiFaceRecognition,
title: api.getJobName(JobName.RecognizeFaces),
subtitle: 'Run machine learning to recognize faces',
subtitle: 'Run machine learning on assets to recognize faces',
handleCommand: handleFaceCommand,
disabled: !$featureFlags.facialRecognition,
},
[JobName.VideoConversion]: {
icon: mdiVideo,
title: api.getJobName(JobName.VideoConversion),
subtitle: 'Transcode videos not in the desired format',
subtitle: 'Transcode videos for wider compatibility with browsers and devices',
},
[JobName.StorageTemplateMigration]: {
icon: mdiFolderMove,


@@ -23,7 +23,7 @@
JobName.Library,
JobName.Sidecar,
JobName.ObjectTagging,
JobName.ClipEncoding,
JobName.SmartSearch,
JobName.RecognizeFaces,
JobName.VideoConversion,
JobName.StorageTemplateMigration,