
feat(machine-learning)!: move machine learning to Python based image (#1774)

BREAKING CHANGES
* Users must update the machine-learning portion of their docker-compose file (see the sketch below).
* Machine-learning support for arm64 and armv7 is temporarily dropped.
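
For reference, a minimal sketch of what the updated machine-learning service block looks like after this change, assembled from the release compose diff in this commit (the image tag and any existing environment/depends_on entries should stay as they already are in your file):

  immich-machine-learning:
    container_name: immich_machine_learning
    image: altran1502/immich-machine-learning:release
    command: [ "python", "src/main.py" ]   # replaces the old ./entrypoint.sh entrypoint
    volumes:
      - ${UPLOAD_LOCATION}:/usr/src/app/upload
      - model-cache:/cache                 # new: persists downloaded models between restarts
    env_file:
      - .env
    restart: always

volumes:
  pgdata:
  model-cache:                             # new top-level named volume
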
Authored by Alex on 2023-02-18 09:13:37 -06:00, committed by GitHub
parent 8c315dfeb1
commit 57136e48fb
27 changed files with 92 additions and 16849 deletions

View File

@@ -16,6 +1,7 @@ jobs:
# Prevent a failure in one image from stopping the other builds
fail-fast: false
matrix:
platforms: ["linux/arm/v7,linux/amd64,linux/arm64"]
include:
- context: "server"
image: "immich-server"
@@ -23,6 +24,7 @@ jobs:
image: "immich-web"
- context: "machine-learning"
image: "immich-machine-learning"
platforms: "linux/amd64"
- context: "nginx"
image: "immich-proxy"
@@ -92,7 +94,7 @@ jobs:
uses: docker/build-push-action@v4.0.0
with:
context: ${{ matrix.context }}
platforms: linux/arm/v7,linux/amd64,linux/arm64
platforms: ${{ matrix.platforms }}
# Skip pushing when PR from a fork
push: ${{ !github.event.pull_request.head.repo.fork }}
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{matrix.image}}

View File

@@ -30,18 +30,20 @@ services:
build:
context: ../machine-learning
dockerfile: Dockerfile
target: builder
command: npm run start:dev
command: python main.py
ports:
- 3003:3003
volumes:
- ../machine-learning:/usr/src/app
- ../machine-learning/src:/usr/src/app
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /usr/src/app/node_modules
- model-cache:/cache
env_file:
- .env
environment:
- NODE_ENV=development
depends_on:
- database
restart: always
immich-microservices:
container_name: immich_microservices
@@ -126,3 +128,4 @@ services:
volumes:
pgdata:
model-cache:

View File

@@ -1,95 +0,0 @@
version: "3.8"
services:
immich-server:
container_name: immich_server
image: altran1502/immich-server:staging
entrypoint: ["/bin/sh", "./start-server.sh"]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
env_file:
- .env
environment:
- NODE_ENV=production
depends_on:
- redis
- database
restart: always
immich-microservices:
container_name: immich_microservices
image: altran1502/immich-server:staging
entrypoint: ["/bin/sh", "./start-microservices.sh"]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
env_file:
- .env
environment:
- NODE_ENV=production
depends_on:
- redis
- database
restart: always
immich-machine-learning:
container_name: immich_machine_learning
image: altran1502/immich-machine-learning:staging
entrypoint: ["/bin/sh", "./entrypoint.sh"]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
env_file:
- .env
environment:
- NODE_ENV=production
depends_on:
- database
restart: always
immich-web:
container_name: immich_web
image: altran1502/immich-web:staging
entrypoint: ["/bin/sh", "./entrypoint.sh"]
env_file:
- .env
environment:
# Rename these values for svelte public interface
- PUBLIC_IMMICH_SERVER_URL=${IMMICH_SERVER_URL}
- PUBLIC_IMMICH_API_URL_EXTERNAL=${IMMICH_API_URL_EXTERNAL}
restart: always
redis:
container_name: immich_redis
image: redis:6.2
restart: always
database:
container_name: immich_postgres
image: postgres:14
env_file:
- .env
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME}
PG_DATA: /var/lib/postgresql/data
volumes:
- pgdata:/var/lib/postgresql/data
restart: always
immich-proxy:
container_name: immich_proxy
image: altran1502/immich-proxy:staging
environment:
# Make sure these values get passed through from the env file
- IMMICH_SERVER_URL
- IMMICH_WEB_URL
ports:
- 2283:8080
logging:
driver: none
depends_on:
- immich-server
restart: always
volumes:
pgdata:

View File

@@ -4,7 +4,7 @@ services:
immich-server:
container_name: immich_server
image: altran1502/immich-server:release
entrypoint: ["/bin/sh", "./start-server.sh"]
entrypoint: [ "/bin/sh", "./start-server.sh" ]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
env_file:
@@ -19,7 +19,7 @@ services:
immich-microservices:
container_name: immich_microservices
image: altran1502/immich-server:release
entrypoint: ["/bin/sh", "./start-microservices.sh"]
entrypoint: [ "/bin/sh", "./start-microservices.sh" ]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
env_file:
@@ -34,9 +34,10 @@ services:
immich-machine-learning:
container_name: immich_machine_learning
image: altran1502/immich-machine-learning:release
entrypoint: ["/bin/sh", "./entrypoint.sh"]
command: [ "python", "src/main.py" ]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- model-cache:/cache
env_file:
- .env
environment:
@@ -48,7 +49,7 @@ services:
immich-web:
container_name: immich_web
image: altran1502/immich-web:release
entrypoint: ["/bin/sh", "./entrypoint.sh"]
entrypoint: [ "/bin/sh", "./entrypoint.sh" ]
env_file:
- .env
restart: always
@@ -89,3 +90,4 @@ services:
volumes:
pgdata:
model-cache:

View File

@@ -1,4 +1 @@
node_modules/
upload/
dist/
venv/

View File

@@ -1,24 +0,0 @@
module.exports = {
parser: '@typescript-eslint/parser',
parserOptions: {
project: 'tsconfig.json',
sourceType: 'module',
},
plugins: ['@typescript-eslint/eslint-plugin'],
extends: [
'plugin:@typescript-eslint/recommended',
'plugin:prettier/recommended',
],
root: true,
env: {
node: true,
jest: true,
},
ignorePatterns: ['.eslintrc.js'],
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
},
};

View File

@@ -1,37 +1,4 @@
# compiled output
/dist
/node_modules
# Logs
logs
*.log
npm-debug.log*
pnpm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# OS
.DS_Store
# Tests
/coverage
/.nyc_output
# IDEs and editors
/.idea
.project
.classpath
.c9/
*.launch
.settings/
*.sublime-workspace
# IDE - VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
upload/
venv/
__pycache__/
model-cache/

View File

@@ -1,4 +0,0 @@
{
"singleQuote": true,
"trailingComma": "all"
}

View File

@@ -1,34 +1,11 @@
FROM node:16-bullseye-slim as builder
FROM python:3.10
ARG DEBIAN_FRONTEND=noninteractive
ENV TRANSFORMERS_CACHE=/cache
WORKDIR /usr/src/app
RUN apt-get update
RUN apt-get install gcc g++ make cmake python3 python3-pip -y
COPY package.json package-lock.json ./
RUN npm ci
RUN npm rebuild @tensorflow/tfjs-node --build-from-source
RUN pip install --user --no-cache-dir torch==1.13.1+cpu -f https://download.pytorch.org/whl/torch_stable.html
RUN pip install --user transformers tqdm numpy scikit-learn scipy nltk sentencepiece flask Pillow
RUN pip install --user --no-deps sentence-transformers
COPY . .
FROM builder as prod
RUN npm run build
RUN npm prune --omit=dev
FROM node:16-bullseye-slim
ARG DEBIAN_FRONTEND=noninteractive
WORKDIR /usr/src/app
COPY --from=prod /usr/src/app/node_modules ./node_modules
COPY --from=prod /usr/src/app/dist ./dist
COPY package.json package-lock.json ./
COPY entrypoint.sh ./
# CMD [ "node", "dist/main" ]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022 Hau Tran
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,4 +1,5 @@
# Microservices for Immich
# Immich Machine Learning
## Image Classifier
- Object Detection
- Image Classification

View File

@@ -1,4 +0,0 @@
#! /bin/sh
# npm run typeorm migration:run
# npm run start:prod
exec node dist/main.js

View File

@@ -1,4 +0,0 @@
{
"collection": "@nestjs/schematics",
"sourceRoot": "src"
}

File diff suppressed because it is too large

View File

@@ -1,71 +0,0 @@
{
"name": "nest_microservices",
"version": "0.0.1",
"description": "",
"author": "",
"private": true,
"license": "UNLICENSED",
"scripts": {
"prebuild": "rimraf dist",
"build": "nest build",
"format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
"start": "nest start",
"start:dev": "nest start --watch",
"start:debug": "nest start --debug --watch",
"start:prod": "node dist/main",
"lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix",
"test": "jest",
"test:watch": "jest --watch",
"test:cov": "jest --coverage",
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
"test:e2e": "jest --config ./test/jest-e2e.json"
},
"dependencies": {
"@nestjs/common": "^8.0.0",
"@nestjs/core": "^8.0.0",
"@tensorflow-models/coco-ssd": "^2.2.2",
"@tensorflow-models/mobilenet": "^2.1.0",
"@tensorflow/tfjs-node": "^3.19.0"
},
"devDependencies": {
"@nestjs/cli": "^8.2.4",
"@nestjs/schematics": "^8.0.0",
"@nestjs/testing": "^8.0.0",
"@types/express": "^4.17.13",
"@types/jest": "27.4.1",
"@types/node": "^16.0.0",
"@types/supertest": "^2.0.11",
"@typescript-eslint/eslint-plugin": "^5.0.0",
"@typescript-eslint/parser": "^5.0.0",
"eslint": "^8.0.1",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-prettier": "^4.0.0",
"jest": "^27.2.5",
"prettier": "^2.3.2",
"rimraf": "^3.0.2",
"source-map-support": "^0.5.20",
"supertest": "^6.1.3",
"ts-jest": "^27.0.3",
"ts-loader": "^9.2.3",
"ts-node": "^10.0.0",
"tsconfig-paths": "^3.10.1",
"typescript": "^4.3.5"
},
"jest": {
"moduleFileExtensions": [
"js",
"json",
"ts"
],
"rootDir": "src",
"testRegex": ".*\\.spec\\.ts$",
"transform": {
"^.+\\.(t|j)s$": "ts-jest"
},
"collectCoverageFrom": [
"**/*.(t|j)s"
],
"coverageDirectory": "../coverage",
"testEnvironment": "node"
}
}

View File

@@ -1,10 +0,0 @@
import { Module } from '@nestjs/common';
import { ImageClassifierModule } from './image-classifier/image-classifier.module';
import { ObjectDetectionModule } from './object-detection/object-detection.module';
@Module({
imports: [ImageClassifierModule, ObjectDetectionModule],
controllers: [],
providers: [],
})
export class AppModule {}

View File

@@ -1,14 +0,0 @@
import { Body, Controller, Post } from '@nestjs/common';
import { ImageClassifierService } from './image-classifier.service';
@Controller('image-classifier')
export class ImageClassifierController {
constructor(
private readonly imageClassifierService: ImageClassifierService,
) { }
@Post('/tag-image')
async tagImage(@Body('thumbnailPath') thumbnailPath: string) {
return await this.imageClassifierService.tagImage(thumbnailPath);
}
}

View File

@@ -1,9 +0,0 @@
import { Module } from '@nestjs/common';
import { ImageClassifierService } from './image-classifier.service';
import { ImageClassifierController } from './image-classifier.controller';
@Module({
controllers: [ImageClassifierController],
providers: [ImageClassifierService],
})
export class ImageClassifierModule {}

View File

@@ -1,49 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import * as mobilenet from '@tensorflow-models/mobilenet';
import * as cocoSsd from '@tensorflow-models/coco-ssd';
import * as tf from '@tensorflow/tfjs-node';
import * as fs from 'fs';
@Injectable()
export class ImageClassifierService {
private readonly MOBILENET_VERSION = 2;
private readonly MOBILENET_ALPHA = 1.0;
private mobileNetModel: mobilenet.MobileNet;
constructor() {
Logger.log(
`Running Node TensorFlow Version : ${tf.version['tfjs']}`,
'ImageClassifier',
);
mobilenet
.load({
version: this.MOBILENET_VERSION,
alpha: this.MOBILENET_ALPHA,
})
.then((mobilenetModel) => (this.mobileNetModel = mobilenetModel));
}
async tagImage(thumbnailPath: string) {
try {
const isExist = fs.existsSync(thumbnailPath);
if (isExist) {
const tags = [];
const image = fs.readFileSync(thumbnailPath);
const decodedImage = tf.node.decodeImage(image, 3) as tf.Tensor3D;
const predictions = await this.mobileNetModel.classify(decodedImage);
for (const prediction of predictions) {
if (prediction.probability >= 0.1) {
tags.push(...prediction.className.split(',').map((e) => e.trim()));
}
}
tf.dispose(decodedImage);
return tags;
}
} catch (e) {
console.log('Error reading file ', e);
}
}
}

View File

@@ -0,0 +1,61 @@
import os
from flask import Flask, request
from transformers import pipeline
server = Flask(__name__)
classifier = pipeline(
task="image-classification",
model="microsoft/resnet-50"
)
detector = pipeline(
task="object-detection",
model="hustvl/yolos-tiny"
)
# Environment resolver
is_dev = os.getenv('NODE_ENV') == 'development'
server_port = os.getenv('MACHINE_LEARNING_PORT') or 3003
@server.route("/ping")
def ping():
return "pong"
@server.route("/object-detection/detect-object", methods=['POST'])
def object_detection():
assetPath = request.json['thumbnailPath']
return run_engine(detector, assetPath), 201
@server.route("/image-classifier/tag-image", methods=['POST'])
def image_classification():
assetPath = request.json['thumbnailPath']
return run_engine(classifier, assetPath), 201
def run_engine(engine, path):
result = []
predictions = engine(path)
for index, pred in enumerate(predictions):
tags = pred['label'].split(', ')
if (index == 0):
result = tags
else:
if (pred['score'] > 0.5):
result = [*result, *tags]
if (len(result) > 1):
result = list(set(result))
return result
if __name__ == "__main__":
server.run(debug=is_dev, host='0.0.0.0', port=server_port)
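
The server and microservices containers call this service over plain HTTP. As a rough usage sketch (the requests dependency, host name, and thumbnail path below are assumptions for illustration; only the routes, the thumbnailPath JSON key, and the default port 3003 come from this commit):

import requests

ML_URL = "http://immich-machine-learning:3003"  # assumed service name and default MACHINE_LEARNING_PORT

def tag_image(thumbnail_path: str) -> list[str]:
    # POST the path of an existing thumbnail; the service answers 201 with a JSON list of tags.
    response = requests.post(
        f"{ML_URL}/image-classifier/tag-image",
        json={"thumbnailPath": thumbnail_path},
        timeout=30,
    )
    response.raise_for_status()
    return response.json()

def detect_objects(thumbnail_path: str) -> list[str]:
    # Same contract, backed by the object-detection pipeline.
    response = requests.post(
        f"{ML_URL}/object-detection/detect-object",
        json={"thumbnailPath": thumbnail_path},
        timeout=30,
    )
    response.raise_for_status()
    return response.json()

if __name__ == "__main__":
    print(tag_image("/usr/src/app/upload/thumbs/example.jpeg"))  # hypothetical path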

View File

@@ -1,27 +0,0 @@
import { NestFactory } from '@nestjs/core';
import { AppModule } from './app.module';
import { Logger } from '@nestjs/common';
async function bootstrap() {
const app = await NestFactory.create(AppModule);
const port = Number(process.env.MACHINE_LEARNING_PORT) || 3003;
await app.listen(port, () => {
if (process.env.NODE_ENV == 'development') {
Logger.log(
'Running Immich Machine Learning in DEVELOPMENT environment',
'IMMICH MICROSERVICES',
);
}
if (process.env.NODE_ENV == 'production') {
Logger.log(
'Running Immich Machine Learning in PRODUCTION environment',
'IMMICH MICROSERVICES',
);
}
});
}
bootstrap();

View File

@@ -1,15 +0,0 @@
import { Body, Controller, Post } from '@nestjs/common';
import { ObjectDetectionService } from './object-detection.service';
import { Logger } from '@nestjs/common';
@Controller('object-detection')
export class ObjectDetectionController {
constructor(
private readonly objectDetectionService: ObjectDetectionService,
) { }
@Post('/detect-object')
async detectObject(@Body('thumbnailPath') thumbnailPath: string) {
return await this.objectDetectionService.detectObject(thumbnailPath);
}
}

View File

@@ -1,9 +0,0 @@
import { Module } from '@nestjs/common';
import { ObjectDetectionService } from './object-detection.service';
import { ObjectDetectionController } from './object-detection.controller';
@Module({
controllers: [ObjectDetectionController],
providers: [ObjectDetectionService],
})
export class ObjectDetectionModule {}

View File

@@ -1,39 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import * as cocoSsd from '@tensorflow-models/coco-ssd';
import * as tf from '@tensorflow/tfjs-node';
import * as fs from 'fs';
@Injectable()
export class ObjectDetectionService {
private cocoSsdModel: cocoSsd.ObjectDetection;
constructor() {
Logger.log(
`Running Node TensorFlow Version : ${tf.version['tfjs']}`,
'ObjectDetection',
);
cocoSsd.load().then((model) => (this.cocoSsdModel = model));
}
async detectObject(thumbnailPath: string) {
try {
const isExist = fs.existsSync(thumbnailPath);
if (isExist) {
const tags = new Set();
const image = fs.readFileSync(thumbnailPath);
const decodedImage = tf.node.decodeImage(image, 3) as tf.Tensor3D;
const predictions = await this.cocoSsdModel.detect(decodedImage);
for (const result of predictions) {
if (result.score > 0.5) {
tags.add(result.class);
}
}
tf.dispose(decodedImage);
return [...tags];
}
} catch (e) {
console.log('Error reading file ', e);
}
}
}

View File

@@ -1,4 +0,0 @@
{
"extends": "./tsconfig.json",
"exclude": ["node_modules", "test", "dist", "**/*spec.ts"]
}

View File

@@ -1,21 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"declaration": true,
"removeComments": true,
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"allowSyntheticDefaultImports": true,
"target": "es2017",
"sourceMap": true,
"outDir": "./dist",
"baseUrl": "./",
"incremental": true,
"skipLibCheck": true,
"strictNullChecks": false,
"noImplicitAny": false,
"strictBindCallApply": false,
"forceConsistentCasingInFileNames": false,
"noFallthroughCasesInSwitch": false
}
}

View File

@@ -33,7 +33,6 @@ export class MachineLearningProcessor {
const smartInfo = new SmartInfoEntity();
smartInfo.assetId = asset.id;
smartInfo.tags = [...res.data];
await this.smartInfoRepository.upsert(smartInfo, {
conflictPaths: ['assetId'],
});