
Add readme for top level folder, clean up dead code

Alex Tran 2022-02-03 15:27:31 -06:00
parent 85b83f9666
commit 613b4449a7
7 changed files with 90 additions and 60 deletions

README.md (new file)

@@ -0,0 +1,33 @@
# IMMICH
Self-hosted photo backup solution, directly from your mobile phone.
# Development
You can use Docker Compose for development; Immich is composed of several services:
1. The server
2. PostgreSQL
3. Redis
## Populate .env file
Navigate to the `server` directory and run
```
cp .env.example .env
```
Then populate the values in it.
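A minimal sketch of a populated `.env`, assuming the variable names required by the server's config validation (see the `immichAppConfig` schema further down in this commit); all values below are placeholders, not shipped defaults:
```
NODE_ENV=development
DB_USERNAME=postgres
DB_PASSWORD=example-db-password
DB_DATABASE_NAME=immich
UPLOAD_LOCATION=./upload
JWT_SECRET=replace-with-a-long-random-string
```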
To start, run
```bash
docker-compose up ./server
```
To force a rebuild of node modules after installing new packages
```bash
docker-compose up --build -V ./server
```


@@ -29,6 +29,12 @@ COPY . .
RUN yarn build
# Clean up commands
RUN apt-get autoremove -y && apt-get clean && \
rm -rf /usr/local/src/*
RUN apt-get clean && \
rm -rf /var/lib/apt/lists/*
FROM ubuntu:20.04 as production
ARG DEBIAN_FRONTEND=noninteractive
@@ -62,4 +68,11 @@ COPY . .
COPY --from=development /usr/src/app/dist ./dist
# Clean up commands
RUN apt-get autoremove -y && apt-get clean && \
rm -rf /usr/local/src/*
RUN apt-get clean && \
rm -rf /var/lib/apt/lists/*
CMD ["node", "dist/main"]


@@ -10,16 +10,4 @@ There is a tensorflow module running in the server so some packages will be needed
$ apt-get install make cmake gcc g++
```
# Docker
To run the application using docker compose
```bash
docker-compose up
```
To force a rebuild of node modules after installing new packages
```bash
docker-compose up --build -V
```


@@ -6,14 +6,10 @@ export const immichAppConfig: ConfigModuleOptions = {
isGlobal: true,
validationSchema: Joi.object({
NODE_ENV: Joi.string().required().valid('development', 'production', 'staging').default('development'),
// DB_HOST: Joi.string().required(),
DB_USERNAME: Joi.string().required(),
DB_PASSWORD: Joi.string().required(),
DB_DATABASE_NAME: Joi.string().required(),
UPLOAD_LOCATION: Joi.string().required(),
JWT_SECRET: Joi.string().required(),
// REDIS_HOST: Joi.string().required(),
// REDIS_PORT: Joi.string().required(),
// REDIS_PASSWORD: Joi.string().required(),
}),
};
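The schema above only checks that these variables are present and well-formed at startup; elsewhere the server reads them back through NestJS's `ConfigService`. A minimal sketch of that pattern (the consumer class below is illustrative, not part of this commit):
```typescript
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

// Illustrative consumer: reads values that the Joi schema above has already validated.
@Injectable()
export class ExampleConfigConsumer {
  constructor(private configService: ConfigService) {}

  uploadLocation(): string {
    // Joi's .required() makes Nest fail at bootstrap if this is missing,
    // so the non-null assertion here is safe.
    return this.configService.get<string>('UPLOAD_LOCATION')!;
  }
}
```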


@@ -7,7 +7,7 @@ import { AssetService } from '../../api-v1/asset/asset.service';
import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
import { ImageOptimizeProcessor } from './image-optimize.processor';
import { ImageOptimizeService } from './image-optimize.service';
// import { MachineLearningProcessor } from './machine-learning.processor';
import { MachineLearningProcessor } from './machine-learning.processor';
@Module({
imports: [
@@ -30,7 +30,7 @@ import { ImageOptimizeService } from './image-optimize.service';
TypeOrmModule.forFeature([AssetEntity]),
],
providers: [ImageOptimizeService, ImageOptimizeProcessor],
providers: [ImageOptimizeService, ImageOptimizeProcessor, MachineLearningProcessor],
exports: [ImageOptimizeService],
})
export class ImageOptimizeModule {}
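With `MachineLearningProcessor` now listed as a provider, jobs reach it through the `machine-learning` Bull queue that the processors reference. The queue registration itself is not part of this diff; a rough sketch of the usual `@nestjs/bull` wiring, matching the queue name, job name, and payload used in these files, might look like this:
```typescript
import { Module, Injectable } from '@nestjs/common';
import { BullModule, InjectQueue } from '@nestjs/bull';
import { Queue } from 'bull';
import { randomUUID } from 'crypto';

// Assumed registration of the 'machine-learning' queue; the real module
// may also pass Redis connection options here.
@Module({
  imports: [BullModule.registerQueue({ name: 'machine-learning' })],
})
export class QueueWiringExampleModule {}

// Producer side, mirroring the enqueue call in ImageOptimizeProcessor.
@Injectable()
export class ObjectDetectionDispatcher {
  constructor(@InjectQueue('machine-learning') private machineLearningQueue: Queue) {}

  async dispatch(resizePath: string) {
    // 'object-detection' matches the @Process name in MachineLearningProcessor.
    await this.machineLearningQueue.add('object-detection', { resizePath }, { jobId: randomUUID() });
  }
}
```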


@@ -45,13 +45,14 @@ export class ImageOptimizeProcessor {
await this.assetRepository.update(savedAsset, { resizePath: resizePath });
const jobb = await this.machineLearningQueue.add(
'object-detection',
{
resizePath,
},
{ jobId: randomUUID() },
);
// Send file to object detection after resizing
// const detectionJob = await this.machineLearningQueue.add(
// 'object-detection',
// {
// resizePath,
// },
// { jobId: randomUUID() },
// );
});
});


@@ -1,39 +1,38 @@
// import { Process, Processor } from '@nestjs/bull';
// import { InjectRepository } from '@nestjs/typeorm';
// import { Job } from 'bull';
// import { Repository } from 'typeorm';
// import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
// import sharp from 'sharp';
// import fs, { existsSync, mkdirSync } from 'fs';
// import { ConfigService } from '@nestjs/config';
// import * as tfnode from '@tensorflow/tfjs-node';
// import * as cocoSsd from '@tensorflow-models/coco-ssd';
import { Process, Processor } from '@nestjs/bull';
import { InjectRepository } from '@nestjs/typeorm';
import { Job } from 'bull';
import { Repository } from 'typeorm';
import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
import fs from 'fs';
import { ConfigService } from '@nestjs/config';
import * as tfnode from '@tensorflow/tfjs-node';
import * as cocoSsd from '@tensorflow-models/coco-ssd';
// @Processor('machine-learning')
// export class MachineLearningProcessor {
// constructor(
// @InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
// private configService: ConfigService,
// ) {}
@Processor('machine-learning')
export class MachineLearningProcessor {
constructor(
@InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
private configService: ConfigService,
) {}
// @Process('object-detection')
// async handleOptimization(job: Job) {
// try {
// const { resizePath }: { resizePath: string } = job.data;
@Process('object-detection')
async handleOptimization(job: Job) {
try {
const { resizePath }: { resizePath: string } = job.data;
// const image = fs.readFileSync(resizePath);
// const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
// const model = await cocoSsd.load();
// const predictions = await model.detect(decodedImage);
// console.log('start predictions ------------------ ');
// for (var result of predictions) {
// console.log(`Found ${result.class} with score ${result.score}`);
// }
// console.log('end predictions ------------------ ');
const image = fs.readFileSync(resizePath);
const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
const model = await cocoSsd.load();
const predictions = await model.detect(decodedImage);
console.log('\n\nstart predictions ------------------ ');
for (var result of predictions) {
console.log(`Found ${result.class} with score ${result.score}`);
}
console.log('end predictions ------------------\n\n');
// return 'ok';
// } catch (e) {
// console.log('Error object detection ', e);
// }
// }
// }
return 'ok';
} catch (e) {
console.log('Error object detection ', e);
}
}
}
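A side note on the now-active handler above: `cocoSsd.load()` runs inside every job, so the COCO-SSD model is reloaded for each detection. A possible refinement, not part of this commit and sketched with the same libraries, is to cache the loaded model and reuse it:
```typescript
import fs from 'fs';
import * as tfnode from '@tensorflow/tfjs-node';
import * as cocoSsd from '@tensorflow-models/coco-ssd';

// Sketch: load the COCO-SSD model once and reuse it across jobs.
let cachedModel: cocoSsd.ObjectDetection | null = null;

async function getModel(): Promise<cocoSsd.ObjectDetection> {
  if (!cachedModel) {
    cachedModel = await cocoSsd.load();
  }
  return cachedModel;
}

export async function detectObjects(resizePath: string) {
  const image = fs.readFileSync(resizePath);
  const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
  const model = await getModel();
  const predictions = await model.detect(decodedImage);
  decodedImage.dispose(); // release the tensor once detection is done
  return predictions.map((p) => ({ class: p.class, score: p.score }));
}
```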