mirror of https://github.com/immich-app/immich.git
Add readme for top level folder, clean up dead code
parent
85b83f9666
commit
613b4449a7
@ -0,0 +1,33 @@
|
|||||||
|
# IMMICH
|
||||||
|
|
||||||
|
Self-hosted photo backup solution, directly from your mobile phone.
|
||||||
|
|
||||||
|
# Development
|
||||||
|
|
||||||
|
You can use Docker Compose for development; several services compose Immich:
|
||||||
|
|
||||||
|
1. The server
|
||||||
|
2. PostgreSQL
|
||||||
|
3. Redis
|
||||||
|
|
||||||
|
## Populate .env file
|
||||||
|
|
||||||
|
Navigate to the `server` directory and run
|
||||||
|
|
||||||
|
```
|
||||||
|
cp .env.example .env
|
||||||
|
```
|
||||||
|
|
||||||
|
Then populate the values in the `.env` file.
|
||||||
|
|
||||||
|
To start, run
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker-compose up ./server
|
||||||
|
```
|
||||||
|
|
||||||
|
To force a rebuild of the node modules after installing new packages, run
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker-compose up --build -V ./server
|
||||||
|
```
|
||||||
@ -1,39 +1,38 @@
|
|||||||
// import { Process, Processor } from '@nestjs/bull';
|
import { Process, Processor } from '@nestjs/bull';
|
||||||
// import { InjectRepository } from '@nestjs/typeorm';
|
import { InjectRepository } from '@nestjs/typeorm';
|
||||||
// import { Job } from 'bull';
|
import { Job } from 'bull';
|
||||||
// import { Repository } from 'typeorm';
|
import { Repository } from 'typeorm';
|
||||||
// import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
|
import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
|
||||||
// import sharp from 'sharp';
|
import fs from 'fs';
|
||||||
// import fs, { existsSync, mkdirSync } from 'fs';
|
import { ConfigService } from '@nestjs/config';
|
||||||
// import { ConfigService } from '@nestjs/config';
|
import * as tfnode from '@tensorflow/tfjs-node';
|
||||||
// import * as tfnode from '@tensorflow/tfjs-node';
|
import * as cocoSsd from '@tensorflow-models/coco-ssd';
|
||||||
// import * as cocoSsd from '@tensorflow-models/coco-ssd';
|
|
||||||
|
|
||||||
// @Processor('machine-learning')
|
@Processor('machine-learning')
|
||||||
// export class MachineLearningProcessor {
|
export class MachineLearningProcessor {
|
||||||
// constructor(
|
constructor(
|
||||||
// @InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
|
@InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
|
||||||
// private configService: ConfigService,
|
private configService: ConfigService,
|
||||||
// ) {}
|
) {}
|
||||||
|
|
||||||
// @Process('object-detection')
|
@Process('object-detection')
|
||||||
// async handleOptimization(job: Job) {
|
async handleOptimization(job: Job) {
|
||||||
// try {
|
try {
|
||||||
// const { resizePath }: { resizePath: string } = job.data;
|
const { resizePath }: { resizePath: string } = job.data;
|
||||||
|
|
||||||
// const image = fs.readFileSync(resizePath);
|
const image = fs.readFileSync(resizePath);
|
||||||
// const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
|
const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
|
||||||
// const model = await cocoSsd.load();
|
const model = await cocoSsd.load();
|
||||||
// const predictions = await model.detect(decodedImage);
|
const predictions = await model.detect(decodedImage);
|
||||||
// console.log('start predictions ------------------ ');
|
console.log('\n\nstart predictions ------------------ ');
|
||||||
// for (var result of predictions) {
|
for (var result of predictions) {
|
||||||
// console.log(`Found ${result.class} with score ${result.score}`);
|
console.log(`Found ${result.class} with score ${result.score}`);
|
||||||
// }
|
}
|
||||||
// console.log('end predictions ------------------ ');
|
console.log('end predictions ------------------\n\n');
|
||||||
|
|
||||||
// return 'ok';
|
return 'ok';
|
||||||
// } catch (e) {
|
} catch (e) {
|
||||||
// console.log('Error object detection ', e);
|
console.log('Error object detection ', e);
|
||||||
// }
|
}
|
||||||
// }
|
}
|
||||||
// }
|
}
|
||||||
|
|||||||
Loading…
Reference in New Issue