You've already forked pigallery2
							
							
				mirror of
				https://github.com/bpatrik/pigallery2.git
				synced 2025-10-30 23:57:43 +02:00 
			
		
		
		
	Implementing .saved_searches.pg2conf saving tests
This commit is contained in:
		| @@ -51,6 +51,10 @@ export class IndexingManager implements IIndexingManager { | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   /** | ||||
|    * Indexes a dir, but returns early with the scanned version, | ||||
|    * does not wait for the DB to be saved | ||||
|    */ | ||||
|   public indexDirectory(relativeDirectoryName: string): Promise<DirectoryDTO> { | ||||
|     return new Promise(async (resolve, reject): Promise<void> => { | ||||
|       try { | ||||
| @@ -62,14 +66,12 @@ export class IndexingManager implements IIndexingManager { | ||||
|         } | ||||
|         scannedDirectory.media.forEach((p): any[] => p.readyThumbnails = []); | ||||
|  | ||||
|         // filter server side pg2conf | ||||
|         const serverSideConfs = scannedDirectory.metaFile.filter(m => ServerPG2ConfMap[m.name]); | ||||
|         scannedDirectory.metaFile = scannedDirectory.metaFile.filter(m => !ServerPG2ConfMap[m.name]); | ||||
|  | ||||
|         resolve(scannedDirectory); | ||||
|         const dirClone = Utils.shallowClone(scannedDirectory); | ||||
|         // filter server side only config from returning | ||||
|         dirClone.metaFile = dirClone.metaFile.filter(m => !ServerPG2ConfMap[m.name]); | ||||
|  | ||||
|         // process server side pg2conf | ||||
|         await IndexingManager.processServerSidePG2Conf(serverSideConfs); | ||||
|         resolve(dirClone); | ||||
|  | ||||
|         // save directory to DB | ||||
|         this.queueForSave(scannedDirectory).catch(console.error); | ||||
| @@ -94,7 +96,11 @@ export class IndexingManager implements IIndexingManager { | ||||
|   } | ||||
|  | ||||
|   // Todo fix it, once typeorm support connection pools for sqlite | ||||
|   /** | ||||
|    * Queues up a directory to save to the DB. | ||||
|    */ | ||||
|   protected async queueForSave(scannedDirectory: DirectoryDTO): Promise<void> { | ||||
|     // Is this dir already queued for saving? | ||||
|     if (this.savingQueue.findIndex((dir): boolean => dir.name === scannedDirectory.name && | ||||
|       dir.path === scannedDirectory.path && | ||||
|       dir.lastModified === scannedDirectory.lastModified && | ||||
| @@ -357,12 +363,15 @@ export class IndexingManager implements IIndexingManager { | ||||
|     this.isSaving = true; | ||||
|     try { | ||||
|       const connection = await SQLConnection.getConnection(); | ||||
|       const serverSideConfigs = scannedDirectory.metaFile.filter(m => ServerPG2ConfMap[m.name]); | ||||
|       scannedDirectory.metaFile = scannedDirectory.metaFile.filter(m => !ServerPG2ConfMap[m.name]); | ||||
|       const currentDirId: number = await this.saveParentDir(connection, scannedDirectory); | ||||
|       await this.saveChildDirs(connection, currentDirId, scannedDirectory); | ||||
|       await this.saveMedia(connection, currentDirId, scannedDirectory.media); | ||||
|       await this.saveMetaFiles(connection, currentDirId, scannedDirectory); | ||||
|       await ObjectManagers.getInstance().PersonManager.onGalleryIndexUpdate(); | ||||
|       await ObjectManagers.getInstance().VersionManager.updateDataVersion(); | ||||
|       await IndexingManager.processServerSidePG2Conf(serverSideConfigs); | ||||
|     } catch (e) { | ||||
|       throw e; | ||||
|     } finally { | ||||
|   | ||||
| @@ -22,6 +22,7 @@ import {DatabaseType, ServerDataBaseConfig, SQLLogLevel} from '../../../../commo | ||||
| import {AlbumBaseEntity} from './enitites/album/AlbumBaseEntity'; | ||||
| import {SavedSearchEntity} from './enitites/album/SavedSearchEntity'; | ||||
|  | ||||
| const LOG_TAG = '[SQLConnection]'; | ||||
|  | ||||
| export class SQLConnection { | ||||
|  | ||||
| @@ -53,7 +54,7 @@ export class SQLConnection { | ||||
|       if (Config.Server.Log.sqlLevel !== SQLLogLevel.none) { | ||||
|         options.logging = SQLLogLevel[Config.Server.Log.sqlLevel]; | ||||
|       } | ||||
|  | ||||
|       Logger.debug(LOG_TAG, 'Creating connection: ' + DatabaseType[Config.Server.Database.type]); | ||||
|       this.connection = await this.createConnection(options); | ||||
|       await SQLConnection.schemeSync(this.connection); | ||||
|     } | ||||
| @@ -131,7 +132,7 @@ export class SQLConnection { | ||||
|       return await createConnection(options); | ||||
|     } catch (e) { | ||||
|       if (e.sqlMessage === 'Unknown database \'' + options.database + '\'') { | ||||
|         Logger.debug('creating database: ' + options.database); | ||||
|         Logger.debug(LOG_TAG, 'creating database: ' + options.database); | ||||
|         const tmpOption = Utils.clone(options); | ||||
|         // @ts-ignore | ||||
|         delete tmpOption.database; | ||||
| @@ -153,7 +154,7 @@ export class SQLConnection { | ||||
|     if (version && version.version === DataStructureVersion) { | ||||
|       return; | ||||
|     } | ||||
|     Logger.info('Updating database scheme'); | ||||
|     Logger.info(LOG_TAG, 'Updating database scheme'); | ||||
|     if (!version) { | ||||
|       version = new VersionEntity(); | ||||
|     } | ||||
| @@ -173,7 +174,7 @@ export class SQLConnection { | ||||
|       await connection.dropDatabase(); | ||||
|       await connection.synchronize(); | ||||
|       await connection.getRepository(VersionEntity).save(version); | ||||
|       Logger.warn('Could not move users to the new db scheme, deleting them. Details:' + e.toString()); | ||||
|       Logger.warn(LOG_TAG, 'Could not move users to the new db scheme, deleting them. Details:' + e.toString()); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   | ||||
| @@ -46,6 +46,14 @@ export class Utils { | ||||
|     return JSON.parse(JSON.stringify(object)); | ||||
|   } | ||||
|  | ||||
|   static shallowClone<T>(object: T): T { | ||||
|     const c: any = {}; | ||||
|     for (const e of Object.entries(object)) { | ||||
|       c[e[0]] = [1]; | ||||
|     } | ||||
|     return c; | ||||
|   } | ||||
|  | ||||
|   static zeroPrefix(value: string | number, length: number): string { | ||||
|     const ret = '00000' + value; | ||||
|     return ret.substr(ret.length - length); | ||||
|   | ||||
| @@ -299,7 +299,7 @@ export class ServerPhotoConfig { | ||||
| export class ServerMediaConfig { | ||||
|   @ConfigProperty({description: 'Images are loaded from this folder (read permission required)'}) | ||||
|   folder: string = 'demo/images'; | ||||
|   @ConfigProperty({description: 'Thumbnails, coverted photos, videos will be stored here (write permission required)'}) | ||||
|   @ConfigProperty({description: 'Thumbnails, converted photos, videos will be stored here (write permission required)'}) | ||||
|   tempFolder: string = 'demo/tmp'; | ||||
|   @ConfigProperty() | ||||
|   Video: ServerVideoConfig = new ServerVideoConfig(); | ||||
|   | ||||
| @@ -96,7 +96,6 @@ export class GallerySearchComponent implements OnDestroy { | ||||
|  | ||||
|  | ||||
|   onQueryChange(): void { | ||||
|     console.log('cahnge', this.searchQueryDTO); | ||||
|     this.rawSearchText = this.searchQueryParserService.stringify(this.searchQueryDTO); | ||||
|     // this.validateRawSearchText(); | ||||
|   } | ||||
|   | ||||
							
								
								
									
										10
									
								
								test/backend/assets/.saved_searches.pg2conf
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										10
									
								
								test/backend/assets/.saved_searches.pg2conf
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,10 @@ | ||||
| [ | ||||
| { | ||||
|   "name": "Alvin", | ||||
|   "searchQuery": { | ||||
|     "type": 105, | ||||
|     "text": "Alvin", | ||||
|     "matchType": 2 | ||||
|   } | ||||
| } | ||||
| ] | ||||
| @@ -11,11 +11,14 @@ import {MediaDTO} from '../../../../../src/common/entities/MediaDTO'; | ||||
| import {FileDTO} from '../../../../../src/common/entities/FileDTO'; | ||||
| import {IndexingManager} from '../../../../../src/backend/model/database/sql/IndexingManager'; | ||||
| import {ObjectManagers} from '../../../../../src/backend/model/ObjectManagers'; | ||||
| import {PersonManager} from '../../../../../src/backend/model/database/sql/PersonManager'; | ||||
| import {DBTestHelper} from '../../../DBTestHelper'; | ||||
| import {VersionManager} from '../../../../../src/backend/model/database/sql/VersionManager'; | ||||
| import {DiskMangerWorker} from '../../../../../src/backend/model/threading/DiskMangerWorker'; | ||||
| import {ReIndexingSensitivity} from '../../../../../src/common/config/private/PrivateConfig'; | ||||
| import {AlbumManager} from '../../../../../src/backend/model/database/sql/AlbumManager'; | ||||
| import {SearchQueryTypes, TextSearch, TextSearchQueryMatchTypes} from '../../../../../src/common/entities/SearchQueryDTO'; | ||||
| import {ProjectPath} from '../../../../../src/backend/ProjectPath'; | ||||
| import * as path from 'path'; | ||||
| import {DiskManager} from '../../../../../src/backend/model/DiskManger'; | ||||
| 
 | ||||
| const deepEqualInAnyOrder = require('deep-equal-in-any-order'); | ||||
| const chai = require('chai'); | ||||
| @@ -59,12 +62,13 @@ describe('IndexingManager', (sqlHelper: DBTestHelper) => { | ||||
| 
 | ||||
|   beforeEach(async () => { | ||||
|     await sqlHelper.initDB(); | ||||
|     ObjectManagers.getInstance().PersonManager = new PersonManager(); | ||||
|     ObjectManagers.getInstance().VersionManager = new VersionManager(); | ||||
|     //  ObjectManagers.getInstance().PersonManager = new PersonManager();
 | ||||
|     // ObjectManagers.getInstance().VersionManager = new VersionManager();
 | ||||
|   }); | ||||
| 
 | ||||
| 
 | ||||
|   after(async () => { | ||||
|   afterEach(async () => { | ||||
|     Config.loadSync(); | ||||
|     await sqlHelper.clearDB(); | ||||
|   }); | ||||
| 
 | ||||
| @@ -472,7 +476,6 @@ describe('IndexingManager', (sqlHelper: DBTestHelper) => { | ||||
|       .to.deep.equalInAnyOrder(Utils.clone(Utils.removeNullOrEmptyObj(parent))); | ||||
|   }); | ||||
| 
 | ||||
| 
 | ||||
|   it('should reset DB', async () => { | ||||
|     const gm = new GalleryManagerTest(); | ||||
|     const im = new IndexingManagerTest(); | ||||
| @@ -574,4 +577,34 @@ describe('IndexingManager', (sqlHelper: DBTestHelper) => { | ||||
|     }); | ||||
|   }); | ||||
| 
 | ||||
| 
 | ||||
|   DBTestHelper.savedDescribe('should index .pg2conf', () => { | ||||
| 
 | ||||
| 
 | ||||
|     it('.saved_searches.pg2conf', async () => { | ||||
|       Config.Server.Threading.enabled = false; | ||||
| 
 | ||||
|       Config.Server.Media.folder = path.join(__dirname, '/../../../assets'); | ||||
|       ProjectPath.ImageFolder = path.join(__dirname, '/../../../assets'); | ||||
|       const im = new IndexingManagerTest(); | ||||
|       const am = new AlbumManager(); | ||||
|       const dir = await DiskManager.scanDirectory('/'); | ||||
|       await im.saveToDB(dir); | ||||
|       const albums = await am.getAlbums(); | ||||
|       expect(albums[0].preview).to.be.an('object'); | ||||
|       delete albums[0].preview; | ||||
|       expect(albums).to.be.equalInAnyOrder([ | ||||
|         { | ||||
|           id: 1, | ||||
|           name: 'Alvin', | ||||
|           locked: true, | ||||
|           searchQuery: { | ||||
|             type: SearchQueryTypes.person, | ||||
|             text: 'Alvin', | ||||
|             matchType: TextSearchQueryMatchTypes.like | ||||
|           } as TextSearch | ||||
|         } | ||||
|       ]); | ||||
|     }); | ||||
|   }); | ||||
| }); | ||||
		Reference in New Issue
	
	Block a user