From 201f5b41a1865c1b409ad17e658aea82b37f4c1f Mon Sep 17 00:00:00 2001
From: Ravinou
Date: Sat, 5 Apr 2025 11:35:26 +0200
Subject: [PATCH] =?UTF-8?q?feat:=20=E2=9C=A8=20new=20config=20service=20wi?=
 =?UTF-8?q?th=20lowdb=20and=20mutex?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 services/config.service.ts  | 73 +++++++++++++++++++++++++++++++++++++
 services/history.service.ts | 36 ++++++++++++++++++
 2 files changed, 109 insertions(+)
 create mode 100644 services/config.service.ts
 create mode 100644 services/history.service.ts

diff --git a/services/config.service.ts b/services/config.service.ts
new file mode 100644
index 0000000..f1f969d
--- /dev/null
+++ b/services/config.service.ts
@@ -0,0 +1,73 @@
+// dbHelper.ts
+import { Low } from 'lowdb';
+import { JSONFile } from 'lowdb/node';
+import path from 'path';
+import { Mutex } from 'async-mutex';
+import { BorgWarehouseUser, Repository } from '~/types/domain/config.types';
+import repoHistory from './history.service';
+
+const jsonDirectory = path.join(process.cwd(), '/config');
+const usersDbPath = path.join(jsonDirectory, 'users.json');
+const repoDbPath = path.join(jsonDirectory, 'repo.json');
+
+// Lowdb config
+const usersAdapter = new JSONFile<BorgWarehouseUser[]>(usersDbPath);
+const usersDb = new Low<BorgWarehouseUser[]>(usersAdapter, []);
+
+const repoAdapter = new JSONFile<Repository[]>(repoDbPath);
+const repoDb = new Low<Repository[]>(repoAdapter, []);
+
+// Mutexes for concurrent access
+const usersMutex = new Mutex();
+const repoMutex = new Mutex();
+
+// User management
+export const getUsersList = async (): Promise<BorgWarehouseUser[]> => {
+  try {
+    await usersMutex.runExclusive(async () => {
+      await usersDb.read();
+    });
+    return usersDb.data;
+  } catch (error) {
+    console.log('Error reading users.json:', error);
+    return [];
+  }
+};
+
+export const updateUsersList = async (usersList: BorgWarehouseUser[]): Promise<void> => {
+  try {
+    await usersMutex.runExclusive(async () => {
+      usersDb.data = usersList;
+      await usersDb.write();
+    });
+  } catch (error) {
+    console.log('Error writing users.json:', error);
+  }
+};
+
+// Repository management
+export const getRepoList = async (): Promise<Repository[]> => {
+  try {
+    await repoMutex.runExclusive(async () => {
+      await repoDb.read();
+    });
+    return repoDb.data;
+  } catch (error) {
+    console.log('Error reading repo.json:', error);
+    return [];
+  }
+};
+
+export const updateRepoList = async (repoList: Repository[], history = false): Promise<void> => {
+  try {
+    await repoMutex.runExclusive(async () => {
+      if (history) {
+        await repoHistory(repoList);
+      }
+      repoDb.data = repoList;
+      await repoDb.write();
+    });
+  } catch (error) {
+    console.log('Error writing repo.json:', error);
+  }
+};
diff --git a/services/history.service.ts b/services/history.service.ts
new file mode 100644
index 0000000..1399ef0
--- /dev/null
+++ b/services/history.service.ts
@@ -0,0 +1,36 @@
+import { promises as fs } from 'fs';
+import path from 'path';
+import { Repository } from '~/types/domain/config.types';
+
+export default async function repoHistory(repoList: Repository[]) {
+  try {
+    const repoHistoryDir = path.join(process.cwd(), '/config/versions');
+    const maxBackupCount = parseInt(process.env.MAX_REPO_BACKUP_COUNT ?? '8', 10);
+    const timestamp = new Date().toISOString();
+    const backupDate = timestamp.split('T')[0];
+
+    //Create the directory if it does not exist
+    await fs.mkdir(repoHistoryDir, { recursive: true });
+
+    const existingBackups = await fs.readdir(repoHistoryDir);
+
+    if (existingBackups.length >= maxBackupCount) {
+      existingBackups.sort();
+      const backupsToDelete = existingBackups.slice(0, existingBackups.length - maxBackupCount + 1);
+      for (const backupToDelete of backupsToDelete) {
+        const backupFilePathToDelete = path.join(repoHistoryDir, backupToDelete);
+        await fs.unlink(backupFilePathToDelete);
+      }
+    }
+
+    const backupFileName = `${backupDate}.log`;
+    const backupFilePath = path.join(repoHistoryDir, backupFileName);
+    const jsonData = JSON.stringify(repoList, null, 2);
+
+    const logData = `\n>>>> History of file repo.json at "${timestamp}" <<<<\n${jsonData}\n`;
+
+    await fs.appendFile(backupFilePath, logData);
+  } catch (error) {
+    console.log('An error occurred while saving the repo history :', error);
+  }
+}