diff --git a/.github/workflows/test_config.yml b/.github/workflows/test_config.yml index 5ba0832c6..8643bf959 100644 --- a/.github/workflows/test_config.yml +++ b/.github/workflows/test_config.yml @@ -21,7 +21,7 @@ jobs: list-files: shell build: - name: typecheck - Config-Schema + name: Unit Tests, typecheck - Config-Schema if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.docker-compose == 'true' needs: files-changed runs-on: ubuntu-latest @@ -38,3 +38,6 @@ jobs: - name: typecheck run: cd config-schema && yarn typecheck + - name: unit tests + run: cd config-schema && yarn test + diff --git a/.gitignore b/.gitignore index 3d7f34078..d96592bc8 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ *.bak .turbo vite.config.mjs.timestamp-* +log4js-config*.json /node_modules/* messages.pot nbproject diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 000000000..5fcb62e09 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,91 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "DHT-Node Debug Tests", + "runtimeExecutable": "yarn", + "runtimeArgs": [ + "run", + "test:debug" + ], + "skipFiles": [ + "/**" + ], + "console": "integratedTerminal", + "internalConsoleOptions": "neverOpen", + "cwd": "${workspaceFolder}/dht-node" + }, + { + "type": "node", + "request": "launch", + "name": "DHT-Node Debug", + "stopOnEntry": true, + "runtimeExecutable": "yarn", + "runtimeArgs": [ + "run", + "dev" + ], + "skipFiles": [ + "/**" + ], + "console": "integratedTerminal", + "internalConsoleOptions": "openOnSessionStart", + "cwd": "${workspaceFolder}/dht-node" + }, + { + "type": "node", + "request": "launch", + "name": "Federation Debug Tests", + "runtimeExecutable": "yarn", + "runtimeArgs": [ + "run", + "test:debug" + ], + "skipFiles": [ + "/**" + ], + "console": "integratedTerminal", + "internalConsoleOptions": "neverOpen", + "cwd": "${workspaceFolder}/federation" + }, + { + "type": "node", + "request": "launch", + "name": "Federation Debug", + "stopOnEntry": true, + "runtimeExecutable": "yarn", + "runtimeArgs": [ + "run", + "dev" + ], + "skipFiles": [ + "/**" + ], + "console": "integratedTerminal", + "internalConsoleOptions": "openOnSessionStart", + "cwd": "${workspaceFolder}/federation" + }, + { + "type": "node", + "request": "launch", + "name": "Backend Debug", + "stopOnEntry": true, + "runtimeExecutable": "yarn", + "runtimeArgs": [ + "run", + "dev" + ], + "skipFiles": [ + "/**" + ], + "console": "integratedTerminal", + "internalConsoleOptions": "openOnSessionStart", + "cwd": "${workspaceFolder}/backend" + } + ] +} \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 385de1f12..12d101724 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,8 +4,26 @@ All notable changes to this project will be documented in this file. Dates are d Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog). 
-#### [2.5.2](https://github.com/gradido/gradido/compare/2.3.1...2.5.2) +#### [2.6.0](https://github.com/gradido/gradido/compare/2.3.1...2.6.0) +- fix(frontend): fix contribution link [`#3500`](https://github.com/gradido/gradido/pull/3500) +- feat(other): disable index html caching, reenable limits [`#3497`](https://github.com/gradido/gradido/pull/3497) +- fix(admin): fix accidently remove user states [`#3496`](https://github.com/gradido/gradido/pull/3496) +- feat(frontend): use grass as faster alternative to sass [`#3491`](https://github.com/gradido/gradido/pull/3491) +- refactor(database): ms precision and git compatible entity versioning [`#3495`](https://github.com/gradido/gradido/pull/3495) +- refactor(frontend): rename community page and child components to contributions [`#3494`](https://github.com/gradido/gradido/pull/3494) +- feat(frontend): change experimental to beta [`#3493`](https://github.com/gradido/gradido/pull/3493) +- fix(workflow): wrong database docker call [`#3490`](https://github.com/gradido/gradido/pull/3490) +- feat(frontend): disable cross community tx redeem in frontend [`#3486`](https://github.com/gradido/gradido/pull/3486) +- feat(other): split start script, put additional installs in extra script [`#3488`](https://github.com/gradido/gradido/pull/3488) +- fix(other): correct bun install script [`#3487`](https://github.com/gradido/gradido/pull/3487) +- refactor(frontend): contribution page and some graphql code in backend used by the page [`#3483`](https://github.com/gradido/gradido/pull/3483) +- refactor(other): use esbuild instead of tsc [`#3479`](https://github.com/gradido/gradido/pull/3479) +- refactor(other): update to yarn workspaces and turbo [`#3478`](https://github.com/gradido/gradido/pull/3478) +- feat(workflow): x-cross tx per link [`#3467`](https://github.com/gradido/gradido/pull/3467) +- fix(backend): fix problem with humhub, changing from and to alias [`#3484`](https://github.com/gradido/gradido/pull/3484) +- feat(workflow): stop deploy script on error, always cleanup lock file [`#3482`](https://github.com/gradido/gradido/pull/3482) +- chore(release): v2.5.2 beta [`#3480`](https://github.com/gradido/gradido/pull/3480) - refactor(other): use biome instead of eslint [`#3472`](https://github.com/gradido/gradido/pull/3472) - fix(frontend): set explicit page size for admin and moderator user on information page [`#3474`](https://github.com/gradido/gradido/pull/3474) - fix(backend): humhub sync on edge cases [`#3471`](https://github.com/gradido/gradido/pull/3471) diff --git a/admin/package.json b/admin/package.json index d98e10209..31b69defc 100644 --- a/admin/package.json +++ b/admin/package.json @@ -3,7 +3,7 @@ "description": "Administration Interface for Gradido", "main": "index.js", "author": "Gradido Academy - https://www.gradido.net", - "version": "2.5.2", + "version": "2.6.0", "license": "Apache-2.0", "scripts": { "dev": "vite", diff --git a/backend/Dockerfile b/backend/Dockerfile index 4c1ceb36e..4ab2f1883 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -113,8 +113,6 @@ COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/build/worker.js ./wo # add node_modules from production_node_modules COPY --chown=app:app --from=production-node-modules ${DOCKER_WORKDIR}/node_modules ./node_modules -# Copy log4js-config.json to provide log configuration -COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/log4js-config.json ./log4js-config.json # Copy locales COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/locales 
./locales diff --git a/backend/esbuild.config.ts b/backend/esbuild.config.ts index 87efef48e..9cdd51cc1 100644 --- a/backend/esbuild.config.ts +++ b/backend/esbuild.config.ts @@ -13,4 +13,5 @@ build({ external: ['sodium-native', 'email-templates'], plugins: [esbuildDecorators()], minify: true, + sourcemap: true, }) diff --git a/backend/jest.config.js b/backend/jest.config.js index 87f32599d..ddf94f977 100644 --- a/backend/jest.config.js +++ b/backend/jest.config.js @@ -2,7 +2,7 @@ module.exports = { verbose: true, preset: 'ts-jest', - collectCoverage: true, + collectCoverage: false, collectCoverageFrom: ['src/**/*.ts', '!**/node_modules/**', '!src/seeds/**', '!build/**'], coverageThreshold: { global: { diff --git a/backend/log4js-config.json b/backend/log4js-config.json deleted file mode 100644 index 0807e6a12..000000000 --- a/backend/log4js-config.json +++ /dev/null @@ -1,173 +0,0 @@ -{ - "appenders": - { - "access": - { - "type": "dateFile", - "filename": "../logs/backend/access.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "apollo": - { - "type": "dateFile", - "filename": "../logs/backend/apollo.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "backend": - { - "type": "dateFile", - "filename": "../logs/backend/backend.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "klicktipp": - { - "type": "dateFile", - "filename": "../logs/backend/klicktipp.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "gms": - { - "type": "dateFile", - "filename": "../logs/backend/gms.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "errorFile": - { - "type": "dateFile", - "filename": "../logs/backend/errors.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m %s" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "errors": - { - "type": "logLevelFilter", - "level": "error", - "appender": "errorFile" - }, - "out": - { - "type": "stdout", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - } - }, - "apolloOut": - { - "type": "stdout", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - } - } - }, - "categories": - { - "default": - { - "appenders": - [ - "out", - "errors" - ], - "level": "debug", - "enableCallStack": true - }, - "apollo": - { - "appenders": - [ - "apollo", - "apolloOut", - "errors" - ], - "level": "debug", - "enableCallStack": true - }, - "backend": - { - "appenders": - [ - "backend", - "errors" - ], - "level": "debug", - "enableCallStack": true - }, - "klicktipp": - { - "appenders": - [ - "klicktipp", - "errors" - ], - 
"level": "debug", - "enableCallStack": true - }, - "gms": - { - "appenders": - [ - "gms", - "errors" - ], - "level": "debug", - "enableCallStack": true - }, - "http": - { - "appenders": - [ - "access" - ], - "level": "info" - } - } -} diff --git a/backend/package.json b/backend/package.json index 4dcd55f50..aeb6fcb5e 100644 --- a/backend/package.json +++ b/backend/package.json @@ -1,6 +1,6 @@ { "name": "backend", - "version": "2.5.2", + "version": "2.6.0", "private": false, "description": "Gradido unified backend providing an API-Service for Gradido Transactions", "repository": "https://github.com/gradido/gradido/backend", @@ -9,9 +9,9 @@ "main": "src/index.ts", "scripts": { "build": "ts-node ./esbuild.config.ts && mkdirp build/templates/ && ncp src/emails/templates build/templates && mkdirp locales/ && ncp src/locales locales", - "clean": "tsc --build --clean", "dev": "cross-env TZ=UTC nodemon -w src --ext ts,pug,json,css -r tsconfig-paths/register src/index.ts", "test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_backend jest --runInBand --forceExit --detectOpenHandles", + "test:coverage": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_backend jest --coverage --runInBand --forceExit --detectOpenHandles", "seed": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/seeds/index.ts", "klicktipp": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/util/executeKlicktipp.ts", "gmsusers": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/apis/gms/ExportUsers.ts", @@ -21,11 +21,13 @@ "lint:fix:unsafe": "biome check --fix --unsafe", "locales": "scripts/sort.sh", "locales:fix": "scripts/sort.sh --fix", - "start": "cross-env TZ=UTC NODE_ENV=production node build/index.js", + "start": "cross-env TZ=UTC node build/index.js", "typecheck": "tsc --noEmit" }, "nodemonConfig": { - "ignore": ["**/*.test.ts"] + "ignore": [ + "**/*.test.ts" + ] }, "dependencies": { "cross-env": "^7.0.3", @@ -34,7 +36,7 @@ }, "devDependencies": { "@anatine/esbuild-decorators": "^0.2.19", - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@swc/cli": "^0.7.3", "@swc/core": "^1.11.24", "@swc/helpers": "^0.5.17", @@ -47,6 +49,7 @@ "@types/node": "^17.0.21", "@types/nodemailer": "^6.4.4", "@types/sodium-native": "^2.3.5", + "@types/source-map-support": "^0.5.10", "@types/uuid": "^8.3.4", "apollo-server-express": "^2.25.2", "apollo-server-testing": "^2.25.2", @@ -85,12 +88,13 @@ "random-bigint": "^0.0.1", "reflect-metadata": "^0.1.13", "regenerator-runtime": "^0.14.1", + "source-map-support": "^0.5.21", "ts-jest": "27.0.5", "ts-node": "^10.9.2", "tsconfig-paths": "^4.1.1", "type-graphql": "^1.1.1", "typed-rest-client": "^1.8.11", - "typeorm": "^0.3.16", + "typeorm": "^0.3.22", "typescript": "^4.9.5", "uuid": "^8.3.2", "workerpool": "^9.2.0", diff --git a/backend/src/apis/HttpRequest.ts b/backend/src/apis/HttpRequest.ts index ef6f540b7..0b63c4af9 100644 --- a/backend/src/apis/HttpRequest.ts +++ b/backend/src/apis/HttpRequest.ts @@ -1,7 +1,10 @@ import axios from 'axios' +import { LOG4JS_APIS_CATEGORY_NAME } from '@/apis' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' + +const logger = getLogger(`${LOG4JS_APIS_CATEGORY_NAME}.HttpRequest`) import { httpAgent, httpsAgent } from './ConnectionAgents' diff --git a/backend/src/apis/KlicktippController.ts b/backend/src/apis/KlicktippController.ts index cb665ea7f..5fe55fb2e 
100644 --- a/backend/src/apis/KlicktippController.ts +++ b/backend/src/apis/KlicktippController.ts @@ -1,9 +1,10 @@ +import { LOG4JS_APIS_CATEGORY_NAME } from '@/apis' import { CONFIG } from '@/config' -import { backendLogger as logger } from '@/server/logger' - import KlicktippConnector from 'klicktipp-api' +import { getLogger } from 'log4js' const klicktippConnector = new KlicktippConnector() +const logger = getLogger(`${LOG4JS_APIS_CATEGORY_NAME}.KlicktippController`) export const subscribe = async ( email: string, diff --git a/backend/src/apis/dltConnector/DltConnectorClient.test.ts b/backend/src/apis/dltConnector/DltConnectorClient.test.ts index 4dcc991ec..00b15348d 100644 --- a/backend/src/apis/dltConnector/DltConnectorClient.test.ts +++ b/backend/src/apis/dltConnector/DltConnectorClient.test.ts @@ -1,19 +1,18 @@ import { Transaction as DbTransaction } from 'database' import { Decimal } from 'decimal.js-light' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { cleanDB, testEnvironment } from '@test/helpers' import { CONFIG } from '@/config' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { DltConnectorClient } from './DltConnectorClient' -let con: Connection +let con: DataSource let testEnv: { - con: Connection + con: DataSource } // Mock the GraphQLClient @@ -76,14 +75,14 @@ describe.skip('transmitTransaction, without db connection', () => { describe('transmitTransaction', () => { beforeAll(async () => { - testEnv = await testEnvironment(logger) + testEnv = await testEnvironment() con = testEnv.con await cleanDB() }) afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) const transaction = new DbTransaction() diff --git a/backend/src/apis/dltConnector/DltConnectorClient.ts b/backend/src/apis/dltConnector/DltConnectorClient.ts index 2bebc84c0..8eb8de3fb 100644 --- a/backend/src/apis/dltConnector/DltConnectorClient.ts +++ b/backend/src/apis/dltConnector/DltConnectorClient.ts @@ -1,14 +1,17 @@ import { Transaction as DbTransaction } from 'database' import { GraphQLClient, gql } from 'graphql-request' +import { LOG4JS_APIS_CATEGORY_NAME } from '@/apis/index' import { CONFIG } from '@/config' import { TransactionTypeId } from '@/graphql/enum/TransactionTypeId' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' import { TransactionResult } from './model/TransactionResult' import { UserIdentifier } from './model/UserIdentifier' +const logger = getLogger(`${LOG4JS_APIS_CATEGORY_NAME}.dltConnector`) + const sendTransaction = gql` mutation ($input: TransactionInput!) 
{ sendTransaction(data: $input) { diff --git a/backend/src/apis/gms/ExportUsers.ts b/backend/src/apis/gms/ExportUsers.ts index 29e6dc6fe..aa210707c 100644 --- a/backend/src/apis/gms/ExportUsers.ts +++ b/backend/src/apis/gms/ExportUsers.ts @@ -1,34 +1,28 @@ import { User as DbUser } from 'database' // import { createTestClient } from 'apollo-server-testing' +import { LOG4JS_GMS_CATEGORY_NAME } from '@/apis/gms/index' // import { createGmsUser } from '@/apis/gms/GmsClient' // import { GmsUser } from '@/apis/gms/model/GmsUser' import { CONFIG } from '@/config' import { getHomeCommunity } from '@/graphql/resolver/util/communities' import { sendUserToGms } from '@/graphql/resolver/util/sendUserToGms' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' -import { checkDBVersion } from '@/typeorm/DBVersion' -import { Connection } from '@/typeorm/connection' +import { initLogging } from '@/server/logger' +import { AppDatabase } from 'database' +import { getLogger } from 'log4js' + +const logger = getLogger(`${LOG4JS_GMS_CATEGORY_NAME}.ExportUsers`) CONFIG.EMAIL = false // use force to copy over all user even if gmsRegistered is set to true const forceMode = process.argv.includes('--force') async function main() { + initLogging() // open mysql connection - const con = await Connection.getInstance() - if (!con?.isConnected) { - logger.fatal(`Couldn't open connection to database!`) - throw new Error(`Fatal: Couldn't open connection to database`) - } - - // check for correct database version - const dbVersion = await checkDBVersion(CONFIG.DB_VERSION) - if (!dbVersion) { - logger.fatal('Fatal: Database Version incorrect') - throw new Error('Fatal: Database Version incorrect') - } + const con = AppDatabase.getInstance() + await con.init() const homeCom = await getHomeCommunity() if (homeCom.gmsApiKey === null) { @@ -81,7 +75,6 @@ async function main() { } main().catch((e) => { - // biome-ignore lint/suspicious/noConsole: logger isn't used here - console.error(e) + logger.error(e) process.exit(1) }) diff --git a/backend/src/apis/gms/GmsClient.ts b/backend/src/apis/gms/GmsClient.ts index 537fe36f2..0dd5eccc5 100644 --- a/backend/src/apis/gms/GmsClient.ts +++ b/backend/src/apis/gms/GmsClient.ts @@ -1,13 +1,16 @@ import axios from 'axios' import { httpAgent, httpsAgent } from '@/apis/ConnectionAgents' +import { LOG4JS_GMS_CATEGORY_NAME } from '@/apis/gms/index' import { CONFIG } from '@/config' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { ensureUrlEndsWithSlash } from '@/util/utilities' +import { getLogger } from 'log4js' import { GmsUser } from './model/GmsUser' +const logger = getLogger(`${LOG4JS_GMS_CATEGORY_NAME}.GmsClient`) + /* export async function communityList(): Promise { const baseUrl = ensureUrlEndsWithSlash(CONFIG.GMS_URL) diff --git a/backend/src/apis/gms/index.ts b/backend/src/apis/gms/index.ts new file mode 100644 index 000000000..620aaca4b --- /dev/null +++ b/backend/src/apis/gms/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_APIS_CATEGORY_NAME } from '@/apis' + +export const LOG4JS_GMS_CATEGORY_NAME = `${LOG4JS_APIS_CATEGORY_NAME}.gms` diff --git a/backend/src/apis/humhub/ExportUsers.ts b/backend/src/apis/humhub/ExportUsers.ts index b616cbb2e..5eb3f8d6c 100644 --- a/backend/src/apis/humhub/ExportUsers.ts +++ b/backend/src/apis/humhub/ExportUsers.ts @@ -1,12 +1,9 @@ -import { User } from 'database' +import { AppDatabase, User } from 'database' import { IsNull, Not } from 
'typeorm' -import { CONFIG } from '@/config' -import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' -import { checkDBVersion } from '@/typeorm/DBVersion' -import { Connection } from '@/typeorm/connection' - +import { LOG4JS_HUMHUB_CATEGORY_NAME } from '@/apis/humhub/index' +import { initLogging } from '@/server/logger' +import { getLogger } from 'log4js' import { HumHubClient } from './HumHubClient' import { GetUser } from './model/GetUser' import { UsersResponse } from './model/UsersResponse' @@ -14,6 +11,7 @@ import { ExecutedHumhubAction, syncUser } from './syncUser' const USER_BULK_SIZE = 20 const HUMHUB_BULK_SIZE = 50 +const logger = getLogger(`${LOG4JS_HUMHUB_CATEGORY_NAME}.ExportUsers`) function getUsersPage(page: number, limit: number): Promise<[User[], number]> { return User.findAndCount({ @@ -37,7 +35,7 @@ async function loadUsersFromHumHub(client: HumHubClient): Promise { - // biome-ignore lint/suspicious/noConsole: logger isn't used here - console.error(e) + logger.error(e) process.exit(1) }) diff --git a/backend/src/apis/humhub/HumHubClient.ts b/backend/src/apis/humhub/HumHubClient.ts index daa19b5b8..ff4529234 100644 --- a/backend/src/apis/humhub/HumHubClient.ts +++ b/backend/src/apis/humhub/HumHubClient.ts @@ -4,8 +4,9 @@ import { IRequestOptions, IRestResponse, RestClient } from 'typed-rest-client' import { CONFIG } from '@/config' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' +import { LOG4JS_HUMHUB_CATEGORY_NAME } from '@/apis/humhub/index' +import { getLogger } from 'log4js' import { PostUserLoggingView } from './logging/PostUserLogging.view' import { GetUser } from './model/GetUser' import { PostUser } from './model/PostUser' @@ -13,6 +14,8 @@ import { Space } from './model/Space' import { SpacesResponse } from './model/SpacesResponse' import { UsersResponse } from './model/UsersResponse' +const logger = getLogger(`${LOG4JS_HUMHUB_CATEGORY_NAME}.HumHubClient`) + /** * HumHubClient as singleton class */ diff --git a/backend/src/apis/humhub/index.ts b/backend/src/apis/humhub/index.ts new file mode 100644 index 000000000..dda319145 --- /dev/null +++ b/backend/src/apis/humhub/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_APIS_CATEGORY_NAME } from '@/apis' + +export const LOG4JS_HUMHUB_CATEGORY_NAME = `${LOG4JS_APIS_CATEGORY_NAME}.humhub` diff --git a/backend/src/apis/humhub/syncUser.ts b/backend/src/apis/humhub/syncUser.ts index 1e62871be..257f16683 100644 --- a/backend/src/apis/humhub/syncUser.ts +++ b/backend/src/apis/humhub/syncUser.ts @@ -1,13 +1,16 @@ import { User } from 'database' +import { LOG4JS_HUMHUB_CATEGORY_NAME } from '@/apis/humhub/index' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' import { HumHubClient } from './HumHubClient' import { isHumhubUserIdenticalToDbUser } from './compareHumhubUserDbUser' import { GetUser } from './model/GetUser' import { PostUser } from './model/PostUser' +const logger = getLogger(`${LOG4JS_HUMHUB_CATEGORY_NAME}.syncUser`) + export enum ExecutedHumhubAction { UPDATE, CREATE, diff --git a/backend/src/apis/index.ts b/backend/src/apis/index.ts new file mode 100644 index 000000000..c4e45826d --- /dev/null +++ b/backend/src/apis/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' + +export const LOG4JS_APIS_CATEGORY_NAME = `${LOG4JS_BASE_CATEGORY_NAME}.apis` diff --git 
a/backend/src/apis/openai/OpenaiClient.ts b/backend/src/apis/openai/OpenaiClient.ts index b2a859581..dd447c7ad 100644 --- a/backend/src/apis/openai/OpenaiClient.ts +++ b/backend/src/apis/openai/OpenaiClient.ts @@ -4,10 +4,14 @@ import { Message } from 'openai/resources/beta/threads/messages' import { httpsAgent } from '@/apis/ConnectionAgents' import { CONFIG } from '@/config' -import { backendLogger as logger } from '@/server/logger' import { Message as MessageModel } from './model/Message' +import { LOG4JS_APIS_CATEGORY_NAME } from '@/apis' +import { getLogger } from 'log4js' + +const logger = getLogger(`${LOG4JS_APIS_CATEGORY_NAME}.openai.OpenaiClient`) + /** * The `OpenaiClient` class is a singleton that provides an interface to interact with the OpenAI API. * It ensures that only one instance of the client is created and used throughout the application. diff --git a/backend/src/auth/jwt/JWT.ts b/backend/src/auth/jwt/JWT.ts index 6f6581773..07384c03b 100644 --- a/backend/src/auth/jwt/JWT.ts +++ b/backend/src/auth/jwt/JWT.ts @@ -1,12 +1,14 @@ -import { createPrivateKey, sign } from 'node:crypto' - -import { JWTPayload, SignJWT, decodeJwt, jwtVerify } from 'jose' +import { SignJWT, decodeJwt, jwtVerify } from 'jose' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { JwtPayloadType } from './payloadtypes/JwtPayloadType' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' +import { getLogger } from 'log4js' + +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.auth.jwt.JWT`) + export const verify = async (token: string, signkey: string): Promise => { if (!token) { throw new LogError('401 Unauthorized') diff --git a/backend/src/config/const.ts b/backend/src/config/const.ts new file mode 100644 index 000000000..68bd124a8 --- /dev/null +++ b/backend/src/config/const.ts @@ -0,0 +1 @@ +export const LOG4JS_BASE_CATEGORY_NAME = 'backend' diff --git a/backend/src/config/index.test.ts b/backend/src/config/index.test.ts deleted file mode 100644 index 24908513a..000000000 --- a/backend/src/config/index.test.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { CONFIG } from './index' - -describe('config/index', () => { - describe('decay start block', () => { - it('has the correct date set', () => { - expect(CONFIG.DECAY_START_TIME).toEqual(new Date('2021-05-13 17:46:31-0000')) - }) - }) -}) diff --git a/backend/src/config/index.ts b/backend/src/config/index.ts index 2eba8c8c1..9d61c2da4 100644 --- a/backend/src/config/index.ts +++ b/backend/src/config/index.ts @@ -1,24 +1,18 @@ // ATTENTION: DO NOT PUT ANY SECRETS IN HERE (or the .env) -import { validate } from 'config-schema' -import { latestDbVersion } from 'database' -import { Decimal } from 'decimal.js-light' +import { LogLevel, validate } from 'config-schema' import dotenv from 'dotenv' import { schema } from './schema' dotenv.config() -Decimal.set({ - precision: 25, - rounding: Decimal.ROUND_HALF_UP, -}) - -const constants = { - // DB_VERSION: '0087-add_index_on_user_roles', - DB_VERSION: latestDbVersion, - DECAY_START_TIME: new Date('2021-05-13 17:46:31-0000'), // GMT+0 - LOG4JS_CONFIG: 'log4js-config.json', +const logging = { + LOG4JS_CONFIG: process.env.LOG4JS_CONFIG ?? 'log4js-config.json', + // default log level on production should be info + // log level for default log4js-config.json, don't change existing log4js-config.json + LOG_LEVEL: (process.env.LOG_LEVEL ?? 'info') as LogLevel, + LOG_FILES_BASE_PATH: process.env.LOG_FILES_BASE_PATH ?? 
'../logs/backend', } const server = { @@ -30,23 +24,6 @@ const server = { GDT_ACTIVE: process.env.GDT_ACTIVE === 'true' || false, GDT_API_URL: process.env.GDT_API_URL ?? 'https://gdt.gradido.net', PRODUCTION: process.env.NODE_ENV === 'production' || false, - // default log level on production should be info - LOG_LEVEL: process.env.LOG_LEVEL ?? 'info', -} - -const database = { - DB_CONNECT_RETRY_COUNT: process.env.DB_CONNECT_RETRY_COUNT - ? Number.parseInt(process.env.DB_CONNECT_RETRY_COUNT) - : 15, - DB_CONNECT_RETRY_DELAY_MS: process.env.DB_CONNECT_RETRY_DELAY_MS - ? Number.parseInt(process.env.DB_CONNECT_RETRY_DELAY_MS) - : 500, - DB_HOST: process.env.DB_HOST ?? 'localhost', - DB_PORT: process.env.DB_PORT ? Number.parseInt(process.env.DB_PORT) : 3306, - DB_USER: process.env.DB_USER ?? 'root', - DB_PASSWORD: process.env.DB_PASSWORD ?? '', - DB_DATABASE: process.env.DB_DATABASE ?? 'gradido_community', - TYPEORM_LOGGING_RELATIVE_PATH: process.env.TYPEORM_LOGGING_RELATIVE_PATH ?? 'typeorm.backend.log', } const klicktipp = { @@ -161,9 +138,8 @@ const openai = { } export const CONFIG = { - ...constants, + ...logging, ...server, - ...database, ...klicktipp, ...dltConnector, ...community, diff --git a/backend/src/config/schema.ts b/backend/src/config/schema.ts index 4bfe2a551..2fb576bdd 100644 --- a/backend/src/config/schema.ts +++ b/backend/src/config/schema.ts @@ -3,14 +3,6 @@ import { COMMUNITY_NAME, COMMUNITY_SUPPORT_MAIL, COMMUNITY_URL, - DB_CONNECT_RETRY_COUNT, - DB_CONNECT_RETRY_DELAY_MS, - DB_DATABASE, - DB_HOST, - DB_PASSWORD, - DB_PORT, - DB_USER, - DB_VERSION, DECAY_START_TIME, GDT_ACTIVE, GDT_API_URL, @@ -21,11 +13,11 @@ import { LOG4JS_CONFIG, LOGIN_APP_SECRET, LOGIN_SERVER_KEY, + LOG_FILES_BASE_PATH, LOG_LEVEL, NODE_ENV, OPENAI_ACTIVE, PRODUCTION, - TYPEORM_LOGGING_RELATIVE_PATH, } from 'config-schema' import Joi from 'joi' @@ -34,14 +26,6 @@ export const schema = Joi.object({ COMMUNITY_URL, COMMUNITY_DESCRIPTION, COMMUNITY_SUPPORT_MAIL, - DB_HOST, - DB_PASSWORD, - DB_PORT, - DB_USER, - DB_VERSION, - DB_DATABASE, - DB_CONNECT_RETRY_COUNT, - DB_CONNECT_RETRY_DELAY_MS, DECAY_START_TIME, GDT_API_URL, GDT_ACTIVE, @@ -49,6 +33,7 @@ export const schema = Joi.object({ GRAPHIQL, HUMHUB_ACTIVE, HUMHUB_API_URL, + LOG_FILES_BASE_PATH, LOG4JS_CONFIG, LOGIN_APP_SECRET, LOGIN_SERVER_KEY, @@ -56,7 +41,6 @@ export const schema = Joi.object({ NODE_ENV, OPENAI_ACTIVE, PRODUCTION, - TYPEORM_LOGGING_RELATIVE_PATH, COMMUNITY_REDEEM_URL: Joi.string() .uri({ scheme: ['http', 'https'] }) diff --git a/backend/src/emails/__snapshots__/sendEmailVariants.test.ts.snap b/backend/src/emails/__snapshots__/sendEmailVariants.test.ts.snap index 37263e937..aec3510fe 100644 --- a/backend/src/emails/__snapshots__/sendEmailVariants.test.ts.snap +++ b/backend/src/emails/__snapshots__/sendEmailVariants.test.ts.snap @@ -496,7 +496,10 @@ exports[`sendEmailVariants sendAddedContributionMessageEmail result has the corr

 Read and reply to message
-To view and reply to the message, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
-To account
+„My message.“
+To reply to the message, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
+To account
 Please do not reply to this email.
@@ -667,8 +670,8 @@ exports[`sendEmailVariants sendContributionChangedByModeratorEmail result has th

 Contribution details
-To see your common good contributions and related messages, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
-To account
-Or copy the link into your browser window.
-http://localhost/community/contributions
+To see your common good contributions and related messages, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
+To account
+Or copy the link into your browser window.
+https://gradido.net/contributions/own-contributions/1#contributionListItem-1
 Please do not reply to this email.
@@ -839,8 +842,8 @@ exports[`sendEmailVariants sendContributionConfirmedEmail result has the correct

 Contribution details
-To see your common good contributions and related messages, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
-To account
-Or copy the link into your browser window.
-http://localhost/community/contributions
+To see your common good contributions and related messages, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
+To account
+Or copy the link into your browser window.
+https://gradido.net/contributions/own-contributions/1#contributionListItem-1
 Please do not reply to this email.
@@ -1011,8 +1014,8 @@ exports[`sendEmailVariants sendContributionDeletedEmail result has the correct h

 Contribution details
-To see your common good contributions and related messages, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
-To account
-Or copy the link into your browser window.
-http://localhost/community/contributions
+To see your common good contributions and related messages, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
+To account
+Or copy the link into your browser window.
+https://gradido.net/contributions/own-contributions/1#contributionListItem-1
 Please do not reply to this email.
@@ -1183,8 +1186,8 @@ exports[`sendEmailVariants sendContributionDeniedEmail result has the correct ht

 Contribution details
-To see your common good contributions and related messages, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
-To account
-Or copy the link into your browser window.
-http://localhost/community/contributions
+To see your common good contributions and related messages, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab.
+To account
+Or copy the link into your browser window.
+https://gradido.net/contributions/own-contributions/1#contributionListItem-1
 Please do not reply to this email.
diff --git a/backend/src/emails/index.ts b/backend/src/emails/index.ts new file mode 100644 index 000000000..d529ac28d --- /dev/null +++ b/backend/src/emails/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' + +export const LOG4JS_EMAILS_CATEGORY_NAME = `${LOG4JS_BASE_CATEGORY_NAME}.emails` diff --git a/backend/src/emails/sendEmailTranslated.test.ts b/backend/src/emails/sendEmailTranslated.test.ts index 917d80ea6..b4da5fbfb 100644 --- a/backend/src/emails/sendEmailTranslated.test.ts +++ b/backend/src/emails/sendEmailTranslated.test.ts @@ -1,11 +1,15 @@ import { createTransport } from 'nodemailer' -import { i18n, logger } from '@test/testSetup' +import { i18n } from '@test/testSetup' import { CONFIG } from '@/config' +import { getLogger } from 'config-schema/test/testSetup' +import { LOG4JS_EMAILS_CATEGORY_NAME } from '.' import { sendEmailTranslated } from './sendEmailTranslated' +const logger = getLogger(`${LOG4JS_EMAILS_CATEGORY_NAME}.sendEmailTranslated`) + const testMailServerHost = 'localhost' const testMailServerPort = 1025 diff --git a/backend/src/emails/sendEmailTranslated.ts b/backend/src/emails/sendEmailTranslated.ts index ae52b3975..30050abf3 100644 --- a/backend/src/emails/sendEmailTranslated.ts +++ b/backend/src/emails/sendEmailTranslated.ts @@ -5,7 +5,10 @@ import i18n from 'i18n' import { createTransport } from 'nodemailer' import { CONFIG } from '@/config' -import { backendLogger as logger } from '@/server/logger' +import { LOG4JS_EMAILS_CATEGORY_NAME } from '@/emails' +import { getLogger } from 'log4js' + +const logger = getLogger(`${LOG4JS_EMAILS_CATEGORY_NAME}.sendEmailTranslated`) export const sendEmailTranslated = async ({ receiver, @@ -31,8 +34,8 @@ export const sendEmailTranslated = async ({ i18n.setLocale('en') // for logging logger.info( - `send Email: language=${locals.locale as string} to=${receiver.to}` + - (receiver.cc ? `, cc=${receiver.cc}` : '') + + `send Email: language=${locals.locale as string} to=${receiver.to.substring(0, 3)}...` + + (receiver.cc ? `, cc=${receiver.cc.substring(0, 3)}...` : '') + `, subject=${i18n.__('emails.' + template + '.subject')}`, ) diff --git a/backend/src/emails/sendEmailVariants.test.ts b/backend/src/emails/sendEmailVariants.test.ts index 1afa49eeb..300885472 100644 --- a/backend/src/emails/sendEmailVariants.test.ts +++ b/backend/src/emails/sendEmailVariants.test.ts @@ -1,9 +1,13 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { Decimal } from 'decimal.js-light' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { testEnvironment } from '@test/helpers' -import { i18n as localization, logger } from '@test/testSetup' +import { i18n as localization } from '@test/testSetup' +import { getLogger } from 'config-schema/test/testSetup' +import { LOG4JS_EMAILS_CATEGORY_NAME } from '.' 
+ +const logger = getLogger(`${LOG4JS_EMAILS_CATEGORY_NAME}.sendEmailTranslated`) import { CONFIG } from '@/config' @@ -45,11 +49,11 @@ jest.mock('nodemailer', () => { } }) -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { @@ -58,13 +62,15 @@ beforeAll(async () => { }) afterAll(async () => { - await con.close() + await con.destroy() }) const sendEmailTranslatedSpy = jest.spyOn(sendEmailTranslatedApi, 'sendEmailTranslated') describe('sendEmailVariants', () => { let result: any + const contributionFrontendLink = + 'https://gradido.net/contributions/own-contributions/1#contributionListItem-1' describe('sendAddedContributionMessageEmail', () => { beforeAll(async () => { @@ -76,6 +82,8 @@ describe('sendEmailVariants', () => { senderFirstName: 'Bibi', senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', + contributionFrontendLink, + message: 'My message.', }) }) @@ -93,9 +101,9 @@ describe('sendEmailVariants', () => { senderFirstName: 'Bibi', senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, + contributionFrontendLink, + message: 'My message.', supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, }, }) }) @@ -234,6 +242,7 @@ describe('sendEmailVariants', () => { senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', contributionAmount: new Decimal(23.54), + contributionFrontendLink, }) }) @@ -252,9 +261,8 @@ describe('sendEmailVariants', () => { senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', contributionAmount: '23.54', - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, + contributionFrontendLink, }, }) }) @@ -291,6 +299,7 @@ describe('sendEmailVariants', () => { senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', contributionMemoUpdated: 'This is a better contribution memo.', + contributionFrontendLink, }) }) @@ -309,9 +318,8 @@ describe('sendEmailVariants', () => { senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', contributionMemoUpdated: 'This is a better contribution memo.', - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, + contributionFrontendLink, supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, }, }) }) @@ -347,6 +355,7 @@ describe('sendEmailVariants', () => { senderFirstName: 'Bibi', senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', + contributionFrontendLink, }) }) @@ -364,9 +373,8 @@ describe('sendEmailVariants', () => { senderFirstName: 'Bibi', senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, + contributionFrontendLink, supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, }, }) }) @@ -402,6 +410,7 @@ describe('sendEmailVariants', () => { senderFirstName: 'Bibi', senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', + contributionFrontendLink, }) }) @@ -419,9 +428,8 @@ describe('sendEmailVariants', () => { senderFirstName: 'Bibi', senderLastName: 'Bloxberg', contributionMemo: 'My contribution.', - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, + contributionFrontendLink, supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, }, }) }) @@ -531,7 +539,6 @@ describe('sendEmailVariants', () => { senderEmail: 'bibi@bloxberg.de', transactionMemo: 
'You deserve it! 🙏🏼', transactionAmount: '17.65', - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, communityURL: CONFIG.COMMUNITY_URL, }, @@ -590,7 +597,6 @@ describe('sendEmailVariants', () => { senderLastName: 'Bloxberg', senderEmail: 'bibi@bloxberg.de', transactionAmount: '37.40', - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, communityURL: CONFIG.COMMUNITY_URL, }, diff --git a/backend/src/emails/sendEmailVariants.ts b/backend/src/emails/sendEmailVariants.ts index 3373f17ad..642f87387 100644 --- a/backend/src/emails/sendEmailVariants.ts +++ b/backend/src/emails/sendEmailVariants.ts @@ -5,7 +5,7 @@ import { decimalSeparatorByLanguage } from '@/util/utilities' import { sendEmailTranslated } from './sendEmailTranslated' -export const sendAddedContributionMessageEmail = (data: { +export interface ContributionEmailCommonData { firstName: string lastName: string email: string @@ -13,22 +13,35 @@ export const sendAddedContributionMessageEmail = (data: { senderFirstName: string senderLastName: string contributionMemo: string -}): Promise | boolean | null> => { + contributionFrontendLink: string +} + +function toContributionEmailLocales(data: ContributionEmailCommonData): Record { + return { + firstName: data.firstName, + lastName: data.lastName, + locale: data.language, + senderFirstName: data.senderFirstName, + senderLastName: data.senderLastName, + contributionMemo: data.contributionMemo, + contributionFrontendLink: data.contributionFrontendLink, + supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, + } +} + +export const sendAddedContributionMessageEmail = ( + data: ContributionEmailCommonData & { + message: string + }, +): Promise | boolean | null> => { return sendEmailTranslated({ receiver: { to: `${data.firstName} ${data.lastName} <${data.email}>`, }, template: 'addedContributionMessage', locals: { - firstName: data.firstName, - lastName: data.lastName, - locale: data.language, - senderFirstName: data.senderFirstName, - senderLastName: data.senderLastName, - contributionMemo: data.contributionMemo, - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, - supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, + ...toContributionEmailLocales(data), + message: data.message, }, }) } @@ -79,111 +92,53 @@ export const sendAccountMultiRegistrationEmail = (data: { }) } -export const sendContributionConfirmedEmail = (data: { - firstName: string - lastName: string - email: string - language: string - senderFirstName: string - senderLastName: string - contributionMemo: string - contributionAmount: Decimal -}): Promise | boolean | null> => { +export const sendContributionConfirmedEmail = ( + data: ContributionEmailCommonData & { + contributionAmount: Decimal + }, +): Promise | boolean | null> => { return sendEmailTranslated({ receiver: { to: `${data.firstName} ${data.lastName} <${data.email}>` }, template: 'contributionConfirmed', locals: { - firstName: data.firstName, - lastName: data.lastName, - locale: data.language, - senderFirstName: data.senderFirstName, - senderLastName: data.senderLastName, - contributionMemo: data.contributionMemo, + ...toContributionEmailLocales(data), contributionAmount: decimalSeparatorByLanguage(data.contributionAmount, data.language), - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, - supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, }, }) } -export const sendContributionChangedByModeratorEmail = (data: { - firstName: string - lastName: string - email: 
string - language: string - senderFirstName: string - senderLastName: string - contributionMemo: string - contributionMemoUpdated: string -}): Promise | boolean | null> => { +export const sendContributionChangedByModeratorEmail = ( + data: ContributionEmailCommonData & { + contributionMemoUpdated: string + }, +): Promise | boolean | null> => { return sendEmailTranslated({ receiver: { to: `${data.firstName} ${data.lastName} <${data.email}>` }, template: 'contributionChangedByModerator', locals: { - firstName: data.firstName, - lastName: data.lastName, - locale: data.language, - senderFirstName: data.senderFirstName, - senderLastName: data.senderLastName, - contributionMemo: data.contributionMemo, + ...toContributionEmailLocales(data), contributionMemoUpdated: data.contributionMemoUpdated, - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, - supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, }, }) } -export const sendContributionDeletedEmail = (data: { - firstName: string - lastName: string - email: string - language: string - senderFirstName: string - senderLastName: string - contributionMemo: string -}): Promise | boolean | null> => { +export const sendContributionDeletedEmail = ( + data: ContributionEmailCommonData, +): Promise | boolean | null> => { return sendEmailTranslated({ receiver: { to: `${data.firstName} ${data.lastName} <${data.email}>` }, template: 'contributionDeleted', - locals: { - firstName: data.firstName, - lastName: data.lastName, - locale: data.language, - senderFirstName: data.senderFirstName, - senderLastName: data.senderLastName, - contributionMemo: data.contributionMemo, - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, - supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, - }, + locals: toContributionEmailLocales(data), }) } -export const sendContributionDeniedEmail = (data: { - firstName: string - lastName: string - email: string - language: string - senderFirstName: string - senderLastName: string - contributionMemo: string -}): Promise | boolean | null> => { +export const sendContributionDeniedEmail = ( + data: ContributionEmailCommonData, +): Promise | boolean | null> => { return sendEmailTranslated({ receiver: { to: `${data.firstName} ${data.lastName} <${data.email}>` }, template: 'contributionDenied', - locals: { - firstName: data.firstName, - lastName: data.lastName, - locale: data.language, - senderFirstName: data.senderFirstName, - senderLastName: data.senderLastName, - contributionMemo: data.contributionMemo, - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, - supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, - communityURL: CONFIG.COMMUNITY_URL, - }, + locals: toContributionEmailLocales(data), }) } @@ -234,7 +189,6 @@ export const sendTransactionLinkRedeemedEmail = (data: { senderEmail: data.senderEmail, transactionMemo: data.transactionMemo, transactionAmount: decimalSeparatorByLanguage(data.transactionAmount, data.language), - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, communityURL: CONFIG.COMMUNITY_URL, }, @@ -264,7 +218,6 @@ export const sendTransactionReceivedEmail = (data: { senderLastName: data.senderLastName, senderEmail: data.senderEmail, transactionAmount: decimalSeparatorByLanguage(data.transactionAmount, data.language), - overviewURL: CONFIG.EMAIL_LINK_OVERVIEW, supportEmail: CONFIG.COMMUNITY_SUPPORT_MAIL, communityURL: CONFIG.COMMUNITY_URL, }, diff --git a/backend/src/emails/templates/addedContributionMessage/html.pug 
b/backend/src/emails/templates/addedContributionMessage/html.pug index ff7c89c30..df5ba35c4 100644 --- a/backend/src/emails/templates/addedContributionMessage/html.pug +++ b/backend/src/emails/templates/addedContributionMessage/html.pug @@ -7,8 +7,10 @@ block content p= t('emails.addedContributionMessage.commonGoodContributionMessage', { senderFirstName, senderLastName, contributionMemo }) .content h2= t('emails.addedContributionMessage.readMessage') - div(class="p_content")= t('emails.addedContributionMessage.toSeeAndAnswerMessage') + div(class="p_content") + p= t('emails.addedContributionMessage.message', { message }) + p= t('emails.addedContributionMessage.toSeeAndAnswerMessage') - a.button-3(href=`${communityURL}/community/contributions`) #{t('emails.general.toAccount')} + a.button-3(href=`${contributionFrontendLink}`) #{t('emails.general.toAccount')} include ../includes/doNotReply.pug diff --git a/backend/src/emails/templates/includes/contributionDetailsCTA.pug b/backend/src/emails/templates/includes/contributionDetailsCTA.pug index 0a3bd395d..ae3b77e6a 100644 --- a/backend/src/emails/templates/includes/contributionDetailsCTA.pug +++ b/backend/src/emails/templates/includes/contributionDetailsCTA.pug @@ -1,7 +1,7 @@ //- h2= t('emails.general.contributionDetails') div(class="p_content")= t('emails.contribution.toSeeContributionsAndMessages') -a.button-3(href=`${communityURL}/community/contributions`) #{t('emails.general.toAccount')} +a.button-3(href=`${contributionFrontendLink}`) #{t('emails.general.toAccount')} div(class="p_content")= t('emails.general.orCopyLink') -a.clink(href=`${communityURL}/community/contributions`) #{`${communityURL}/community/contributions`} \ No newline at end of file +a.clink(href=`${contributionFrontendLink}`) #{`${contributionFrontendLink}`} \ No newline at end of file diff --git a/backend/src/federation/authenticateCommunities.ts b/backend/src/federation/authenticateCommunities.ts index c4263445b..2a1c6f51d 100644 --- a/backend/src/federation/authenticateCommunities.ts +++ b/backend/src/federation/authenticateCommunities.ts @@ -4,12 +4,15 @@ import { validate as validateUUID, version as versionUUID } from 'uuid' import { CONFIG } from '@/config' import { AuthenticationClient as V1_0_AuthenticationClient } from '@/federation/client/1_0/AuthenticationClient' -import { backendLogger as logger } from '@/server/logger' import { ensureUrlEndsWithSlash } from '@/util/utilities' +import { LOG4JS_FEDERATION_CATEGORY_NAME } from '@/federation' +import { getLogger } from 'log4js' import { OpenConnectionArgs } from './client/1_0/model/OpenConnectionArgs' import { AuthenticationClientFactory } from './client/AuthenticationClientFactory' +const logger = getLogger(`${LOG4JS_FEDERATION_CATEGORY_NAME}.authenticateCommunities`) + export async function startCommunityAuthentication( foreignFedCom: DbFederatedCommunity, ): Promise { diff --git a/backend/src/federation/client/1_0/AuthenticationClient.ts b/backend/src/federation/client/1_0/AuthenticationClient.ts index 264afe3a0..38996433f 100644 --- a/backend/src/federation/client/1_0/AuthenticationClient.ts +++ b/backend/src/federation/client/1_0/AuthenticationClient.ts @@ -1,12 +1,15 @@ import { FederatedCommunity as DbFederatedCommunity } from 'database' import { GraphQLClient } from 'graphql-request' -import { backendLogger as logger } from '@/server/logger' import { ensureUrlEndsWithSlash } from '@/util/utilities' +import { LOG4JS_FEDERATION_CLIENT1_0_CATEGORY_NAME } from '@/federation/client/1_0' +import { getLogger } from 
'log4js' import { OpenConnectionArgs } from './model/OpenConnectionArgs' import { openConnection } from './query/openConnection' +const logger = getLogger(`${LOG4JS_FEDERATION_CLIENT1_0_CATEGORY_NAME}.AuthenticationClient`) + export class AuthenticationClient { dbCom: DbFederatedCommunity endpoint: string @@ -25,25 +28,19 @@ export class AuthenticationClient { } async openConnection(args: OpenConnectionArgs): Promise { - logger.debug(`Authentication: openConnection at ${this.endpoint} for args:`, args) + logger.debug(`openConnection at ${this.endpoint} for args:`, args) try { const { data } = await this.client.rawRequest<{ openConnection: boolean }>(openConnection, { args, }) if (!data?.openConnection) { - logger.warn( - 'Authentication: openConnection without response data from endpoint', - this.endpoint, - ) + logger.warn('openConnection without response data from endpoint', this.endpoint) return false } - logger.debug( - 'Authentication: openConnection successfully started with endpoint', - this.endpoint, - ) + logger.debug('openConnection successfully started with endpoint', this.endpoint) return true } catch (err) { - logger.error('Authentication: error on openConnection: ', err) + logger.error('error on openConnection: ', err) } } } diff --git a/backend/src/federation/client/1_0/FederationClient.ts b/backend/src/federation/client/1_0/FederationClient.ts index b83da8a8b..b548a97e1 100644 --- a/backend/src/federation/client/1_0/FederationClient.ts +++ b/backend/src/federation/client/1_0/FederationClient.ts @@ -1,15 +1,18 @@ import { FederatedCommunity as DbFederatedCommunity } from 'database' import { GraphQLClient } from 'graphql-request' +import { LOG4JS_FEDERATION_CLIENT1_0_CATEGORY_NAME } from '@/federation/client/1_0' import { getPublicCommunityInfo } from '@/federation/client/1_0/query/getPublicCommunityInfo' import { getPublicKey } from '@/federation/client/1_0/query/getPublicKey' -import { backendLogger as logger } from '@/server/logger' import { ensureUrlEndsWithSlash } from '@/util/utilities' +import { getLogger } from 'log4js' import { PublicCommunityInfoLoggingView } from './logging/PublicCommunityInfoLogging.view' import { GetPublicKeyResult } from './model/GetPublicKeyResult' import { PublicCommunityInfo } from './model/PublicCommunityInfo' +const logger = getLogger(`${LOG4JS_FEDERATION_CLIENT1_0_CATEGORY_NAME}.FederationClient`) + export class FederationClient { dbCom: DbFederatedCommunity endpoint: string @@ -32,25 +35,25 @@ export class FederationClient { } getPublicKey = async (): Promise => { - logger.debug('Federation: getPublicKey from endpoint', this.endpoint) + logger.debug('getPublicKey from endpoint', this.endpoint) try { const { data } = await this.client.rawRequest<{ getPublicKey: GetPublicKeyResult }>( getPublicKey, {}, ) if (!data?.getPublicKey?.publicKey) { - logger.warn('Federation: getPublicKey without response data from endpoint', this.endpoint) + logger.warn('getPublicKey without response data from endpoint', this.endpoint) return } logger.debug( - 'Federation: getPublicKey successful from endpoint', + 'getPublicKey successful from endpoint', this.endpoint, data.getPublicKey.publicKey, ) return data.getPublicKey.publicKey } catch (err) { const errorString = JSON.stringify(err) - logger.warn('Federation: getPublicKey failed for endpoint', { + logger.warn('getPublicKey failed for endpoint', { endpoint: this.endpoint, err: errorString.length <= 200 ? 
errorString : errorString.substring(0, 200) + '...', }) @@ -58,20 +61,17 @@ export class FederationClient { } getPublicCommunityInfo = async (): Promise => { - logger.debug(`Federation: getPublicCommunityInfo with endpoint='${this.endpoint}'...`) + logger.debug(`getPublicCommunityInfo with endpoint='${this.endpoint}'...`) try { const { data } = await this.client.rawRequest<{ getPublicCommunityInfo: PublicCommunityInfo }>(getPublicCommunityInfo, {}) if (!data?.getPublicCommunityInfo?.name) { - logger.warn( - 'Federation: getPublicCommunityInfo without response data from endpoint', - this.endpoint, - ) + logger.warn('getPublicCommunityInfo without response data from endpoint', this.endpoint) return } - logger.debug(`Federation: getPublicCommunityInfo successful from endpoint=${this.endpoint}`) + logger.debug(`getPublicCommunityInfo successful from endpoint=${this.endpoint}`) logger.debug( `publicCommunityInfo:`, new PublicCommunityInfoLoggingView(data.getPublicCommunityInfo), @@ -80,7 +80,7 @@ export class FederationClient { } catch (err) { logger.warn(' err', err) const errorString = JSON.stringify(err) - logger.warn('Federation: getPublicCommunityInfo failed for endpoint', { + logger.warn('getPublicCommunityInfo failed for endpoint', { endpoint: this.endpoint, err: errorString.length <= 200 ? errorString : errorString.substring(0, 200) + '...', }) diff --git a/backend/src/federation/client/1_0/SendCoinsClient.ts b/backend/src/federation/client/1_0/SendCoinsClient.ts index 91e7b827c..cc0c74846 100644 --- a/backend/src/federation/client/1_0/SendCoinsClient.ts +++ b/backend/src/federation/client/1_0/SendCoinsClient.ts @@ -2,9 +2,10 @@ import { FederatedCommunity as DbFederatedCommunity } from 'database' import { GraphQLClient } from 'graphql-request' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { ensureUrlEndsWithSlash } from '@/util/utilities' +import { getLogger } from 'log4js' +import { LOG4JS_FEDERATION_CLIENT1_0_CATEGORY_NAME } from '.' 
import { SendCoinsArgsLoggingView } from './logging/SendCoinsArgsLogging.view' import { SendCoinsResultLoggingView } from './logging/SendCoinsResultLogging.view' import { SendCoinsArgs } from './model/SendCoinsArgs' @@ -14,6 +15,8 @@ import { revertSettledSendCoins as revertSettledSendCoinsQuery } from './query/r import { settleSendCoins as settleSendCoinsQuery } from './query/settleSendCoins' import { voteForSendCoins as voteForSendCoinsQuery } from './query/voteForSendCoins' +const logger = getLogger(`${LOG4JS_FEDERATION_CLIENT1_0_CATEGORY_NAME}.SendCoinsClient`) + export class SendCoinsClient { dbCom: DbFederatedCommunity endpoint: string @@ -32,123 +35,87 @@ export class SendCoinsClient { } async voteForSendCoins(args: SendCoinsArgs): Promise { - logger.debug('X-Com: voteForSendCoins against endpoint=', this.endpoint) + logger.debug('voteForSendCoins against endpoint=', this.endpoint) try { - logger.debug( - `X-Com: SendCoinsClient: voteForSendCoins with args=`, - new SendCoinsArgsLoggingView(args), - ) + logger.debug(`voteForSendCoins with args=`, new SendCoinsArgsLoggingView(args)) const { data } = await this.client.rawRequest<{ voteForSendCoins: SendCoinsResult }>( voteForSendCoinsQuery, { args }, ) const result = data.voteForSendCoins if (!data?.voteForSendCoins?.vote) { - logger.debug( - 'X-Com: voteForSendCoins failed with: ', - new SendCoinsResultLoggingView(result), - ) + logger.debug('voteForSendCoins failed with: ', new SendCoinsResultLoggingView(result)) return new SendCoinsResult() } logger.debug( - 'X-Com: voteForSendCoins successful with result=', + 'voteForSendCoins successful with result=', new SendCoinsResultLoggingView(result), ) return result } catch (err) { - throw new LogError(`X-Com: voteForSendCoins failed for endpoint=${this.endpoint}:`, err) + throw new LogError(`voteForSendCoins failed for endpoint=${this.endpoint}:`, err) } } async revertSendCoins(args: SendCoinsArgs): Promise { - logger.debug('X-Com: revertSendCoins against endpoint=', this.endpoint) + logger.debug('revertSendCoins against endpoint=', this.endpoint) try { - logger.debug( - `X-Com: SendCoinsClient: revertSendCoins with args=`, - new SendCoinsArgsLoggingView(args), - ) + logger.debug(`revertSendCoins with args=`, new SendCoinsArgsLoggingView(args)) const { data } = await this.client.rawRequest<{ revertSendCoins: boolean }>( revertSendCoinsQuery, { args }, ) - logger.debug(`X-Com: SendCoinsClient: after revertSendCoins: data=`, data) + logger.debug(`after revertSendCoins: data=`, data) if (!data?.revertSendCoins) { - logger.warn('X-Com: revertSendCoins without response data from endpoint', this.endpoint) + logger.warn('revertSendCoins without response data from endpoint', this.endpoint) return false } - logger.debug( - `X-Com: SendCoinsClient: revertSendCoins successful from endpoint=${this.endpoint}`, - ) + logger.debug(`revertSendCoins successful from endpoint=${this.endpoint}`) return true } catch (err) { - logger.error( - `X-Com: SendCoinsClient: revertSendCoins failed for endpoint=${this.endpoint}`, - err, - ) + logger.error(`revertSendCoins failed for endpoint=${this.endpoint}`, err) return false } } async settleSendCoins(args: SendCoinsArgs): Promise { - logger.debug(`X-Com: settleSendCoins against endpoint='${this.endpoint}'...`) + logger.debug(`settleSendCoins against endpoint='${this.endpoint}'...`) try { - logger.debug( - `X-Com: SendCoinsClient: settleSendCoins with args=`, - new SendCoinsArgsLoggingView(args), - ) + logger.debug(`settleSendCoins with args=`, new 
SendCoinsArgsLoggingView(args)) const { data } = await this.client.rawRequest<{ settleSendCoins: boolean }>( settleSendCoinsQuery, { args }, ) - logger.debug(`X-Com: SendCoinsClient: after settleSendCoins: data=`, data) + logger.debug(`after settleSendCoins: data=`, data) if (!data?.settleSendCoins) { - logger.warn( - 'X-Com: SendCoinsClient: settleSendCoins without response data from endpoint', - this.endpoint, - ) + logger.warn('settleSendCoins without response data from endpoint', this.endpoint) return false } - logger.debug( - `X-Com: SendCoinsClient: settleSendCoins successful from endpoint=${this.endpoint}`, - ) + logger.debug(`settleSendCoins successful from endpoint=${this.endpoint}`) return true } catch (err) { - throw new LogError( - `X-Com: SendCoinsClient: settleSendCoins failed for endpoint=${this.endpoint}`, - err, - ) + throw new LogError(`settleSendCoins failed for endpoint=${this.endpoint}`, err) } } async revertSettledSendCoins(args: SendCoinsArgs): Promise { - logger.debug(`X-Com: revertSettledSendCoins against endpoint='${this.endpoint}'...`) + logger.debug(`revertSettledSendCoins against endpoint='${this.endpoint}'...`) try { - logger.debug( - `X-Com: SendCoinsClient: revertSettledSendCoins with args=`, - new SendCoinsArgsLoggingView(args), - ) + logger.debug(`revertSettledSendCoins with args=`, new SendCoinsArgsLoggingView(args)) const { data } = await this.client.rawRequest<{ revertSettledSendCoins: boolean }>( revertSettledSendCoinsQuery, { args }, ) - logger.debug(`X-Com: SendCoinsClient: after revertSettledSendCoins: data=`, data) + logger.debug(`after revertSettledSendCoins: data=`, data) if (!data?.revertSettledSendCoins) { - logger.warn( - 'X-Com: SendCoinsClient: revertSettledSendCoins without response data from endpoint', - this.endpoint, - ) + logger.warn('revertSettledSendCoins without response data from endpoint', this.endpoint) return false } - logger.debug( - `X-Com: SendCoinsClient: revertSettledSendCoins successful from endpoint=${this.endpoint}`, - ) + logger.debug(`revertSettledSendCoins successful from endpoint=${this.endpoint}`) return true } catch (err) { - throw new LogError( - `X-Com: SendCoinsClient: revertSettledSendCoins failed for endpoint=${this.endpoint}`, - err, - ) + throw new LogError(`revertSettledSendCoins failed for endpoint=${this.endpoint}`, err) } } } diff --git a/backend/src/federation/client/1_0/index.ts b/backend/src/federation/client/1_0/index.ts new file mode 100644 index 000000000..c21700126 --- /dev/null +++ b/backend/src/federation/client/1_0/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_FEDERATION_CATEGORY_NAME } from '@/federation' + +export const LOG4JS_FEDERATION_CLIENT1_0_CATEGORY_NAME = `${LOG4JS_FEDERATION_CATEGORY_NAME}.client.1_0` diff --git a/backend/src/federation/index.ts b/backend/src/federation/index.ts new file mode 100644 index 000000000..c1684852b --- /dev/null +++ b/backend/src/federation/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' + +export const LOG4JS_FEDERATION_CATEGORY_NAME = `${LOG4JS_BASE_CATEGORY_NAME}.federation` diff --git a/backend/src/federation/validateCommunities.test.ts b/backend/src/federation/validateCommunities.test.ts index 9e5120578..160068140 100644 --- a/backend/src/federation/validateCommunities.test.ts +++ b/backend/src/federation/validateCommunities.test.ts @@ -2,18 +2,25 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { FederatedCommunity as DbFederatedCommunity } from 'database' import { GraphQLClient } from 
'graphql-request' import { Response } from 'graphql-request/dist/types' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' +import { LOG4JS_FEDERATION_CATEGORY_NAME } from '@/federation' +import { LOG4JS_FEDERATION_CLIENT1_0_CATEGORY_NAME } from '@/federation/client/1_0' import { cleanDB, testEnvironment } from '@test/helpers' -import { logger } from '@test/testSetup' +import { clearLogs, getLogger, printLogs } from 'config-schema/test/testSetup' import { validateCommunities } from './validateCommunities' -let con: Connection +const logger = getLogger(`${LOG4JS_FEDERATION_CATEGORY_NAME}.validateCommunities`) +const federationClientLogger = getLogger( + `${LOG4JS_FEDERATION_CLIENT1_0_CATEGORY_NAME}.FederationClient`, +) + +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { @@ -24,7 +31,7 @@ beforeAll(async () => { afterAll(async () => { // await cleanDB() - await con.close() + await con.destroy() }) describe('validate Communities', () => { @@ -49,7 +56,7 @@ describe('validate Communities', () => { }) it('logs zero communities found', () => { - expect(logger.debug).toBeCalledWith(`Federation: found 0 dbCommunities`) + expect(logger.debug).toBeCalledWith(`found 0 dbCommunities`) }) describe('with one Community of api 1_0 but missing pubKey response', () => { @@ -79,11 +86,11 @@ describe('validate Communities', () => { }) it('logs one community found', () => { - expect(logger.debug).toBeCalledWith(`Federation: found 1 dbCommunities`) + expect(logger.debug).toBeCalledWith(`found 1 dbCommunities`) }) it('logs requestGetPublicKey missing response data ', () => { - expect(logger.warn).toBeCalledWith( - 'Federation: getPublicKey without response data from endpoint', + expect(federationClientLogger.warn).toBeCalledWith( + 'getPublicKey without response data from endpoint', 'http//localhost:5001/api/1_0/', ) }) @@ -153,17 +160,17 @@ describe('validate Communities', () => { }) it('logs one community found', () => { - expect(logger.debug).toBeCalledWith(`Federation: found 1 dbCommunities`) + expect(logger.debug).toBeCalledWith(`found 1 dbCommunities`) }) it('logs requestGetPublicKey for community api 1_0 ', () => { - expect(logger.debug).toBeCalledWith( - 'Federation: getPublicKey from endpoint', + expect(federationClientLogger.debug).toBeCalledWith( + 'getPublicKey from endpoint', 'http//localhost:5001/api/1_0/', ) }) it('logs not matching publicKeys', () => { expect(logger.debug).toBeCalledWith( - 'Federation: received not matching publicKey:', + 'received not matching publicKey:', 'somePubKey', expect.stringMatching('11111111111111111111111111111111'), ) @@ -203,18 +210,18 @@ describe('validate Communities', () => { }) it('logs one community found', () => { - expect(logger.debug).toBeCalledWith(`Federation: found 1 dbCommunities`) + expect(logger.debug).toBeCalledWith(`found 1 dbCommunities`) }) it('logs requestGetPublicKey for community api 1_0 ', () => { - expect(logger.debug).toBeCalledWith( - 'Federation: getPublicKey from endpoint', + expect(federationClientLogger.debug).toBeCalledWith( + 'getPublicKey from endpoint', 'http//localhost:5001/api/1_0/', ) }) it('logs community pubKey verified', () => { - expect(logger.debug).toHaveBeenNthCalledWith( - 5, - 'Federation: getPublicKey successful from endpoint', + expect(federationClientLogger.debug).toHaveBeenNthCalledWith( + 2, + 'getPublicKey successful from endpoint', 
'http//localhost:5001/api/1_0/', '11111111111111111111111111111111', ) @@ -269,17 +276,17 @@ describe('validate Communities', () => { await validateCommunities() }) it('logs two communities found', () => { - expect(logger.debug).toBeCalledWith(`Federation: found 2 dbCommunities`) + expect(logger.debug).toBeCalledWith(`found 2 dbCommunities`) }) it('logs requestGetPublicKey for community api 1_0 ', () => { - expect(logger.debug).toBeCalledWith( - 'Federation: getPublicKey from endpoint', + expect(federationClientLogger.debug).toBeCalledWith( + 'getPublicKey from endpoint', 'http//localhost:5001/api/1_0/', ) }) it('logs requestGetPublicKey for community api 1_1 ', () => { - expect(logger.debug).toBeCalledWith( - 'Federation: getPublicKey from endpoint', + expect(federationClientLogger.debug).toBeCalledWith( + 'getPublicKey from endpoint', 'http//localhost:5001/api/1_1/', ) }) @@ -321,23 +328,23 @@ describe('validate Communities', () => { await validateCommunities() }) it('logs three community found', () => { - expect(logger.debug).toBeCalledWith(`Federation: found 3 dbCommunities`) + expect(logger.debug).toBeCalledWith(`found 3 dbCommunities`) }) it('logs requestGetPublicKey for community api 1_0 ', () => { - expect(logger.debug).toBeCalledWith( - 'Federation: getPublicKey from endpoint', + expect(federationClientLogger.debug).toBeCalledWith( + 'getPublicKey from endpoint', 'http//localhost:5001/api/1_0/', ) }) it('logs requestGetPublicKey for community api 1_1 ', () => { - expect(logger.debug).toBeCalledWith( - 'Federation: getPublicKey from endpoint', + expect(federationClientLogger.debug).toBeCalledWith( + 'getPublicKey from endpoint', 'http//localhost:5001/api/1_1/', ) }) it('logs unsupported api for community with api 2_0 ', () => { expect(logger.debug).toBeCalledWith( - 'Federation: dbCom with unsupported apiVersion', + 'dbCom with unsupported apiVersion', dbCom.endPoint, '2_0', ) diff --git a/backend/src/federation/validateCommunities.ts b/backend/src/federation/validateCommunities.ts index 2a1bc630e..dc4747352 100644 --- a/backend/src/federation/validateCommunities.ts +++ b/backend/src/federation/validateCommunities.ts @@ -5,23 +5,23 @@ import { } from 'database' import { IsNull } from 'typeorm' +import { LOG4JS_FEDERATION_CATEGORY_NAME } from '@/federation' import { FederationClient as V1_0_FederationClient } from '@/federation/client/1_0/FederationClient' import { PublicCommunityInfo } from '@/federation/client/1_0/model/PublicCommunityInfo' import { FederationClientFactory } from '@/federation/client/FederationClientFactory' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' - +import { getLogger } from 'log4js' import { startCommunityAuthentication } from './authenticateCommunities' import { PublicCommunityInfoLoggingView } from './client/1_0/logging/PublicCommunityInfoLogging.view' import { ApiVersionType } from './enum/apiVersionType' +const logger = getLogger(`${LOG4JS_FEDERATION_CATEGORY_NAME}.validateCommunities`) + export async function startValidateCommunities(timerInterval: number): Promise { if (Number.isNaN(timerInterval) || timerInterval <= 0) { throw new LogError('FEDERATION_VALIDATE_COMMUNITY_TIMER is not a positive number') } - logger.info( - `Federation: startValidateCommunities loop with an interval of ${timerInterval} ms...`, - ) + logger.info(`startValidateCommunities loop with an interval of ${timerInterval} ms...`) // delete all foreign federated community entries to avoid increasing validation efforts and 
log-files await DbFederatedCommunity.delete({ foreign: true }) @@ -40,17 +40,13 @@ export async function validateCommunities(): Promise { .orWhere('verified_at < last_announced_at') .getMany() - logger.debug(`Federation: found ${dbFederatedCommunities.length} dbCommunities`) + logger.debug(`found ${dbFederatedCommunities.length} dbCommunities`) for (const dbCom of dbFederatedCommunities) { - logger.debug('Federation: dbCom', new FederatedCommunityLoggingView(dbCom)) + logger.debug('dbCom', new FederatedCommunityLoggingView(dbCom)) const apiValueStrings: string[] = Object.values(ApiVersionType) logger.debug(`suppported ApiVersions=`, apiValueStrings) if (!apiValueStrings.includes(dbCom.apiVersion)) { - logger.debug( - 'Federation: dbCom with unsupported apiVersion', - dbCom.endPoint, - dbCom.apiVersion, - ) + logger.debug('dbCom with unsupported apiVersion', dbCom.endPoint, dbCom.apiVersion) continue } try { @@ -60,21 +56,17 @@ export async function validateCommunities(): Promise { const pubKey = await client.getPublicKey() if (pubKey && pubKey === dbCom.publicKey.toString('hex')) { await DbFederatedCommunity.update({ id: dbCom.id }, { verifiedAt: new Date() }) - logger.debug(`Federation: verified community with:`, dbCom.endPoint) + logger.debug(`verified community with:`, dbCom.endPoint) const pubComInfo = await client.getPublicCommunityInfo() if (pubComInfo) { await writeForeignCommunity(dbCom, pubComInfo) await startCommunityAuthentication(dbCom) - logger.debug(`Federation: write publicInfo of community: name=${pubComInfo.name}`) + logger.debug(`write publicInfo of community: name=${pubComInfo.name}`) } else { - logger.debug('Federation: missing result of getPublicCommunityInfo') + logger.debug('missing result of getPublicCommunityInfo') } } else { - logger.debug( - 'Federation: received not matching publicKey:', - pubKey, - dbCom.publicKey.toString('hex'), - ) + logger.debug('received not matching publicKey:', pubKey, dbCom.publicKey.toString('hex')) } } } catch (err) { diff --git a/backend/src/graphql/resolver/BalanceResolver.ts b/backend/src/graphql/resolver/BalanceResolver.ts index 2ea34cc5a..f3c7d4709 100644 --- a/backend/src/graphql/resolver/BalanceResolver.ts +++ b/backend/src/graphql/resolver/BalanceResolver.ts @@ -9,9 +9,10 @@ import { RIGHTS } from '@/auth/RIGHTS' import { BalanceLoggingView } from '@/logging/BalanceLogging.view' import { DecayLoggingView } from '@/logging/DecayLogging.view' import { Context, getUser } from '@/server/context' -import { backendLogger as logger } from '@/server/logger' import { calculateDecay } from '@/util/decay' +import { getLogger } from 'log4js' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.' 
import { GdtResolver } from './GdtResolver' import { getLastTransaction } from './util/getLastTransaction' import { transactionLinkSummary } from './util/transactionLinkSummary' @@ -23,9 +24,10 @@ export class BalanceResolver { async balance(@Ctx() context: Context): Promise { const user = getUser(context) const now = new Date() + const logger = getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.BalanceResolver`) logger.addContext('user', user.id) - logger.info(`balance(userId=${user.id})...`) + logger.info(`balance...`) let balanceGDT if (!context.balanceGDT) { diff --git a/backend/src/graphql/resolver/CommunityResolver.test.ts b/backend/src/graphql/resolver/CommunityResolver.test.ts index 2400fdc75..428915765 100644 --- a/backend/src/graphql/resolver/CommunityResolver.test.ts +++ b/backend/src/graphql/resolver/CommunityResolver.test.ts @@ -1,11 +1,11 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { Community as DbCommunity, FederatedCommunity as DbFederatedCommunity } from 'database' import { GraphQLError } from 'graphql/error/GraphQLError' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { v4 as uuidv4 } from 'uuid' import { cleanDB, testEnvironment } from '@test/helpers' -import { i18n as localization, logger } from '@test/testSetup' +import { i18n as localization } from '@test/testSetup' import { userFactory } from '@/seeds/factory/user' import { login, updateHomeCommunityQuery } from '@/seeds/graphql/mutations' @@ -18,19 +18,23 @@ import { } from '@/seeds/graphql/queries' import { peterLustig } from '@/seeds/users/peter-lustig' +import { getLogger } from 'config-schema/test/testSetup' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.' import { getCommunityByUuid } from './util/communities' jest.mock('@/password/EncryptorUtils') +const logger = getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.CommunityResolver`) + // to do: We need a setup for the tests that closes the connection let mutate: ApolloServerTestClient['mutate'] let query: ApolloServerTestClient['query'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } const peterLoginData = { @@ -49,7 +53,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) // real valid ed25519 key pairs diff --git a/backend/src/graphql/resolver/ContributionLinkResolver.test.ts b/backend/src/graphql/resolver/ContributionLinkResolver.test.ts index abcf231be..9db6a7d99 100644 --- a/backend/src/graphql/resolver/ContributionLinkResolver.test.ts +++ b/backend/src/graphql/resolver/ContributionLinkResolver.test.ts @@ -2,11 +2,11 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { ContributionLink as DbContributionLink, Event as DbEvent } from 'database' import { Decimal } from 'decimal.js-light' import { GraphQLError } from 'graphql' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { cleanDB, resetToken, testEnvironment } from '@test/helpers' -import { logger } from '@test/testSetup' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { EventType } from '@/event/Events' import { userFactory } from '@/seeds/factory/user' import { @@ -18,16 +18,19 @@ import { import { listContributionLinks } from '@/seeds/graphql/queries' import { bibiBloxberg } from '@/seeds/users/bibi-bloxberg' import { peterLustig } from '@/seeds/users/peter-lustig' +import { getLogger } from 
'config-schema/test/testSetup' jest.mock('@/password/EncryptorUtils') +const logErrorLogger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.LogError`) + let mutate: ApolloServerTestClient['mutate'] let query: ApolloServerTestClient['query'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { @@ -42,7 +45,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('Contribution Links', () => { @@ -286,7 +289,7 @@ describe('Contribution Links', () => { }) it('logs the error "A Start-Date must be set"', () => { - expect(logger.error).toBeCalledWith('A Start-Date must be set') + expect(logErrorLogger.error).toBeCalledWith('A Start-Date must be set') }) it('returns an error if missing endDate', async () => { @@ -307,7 +310,7 @@ describe('Contribution Links', () => { }) it('logs the error "An End-Date must be set"', () => { - expect(logger.error).toBeCalledWith('An End-Date must be set') + expect(logErrorLogger.error).toBeCalledWith('An End-Date must be set') }) it('returns an error if endDate is before startDate', async () => { @@ -331,7 +334,7 @@ describe('Contribution Links', () => { }) it('logs the error "The value of validFrom must before or equals the validTo"', () => { - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( `The value of validFrom must before or equals the validTo`, ) }) @@ -531,7 +534,7 @@ describe('Contribution Links', () => { }) it('logs the error "Contribution Link not found"', () => { - expect(logger.error).toBeCalledWith('Contribution Link not found', -1) + expect(logErrorLogger.error).toBeCalledWith('Contribution Link not found', -1) }) describe('valid id', () => { @@ -613,7 +616,7 @@ describe('Contribution Links', () => { }) it('logs the error "Contribution Link not found"', () => { - expect(logger.error).toBeCalledWith('Contribution Link not found', -1) + expect(logErrorLogger.error).toBeCalledWith('Contribution Link not found', -1) }) }) diff --git a/backend/src/graphql/resolver/ContributionMessageResolver.test.ts b/backend/src/graphql/resolver/ContributionMessageResolver.test.ts index f43639101..a9aa3fa91 100644 --- a/backend/src/graphql/resolver/ContributionMessageResolver.test.ts +++ b/backend/src/graphql/resolver/ContributionMessageResolver.test.ts @@ -1,14 +1,16 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { Contribution as DbContribution, Event as DbEvent } from 'database' import { GraphQLError } from 'graphql' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { ContributionStatus } from '@enum/ContributionStatus' import { cleanDB, resetToken, testEnvironment } from '@test/helpers' -import { i18n as localization, logger } from '@test/testSetup' +import { i18n as localization } from '@test/testSetup' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { sendAddedContributionMessageEmail } from '@/emails/sendEmailVariants' import { EventType } from '@/event/Events' +import { LOG4JS_INTERACTION_CATEGORY_NAME } from '@/interactions' import { userFactory } from '@/seeds/factory/user' import { adminCreateContributionMessage, @@ -20,6 +22,14 @@ import { adminListContributionMessages, listContributionMessages } from '@/seeds import { bibiBloxberg } from '@/seeds/users/bibi-bloxberg' import { bobBaumeister } from '@/seeds/users/bob-baumeister' import { 
peterLustig } from '@/seeds/users/peter-lustig' +import { clearLogs, getLogger, printLogs } from 'config-schema/test/testSetup' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.' + +const logger = getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.ContributionMessageResolver`) +const logErrorLogger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.LogError`) +const interactionLogger = getLogger( + `${LOG4JS_INTERACTION_CATEGORY_NAME}.updateUnconfirmedContribution`, +) jest.mock('@/password/EncryptorUtils') jest.mock('@/emails/sendEmailVariants', () => { @@ -34,11 +44,11 @@ jest.mock('@/emails/sendEmailVariants', () => { }) let mutate: ApolloServerTestClient['mutate'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } let result: any @@ -51,7 +61,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('ContributionMessageResolver', () => { @@ -121,7 +131,7 @@ describe('ContributionMessageResolver', () => { }) it('logs the error "ContributionMessage was not sent successfully: Error: Contribution not found"', () => { - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'ContributionMessage was not sent successfully: Error: Contribution not found', new Error('Contribution not found'), ) @@ -148,9 +158,7 @@ describe('ContributionMessageResolver', () => { message: 'Test', }, }) - expect(logger.debug).toBeCalledTimes(5) - expect(logger.debug).toHaveBeenNthCalledWith( - 5, + expect(interactionLogger.debug).toBeCalledWith( 'use UnconfirmedContributionUserAddMessageRole', ) expect(mutationResult).toEqual( @@ -244,9 +252,11 @@ describe('ContributionMessageResolver', () => { lastName: 'Bloxberg', email: 'bibi@bloxberg.de', language: 'de', + message: 'Admin Test', senderFirstName: 'Peter', senderLastName: 'Lustig', contributionMemo: 'Test env contribution', + contributionFrontendLink: `http://localhost/contributions/own-contributions/1#contributionListItem-${result.data.createContribution.id}`, }) }) @@ -325,7 +335,7 @@ describe('ContributionMessageResolver', () => { }) it('logs the error "ContributionMessage was not sent successfully: Error: Contribution not found"', () => { - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'ContributionMessage was not sent successfully: Error: Contribution not found', new Error('Contribution not found'), ) @@ -346,9 +356,7 @@ describe('ContributionMessageResolver', () => { }, }) - expect(logger.debug).toBeCalledTimes(5) - expect(logger.debug).toHaveBeenNthCalledWith( - 5, + expect(interactionLogger.debug).toBeCalledWith( 'use UnconfirmedContributionAdminAddMessageRole', ) @@ -380,10 +388,7 @@ describe('ContributionMessageResolver', () => { message: 'Test', }, }) - - expect(logger.debug).toBeCalledTimes(5) - expect(logger.debug).toHaveBeenNthCalledWith( - 5, + expect(interactionLogger.debug).toBeCalledWith( 'use UnconfirmedContributionAdminAddMessageRole', ) @@ -399,13 +404,12 @@ describe('ContributionMessageResolver', () => { }) it('logs the error "ContributionMessage was not sent successfully: Error: missing right ADMIN_CREATE_CONTRIBUTION_MESSAGE for user"', () => { - expect(logger.debug).toBeCalledTimes(5) - expect(logger.error).toHaveBeenNthCalledWith( + expect(logErrorLogger.error).toHaveBeenNthCalledWith( 1, 'missing right ADMIN_CREATE_CONTRIBUTION_MESSAGE for user', expect.any(Number), ) - 
expect(logger.error).toHaveBeenNthCalledWith( + expect(logErrorLogger.error).toHaveBeenNthCalledWith( 2, 'ContributionMessage was not sent successfully: Error: missing right ADMIN_CREATE_CONTRIBUTION_MESSAGE for user', new Error('missing right ADMIN_CREATE_CONTRIBUTION_MESSAGE for user'), diff --git a/backend/src/graphql/resolver/ContributionMessageResolver.ts b/backend/src/graphql/resolver/ContributionMessageResolver.ts index a1701f048..505711b9e 100644 --- a/backend/src/graphql/resolver/ContributionMessageResolver.ts +++ b/backend/src/graphql/resolver/ContributionMessageResolver.ts @@ -1,10 +1,11 @@ import { + AppDatabase, Contribution as DbContribution, ContributionMessage as DbContributionMessage, User as DbUser, } from 'database' import { Arg, Args, Authorized, Ctx, Int, Mutation, Query, Resolver } from 'type-graphql' -import { EntityManager, FindOptionsRelations, getConnection } from 'typeorm' +import { EntityManager, FindOptionsRelations } from 'typeorm' import { ContributionMessageArgs } from '@arg/ContributionMessageArgs' import { Paginated } from '@arg/Paginated' @@ -21,10 +22,15 @@ import { import { UpdateUnconfirmedContributionContext } from '@/interactions/updateUnconfirmedContribution/UpdateUnconfirmedContribution.context' import { LogError } from '@/server/LogError' import { Context, getUser } from '@/server/context' -import { backendLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.' +import { contributionFrontendLink } from './util/contributions' import { findContributionMessages } from './util/findContributionMessages' +const db = AppDatabase.getInstance() +const createLogger = () => getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.ContributionMessageResolver`) + @Resolver() export class ContributionMessageResolver { @Authorized([RIGHTS.CREATE_CONTRIBUTION_MESSAGE]) @@ -43,9 +49,9 @@ export class ContributionMessageResolver { let finalContributionMessage: DbContributionMessage | undefined try { - await getConnection().transaction( - 'REPEATABLE READ', - async (transactionalEntityManager: EntityManager) => { + await db + .getDataSource() + .transaction('REPEATABLE READ', async (transactionalEntityManager: EntityManager) => { const { contribution, contributionMessage, contributionChanged } = await updateUnconfirmedContributionContext.run(transactionalEntityManager) @@ -62,8 +68,7 @@ export class ContributionMessageResolver { finalContribution = contribution finalContributionMessage = contributionMessage - }, - ) + }) } catch (e) { throw new LogError(`ContributionMessage was not sent successfully: ${e}`, e) } @@ -123,7 +128,9 @@ export class ContributionMessageResolver { @Args() contributionMessageArgs: ContributionMessageArgs, @Ctx() context: Context, ): Promise { + const logger = createLogger() const { contributionId, messageType } = contributionMessageArgs + logger.addContext('contribution', contributionMessageArgs.contributionId) const updateUnconfirmedContributionContext = new UpdateUnconfirmedContributionContext( contributionId, contributionMessageArgs, @@ -137,9 +144,9 @@ export class ContributionMessageResolver { let finalContributionMessage: DbContributionMessage | undefined try { - await getConnection().transaction( - 'REPEATABLE READ', - async (transactionalEntityManager: EntityManager) => { + await db + .getDataSource() + .transaction('REPEATABLE READ', async (transactionalEntityManager: EntityManager) => { const { contribution, contributionMessage, contributionChanged } = await 
updateUnconfirmedContributionContext.run(transactionalEntityManager, relations) if (contributionChanged) { @@ -159,8 +166,7 @@ export class ContributionMessageResolver { } finalContribution = contribution finalContributionMessage = contributionMessage - }, - ) + }) } catch (e) { throw new LogError(`ContributionMessage was not sent successfully: ${e}`, e) } @@ -179,6 +185,11 @@ export class ContributionMessageResolver { senderFirstName: moderator.firstName, senderLastName: moderator.lastName, contributionMemo: finalContribution.memo, + contributionFrontendLink: await contributionFrontendLink( + finalContribution.id, + finalContribution.createdAt, + ), + message: finalContributionMessage.message, }) } diff --git a/backend/src/graphql/resolver/ContributionResolver.test.ts b/backend/src/graphql/resolver/ContributionResolver.test.ts index b9dd50560..8bdbd98a8 100644 --- a/backend/src/graphql/resolver/ContributionResolver.test.ts +++ b/backend/src/graphql/resolver/ContributionResolver.test.ts @@ -1,9 +1,8 @@ -import { UserInputError } from 'apollo-server-express' import { ApolloServerTestClient } from 'apollo-server-testing' import { Contribution, Event as DbEvent, Transaction as DbTransaction, User } from 'database' import { Decimal } from 'decimal.js-light' import { GraphQLError } from 'graphql' -import { Connection, Equal } from 'typeorm' +import { DataSource, Equal } from 'typeorm' import { ContributionMessageType } from '@enum/ContributionMessageType' import { ContributionStatus } from '@enum/ContributionStatus' @@ -15,8 +14,9 @@ import { resetToken, testEnvironment, } from '@test/helpers' -import { i18n as localization, logger } from '@test/testSetup' +import { i18n as localization } from '@test/testSetup' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { sendContributionConfirmedEmail, sendContributionDeletedEmail, @@ -51,17 +51,21 @@ import { peterLustig } from '@/seeds/users/peter-lustig' import { raeuberHotzenplotz } from '@/seeds/users/raeuber-hotzenplotz' import { stephenHawking } from '@/seeds/users/stephen-hawking' import { getFirstDayOfPreviousNMonth } from '@/util/utilities' +import { clearLogs, getLogger, printLogs } from 'config-schema/test/testSetup' +import { getLogger as originalGetLogger } from 'log4js' jest.mock('@/emails/sendEmailVariants') jest.mock('@/password/EncryptorUtils') +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.LogError`) + let mutate: ApolloServerTestClient['mutate'] let query: ApolloServerTestClient['query'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } let creation: Contribution | null let admin: User @@ -73,7 +77,7 @@ let contributionToDelete: any let bibiCreatedContribution: Contribution beforeAll(async () => { - testEnv = await testEnvironment(logger, localization) + testEnv = await testEnvironment(originalGetLogger('apollo'), localization) mutate = testEnv.mutate query = testEnv.query con = testEnv.con @@ -82,7 +86,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('ContributionResolver', () => { @@ -877,6 +881,7 @@ describe('ContributionResolver', () => { senderFirstName: 'Peter', senderLastName: 'Lustig', contributionMemo: 'Test contribution to deny', + contributionFrontendLink: `http://localhost/contributions/own-contributions/1#contributionListItem-${contributionToDeny.data.createContribution.id}`, }) }) }) @@ 
-1954,6 +1959,7 @@ describe('ContributionResolver', () => { senderFirstName: 'Peter', senderLastName: 'Lustig', contributionMemo: 'Das war leider zu Viel!', + contributionFrontendLink: `http://localhost/contributions/own-contributions/1#contributionListItem-${creation?.id}`, }) }) }) @@ -2120,6 +2126,7 @@ describe('ContributionResolver', () => { senderLastName: 'Lustig', contributionMemo: 'Herzlich Willkommen bei Gradido liebe Bibi!', contributionAmount: expect.decimalEqual(450), + contributionFrontendLink: `http://localhost/contributions/own-contributions/1#contributionListItem-${creation?.id}`, }) }) diff --git a/backend/src/graphql/resolver/ContributionResolver.ts b/backend/src/graphql/resolver/ContributionResolver.ts index 319678157..687aa0196 100644 --- a/backend/src/graphql/resolver/ContributionResolver.ts +++ b/backend/src/graphql/resolver/ContributionResolver.ts @@ -7,7 +7,7 @@ import { import { Decimal } from 'decimal.js-light' import { GraphQLResolveInfo } from 'graphql' import { Arg, Args, Authorized, Ctx, Info, Int, Mutation, Query, Resolver } from 'type-graphql' -import { EntityManager, IsNull, getConnection } from 'typeorm' +import { EntityManager, IsNull } from 'typeorm' import { AdminCreateContributionArgs } from '@arg/AdminCreateContributionArgs' import { AdminUpdateContributionArgs } from '@arg/AdminUpdateContributionArgs' @@ -43,20 +43,28 @@ import { import { UpdateUnconfirmedContributionContext } from '@/interactions/updateUnconfirmedContribution/UpdateUnconfirmedContribution.context' import { LogError } from '@/server/LogError' import { Context, getClientTimezoneOffset, getUser } from '@/server/context' -import { backendLogger as logger } from '@/server/logger' import { TRANSACTIONS_LOCK } from '@/util/TRANSACTIONS_LOCK' import { calculateDecay } from '@/util/decay' import { fullName } from '@/util/utilities' -import { start } from 'repl' -import { ContributionMessageType } from '../enum/ContributionMessageType' -import { loadAllContributions, loadUserContributions } from './util/contributions' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '@/graphql/resolver' +import { ContributionMessageType } from '@enum/ContributionMessageType' +import { AppDatabase } from 'database' +import { getLogger } from 'log4js' +import { + contributionFrontendLink, + loadAllContributions, + loadUserContributions, +} from './util/contributions' import { getOpenCreations, getUserCreation, validateContribution } from './util/creations' import { extractGraphQLFields } from './util/extractGraphQLFields' import { findContributions } from './util/findContributions' import { getLastTransaction } from './util/getLastTransaction' import { sendTransactionsToDltConnector } from './util/sendTransactionsToDltConnector' +const db = AppDatabase.getInstance() +const createLogger = () => getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.ContributionResolver`) + @Resolver(() => Contribution) export class ContributionResolver { @Authorized([RIGHTS.ADMIN_LIST_CONTRIBUTIONS]) @@ -79,6 +87,8 @@ export class ContributionResolver { const user = getUser(context) const creations = await getUserCreation(user.id, clientTimezoneOffset) + const logger = createLogger() + logger.addContext('user', user.id) logger.trace('creations', creations) const contributionDateObj = new Date(contributionDate) validateContribution(creations, amount, contributionDateObj, clientTimezoneOffset) @@ -191,7 +201,7 @@ export class ContributionResolver { context, ) const { contribution, contributionMessage } = await 
updateUnconfirmedContributionContext.run() - await getConnection().transaction(async (transactionalEntityManager: EntityManager) => { + await db.getDataSource().transaction(async (transactionalEntityManager: EntityManager) => { await transactionalEntityManager.save(contribution) if (contributionMessage) { await transactionalEntityManager.save(contributionMessage) @@ -209,6 +219,8 @@ export class ContributionResolver { @Args() { email, amount, memo, creationDate }: AdminCreateContributionArgs, @Ctx() context: Context, ): Promise { + const logger = createLogger() + logger.addContext('admin', context.user?.id) logger.info( `adminCreateContribution(email=${email}, amount=${amount.toString()}, memo=${memo}, creationDate=${creationDate})`, ) @@ -260,6 +272,8 @@ export class ContributionResolver { @Args() adminUpdateContributionArgs: AdminUpdateContributionArgs, @Ctx() context: Context, ): Promise { + const logger = createLogger() + logger.addContext('contribution', adminUpdateContributionArgs.id) const updateUnconfirmedContributionContext = new UpdateUnconfirmedContributionContext( adminUpdateContributionArgs.id, adminUpdateContributionArgs, @@ -267,11 +281,10 @@ export class ContributionResolver { ) const { contribution, contributionMessage, createdByUserChangedByModerator } = await updateUnconfirmedContributionContext.run() - await getConnection().transaction(async (transactionalEntityManager: EntityManager) => { + await db.getDataSource().transaction(async (transactionalEntityManager: EntityManager) => { await transactionalEntityManager.save(contribution) // TODO: move into specialized view or formatting for logging class logger.debug('saved changed contribution', { - id: contribution.id, amount: contribution.amount.toString(), memo: contribution.memo, contributionDate: contribution.contributionDate.toString(), @@ -282,7 +295,6 @@ export class ContributionResolver { await transactionalEntityManager.save(contributionMessage) // TODO: move into specialized view or formatting for logging class logger.debug('save new contributionMessage', { - contributionId: contributionMessage.contributionId, type: contributionMessage.type, message: contributionMessage.message, isModerator: contributionMessage.isModerator, @@ -317,6 +329,10 @@ export class ContributionResolver { senderLastName: moderator.lastName, contributionMemo: updateUnconfirmedContributionContext.getOldMemo(), contributionMemoUpdated: contribution.memo, + contributionFrontendLink: await contributionFrontendLink( + contribution.id, + contribution.createdAt, + ), }) } @@ -403,6 +419,10 @@ export class ContributionResolver { senderFirstName: moderator.firstName, senderLastName: moderator.lastName, contributionMemo: contribution.memo, + contributionFrontendLink: await contributionFrontendLink( + contribution.id, + contribution.createdAt, + ), }) return !!res @@ -414,6 +434,9 @@ export class ContributionResolver { @Arg('id', () => Int) id: number, @Ctx() context: Context, ): Promise { + const logger = createLogger() + logger.addContext('contribution', id) + // acquire lock const releaseLock = await TRANSACTIONS_LOCK.acquire() try { @@ -449,7 +472,7 @@ export class ContributionResolver { ) const receivedCallDate = new Date() - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() await queryRunner.startTransaction('REPEATABLE READ') // 'READ COMMITTED') @@ -510,6 +533,10 @@ export class ContributionResolver { senderLastName: moderatorUser.lastName, 
contributionMemo: contribution.memo, contributionAmount: contribution.amount, + contributionFrontendLink: await contributionFrontendLink( + contribution.id, + contribution.createdAt, + ), }) } catch (e) { await queryRunner.rollbackTransaction() @@ -593,6 +620,10 @@ export class ContributionResolver { senderFirstName: moderator.firstName, senderLastName: moderator.lastName, contributionMemo: contributionToUpdate.memo, + contributionFrontendLink: await contributionFrontendLink( + contributionToUpdate.id, + contributionToUpdate.createdAt, + ), }) return !!res diff --git a/backend/src/graphql/resolver/EmailOptinCodes.test.ts b/backend/src/graphql/resolver/EmailOptinCodes.test.ts index b9cdaa513..b916d23b6 100644 --- a/backend/src/graphql/resolver/EmailOptinCodes.test.ts +++ b/backend/src/graphql/resolver/EmailOptinCodes.test.ts @@ -1,7 +1,7 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { User as DbUser } from 'database' import { GraphQLError } from 'graphql' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { cleanDB, testEnvironment } from '@test/helpers' @@ -12,11 +12,11 @@ import { queryOptIn } from '@/seeds/graphql/queries' let mutate: ApolloServerTestClient['mutate'] let query: ApolloServerTestClient['query'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } CONFIG.EMAIL_CODE_VALID_TIME = 1440 @@ -33,7 +33,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('EmailOptinCodes', () => { @@ -101,6 +101,7 @@ describe('EmailOptinCodes', () => { describe('forgotPassword', () => { it('throws an error', async () => { + await mutate({ mutation: forgotPassword, variables: { email: 'peter@lustig.de' } }) await expect( mutate({ mutation: forgotPassword, variables: { email: 'peter@lustig.de' } }), ).resolves.toMatchObject({ diff --git a/backend/src/graphql/resolver/GdtResolver.ts b/backend/src/graphql/resolver/GdtResolver.ts index 933fdb397..9d7451ed8 100644 --- a/backend/src/graphql/resolver/GdtResolver.ts +++ b/backend/src/graphql/resolver/GdtResolver.ts @@ -10,8 +10,10 @@ import { RIGHTS } from '@/auth/RIGHTS' import { CONFIG } from '@/config' import { LogError } from '@/server/LogError' import { Context, getUser } from '@/server/context' +import { getLogger } from 'log4js' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.' 
-import { backendLogger as logger } from '@/server/logger'
+const logger = getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.GdtResolver`)
 @Resolver()
 export class GdtResolver {
diff --git a/backend/src/graphql/resolver/KlicktippResolver.test.ts b/backend/src/graphql/resolver/KlicktippResolver.test.ts
index 8c127f10f..10ef6a760 100644
--- a/backend/src/graphql/resolver/KlicktippResolver.test.ts
+++ b/backend/src/graphql/resolver/KlicktippResolver.test.ts
@@ -2,15 +2,19 @@ import { Event as DbEvent, UserContact } from 'database'
 import { GraphQLError } from 'graphql'
 import { cleanDB, resetToken, testEnvironment } from '@test/helpers'
-import { i18n as localization, logger } from '@test/testSetup'
+import { i18n as localization } from '@test/testSetup'
+import { getLogger } from 'config-schema/test/testSetup'
 import { EventType } from '@/event/Events'
 import { userFactory } from '@/seeds/factory/user'
 import { login, subscribeNewsletter, unsubscribeNewsletter } from '@/seeds/graphql/mutations'
 import { bibiBloxberg } from '@/seeds/users/bibi-bloxberg'
+import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.'
 jest.mock('@/password/EncryptorUtils')
+const logger = getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.KlicktippResolver`)
+
 let testEnv: any
 let mutate: any
 let con: any
@@ -24,7 +28,7 @@ beforeAll(async () => {
 afterAll(async () => {
   await cleanDB()
-  await con.close()
+  await con.destroy()
 })
 describe('KlicktippResolver', () => {
diff --git a/backend/src/graphql/resolver/ProjectBrandingResolver.ts b/backend/src/graphql/resolver/ProjectBrandingResolver.ts
index 3742ac2c4..a7ef1c8b8 100644
--- a/backend/src/graphql/resolver/ProjectBrandingResolver.ts
+++ b/backend/src/graphql/resolver/ProjectBrandingResolver.ts
@@ -9,7 +9,10 @@ import { SpaceList } from '@model/SpaceList'
 import { HumHubClient } from '@/apis/humhub/HumHubClient'
 import { RIGHTS } from '@/auth/RIGHTS'
 import { LogError } from '@/server/LogError'
-import { backendLogger as logger } from '@/server/logger'
+import { getLogger } from 'log4js'
+import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.'
+ +const logger = getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.ProjectBrandingResolver`) @Resolver(() => ProjectBranding) export class ProjectBrandingResolver { diff --git a/backend/src/graphql/resolver/StatisticsResolver.ts b/backend/src/graphql/resolver/StatisticsResolver.ts index 6d2aa2e20..6713cbb54 100644 --- a/backend/src/graphql/resolver/StatisticsResolver.ts +++ b/backend/src/graphql/resolver/StatisticsResolver.ts @@ -1,13 +1,14 @@ -import { Transaction as DbTransaction, User as DbUser } from 'database' +import { AppDatabase, Transaction as DbTransaction, User as DbUser } from 'database' import { Decimal } from 'decimal.js-light' import { Authorized, FieldResolver, Query, Resolver } from 'type-graphql' -import { getConnection } from 'typeorm' import { CommunityStatistics, DynamicStatisticsFields } from '@model/CommunityStatistics' import { RIGHTS } from '@/auth/RIGHTS' import { calculateDecay } from '@/util/decay' +const db = AppDatabase.getInstance() + @Resolver(() => CommunityStatistics) export class StatisticsResolver { @Authorized([RIGHTS.COMMUNITY_STATISTICS]) @@ -33,7 +34,7 @@ export class StatisticsResolver { @FieldResolver() async totalGradidoCreated(): Promise { - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() try { await queryRunner.connect() const { totalGradidoCreated } = await queryRunner.manager @@ -50,7 +51,7 @@ export class StatisticsResolver { @FieldResolver() async totalGradidoDecayed(): Promise { - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() try { await queryRunner.connect() const { totalGradidoDecayed } = await queryRunner.manager @@ -72,7 +73,7 @@ export class StatisticsResolver { const receivedCallDate = new Date() - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() try { await queryRunner.connect() diff --git a/backend/src/graphql/resolver/TransactionLinkResolver.test.ts b/backend/src/graphql/resolver/TransactionLinkResolver.test.ts index c6592164f..7862f71f0 100644 --- a/backend/src/graphql/resolver/TransactionLinkResolver.test.ts +++ b/backend/src/graphql/resolver/TransactionLinkResolver.test.ts @@ -8,11 +8,10 @@ import { } from 'database' import { Decimal } from 'decimal.js-light' import { GraphQLError } from 'graphql' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { UnconfirmedContribution } from '@model/UnconfirmedContribution' import { cleanDB, resetEntity, resetToken, testEnvironment } from '@test/helpers' -import { logger } from '@test/testSetup' import { EventType } from '@/event/Events' import { creations } from '@/seeds/creation/index' @@ -35,8 +34,12 @@ import { bibiBloxberg } from '@/seeds/users/bibi-bloxberg' import { peterLustig } from '@/seeds/users/peter-lustig' import { TRANSACTIONS_LOCK } from '@/util/TRANSACTIONS_LOCK' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' +import { getLogger } from 'config-schema/test/testSetup' import { transactionLinkCode } from './TransactionLinkResolver' +const logErrorLogger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.LogError`) + jest.mock('@/password/EncryptorUtils') // mock semaphore to allow use fake timers @@ -45,11 +48,11 @@ TRANSACTIONS_LOCK.acquire = jest.fn().mockResolvedValue(jest.fn()) let mutate: ApolloServerTestClient['mutate'] let query: ApolloServerTestClient['query'] -let con: Connection +let con: DataSource let testEnv: { mutate: 
ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } let user: User @@ -66,7 +69,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('TransactionLinkResolver', () => { @@ -221,7 +224,7 @@ describe('TransactionLinkResolver', () => { }) }) it('logs the error "User has not enough GDD"', () => { - expect(logger.error).toBeCalledWith('User has not enough GDD', expect.any(Number)) + expect(logErrorLogger.error).toBeCalledWith('User has not enough GDD', expect.any(Number)) }) }) }) @@ -273,11 +276,11 @@ describe('TransactionLinkResolver', () => { }) it('logs the error "No contribution link found to given code"', () => { - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'No contribution link found to given code', 'CL-123456', ) - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'Creation from contribution link was not successful', new Error('No contribution link found to given code'), ) @@ -317,8 +320,11 @@ describe('TransactionLinkResolver', () => { }) it('logs the error "Contribution link is not valid yet"', () => { - expect(logger.error).toBeCalledWith('Contribution link is not valid yet', validFrom) - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( + 'Contribution link is not valid yet', + validFrom, + ) + expect(logErrorLogger.error).toBeCalledWith( 'Creation from contribution link was not successful', new Error('Contribution link is not valid yet'), ) @@ -356,8 +362,11 @@ describe('TransactionLinkResolver', () => { }) it('logs the error "Contribution link has unknown cycle"', () => { - expect(logger.error).toBeCalledWith('Contribution link has unknown cycle', 'INVALID') - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( + 'Contribution link has unknown cycle', + 'INVALID', + ) + expect(logErrorLogger.error).toBeCalledWith( 'Creation from contribution link was not successful', new Error('Contribution link has unknown cycle'), ) @@ -395,8 +404,11 @@ describe('TransactionLinkResolver', () => { }) it('logs the error "Contribution link is no longer valid"', () => { - expect(logger.error).toBeCalledWith('Contribution link is no longer valid', validTo) - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( + 'Contribution link is no longer valid', + validTo, + ) + expect(logErrorLogger.error).toBeCalledWith( 'Creation from contribution link was not successful', new Error('Contribution link is no longer valid'), ) @@ -491,7 +503,7 @@ describe('TransactionLinkResolver', () => { }) it('logs the error "Creation from contribution link was not successful"', () => { - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'Creation from contribution link was not successful', new Error( 'The amount to be created exceeds the amount still available for this month', @@ -566,7 +578,7 @@ describe('TransactionLinkResolver', () => { }) it('logs the error "Creation from contribution link was not successful"', () => { - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'Creation from contribution link was not successful', new Error('Contribution link already redeemed today'), ) @@ -618,7 +630,7 @@ describe('TransactionLinkResolver', () => { }) it('logs the error "Creation from contribution link was not successful"', () => { - 
expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'Creation from contribution link was not successful', new Error('Contribution link already redeemed today'), ) @@ -652,7 +664,7 @@ describe('TransactionLinkResolver', () => { ).resolves.toMatchObject({ errors: [new GraphQLError('Transaction link not found')], }) - expect(logger.error).toBeCalledWith('Transaction link not found', 'not-valid') + expect(logErrorLogger.error).toBeCalledWith('Transaction link not found', 'not-valid') }) }) @@ -723,7 +735,7 @@ describe('TransactionLinkResolver', () => { ).resolves.toMatchObject({ errors: [new GraphQLError('Cannot redeem own transaction link')], }) - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'Cannot redeem own transaction link', expect.any(Number), ) @@ -927,7 +939,7 @@ describe('TransactionLinkResolver', () => { }) it('logs the error "Could not find requested User"', () => { - expect(logger.error).toBeCalledWith('Could not find requested User', -1) + expect(logErrorLogger.error).toBeCalledWith('Could not find requested User', -1) }) }) diff --git a/backend/src/graphql/resolver/TransactionLinkResolver.ts b/backend/src/graphql/resolver/TransactionLinkResolver.ts index 0f375b387..34c073e45 100644 --- a/backend/src/graphql/resolver/TransactionLinkResolver.ts +++ b/backend/src/graphql/resolver/TransactionLinkResolver.ts @@ -15,6 +15,7 @@ import { TransactionLink, TransactionLinkResult } from '@model/TransactionLink' import { User } from '@model/User' import { QueryLinkResult } from '@union/QueryLinkResult' import { + AppDatabase, Contribution as DbContribution, ContributionLink as DbContributionLink, Transaction as DbTransaction, @@ -23,7 +24,6 @@ import { } from 'database' import { Decimal } from 'decimal.js-light' import { Arg, Args, Authorized, Ctx, Int, Mutation, Query, Resolver } from 'type-graphql' -import { getConnection } from 'typeorm' import { RIGHTS } from '@/auth/RIGHTS' import { decode, encode, verify } from '@/auth/jwt/JWT' @@ -36,7 +36,6 @@ import { } from '@/event/Events' import { LogError } from '@/server/LogError' import { Context, getClientTimezoneOffset, getUser } from '@/server/context' -import { backendLogger as logger } from '@/server/logger' import { TRANSACTIONS_LOCK } from '@/util/TRANSACTIONS_LOCK' import { TRANSACTION_LINK_LOCK } from '@/util/TRANSACTION_LINK_LOCK' import { calculateDecay } from '@/util/decay' @@ -44,6 +43,8 @@ import { fullName } from '@/util/utilities' import { calculateBalance } from '@/util/validate' import { DisburseJwtPayloadType } from '@/auth/jwt/payloadtypes/DisburseJwtPayloadType' +import { Logger, getLogger } from 'log4js' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.' 
import { executeTransaction } from './TransactionResolver' import { getAuthenticatedCommunities, @@ -55,6 +56,8 @@ import { getLastTransaction } from './util/getLastTransaction' import { sendTransactionsToDltConnector } from './util/sendTransactionsToDltConnector' import { transactionLinkList } from './util/transactionLinkList' +const createLogger = () => getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.TransactionLinkResolver`) + // TODO: do not export, test it inside the resolver export const transactionLinkCode = (date: Date): string => { const time = date.getTime().toString(16) @@ -66,6 +69,7 @@ export const transactionLinkCode = (date: Date): string => { } const CODE_VALID_DAYS_DURATION = 14 +const db = AppDatabase.getInstance() export const transactionLinkExpireDate = (date: Date): Date => { const validUntil = new Date(date) @@ -147,7 +151,9 @@ export class TransactionLinkResolver { @Authorized([RIGHTS.QUERY_TRANSACTION_LINK]) @Query(() => QueryLinkResult) async queryTransactionLink(@Arg('code') code: string): Promise { - logger.debug('TransactionLinkResolver.queryTransactionLink... code=', code) + const logger = createLogger() + logger.addContext('code', code.substring(0, 6)) + logger.debug('TransactionLinkResolver.queryTransactionLink...') if (code.match(/^CL-/)) { const contributionLink = await DbContributionLink.findOneOrFail({ where: { code: code.replace('CL-', '') }, @@ -183,7 +189,7 @@ export class TransactionLinkResolver { return new TransactionLink(dbTransactionLink, new User(user), redeemedBy, communities) } else { // redeem jwt-token - return await this.queryRedeemJwtLink(code) + return await this.queryRedeemJwtLink(code, logger) } } } @@ -194,6 +200,8 @@ export class TransactionLinkResolver { @Arg('code', () => String) code: string, @Ctx() context: Context, ): Promise { + const logger = createLogger() + logger.addContext('code', code.substring(0, 6)) const clientTimezoneOffset = getClientTimezoneOffset(context) // const homeCom = await DbCommunity.findOneOrFail({ where: { foreign: false } }) const user = getUser(context) @@ -203,7 +211,7 @@ export class TransactionLinkResolver { try { logger.info('redeem contribution link...') const now = new Date() - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() await queryRunner.startTransaction('REPEATABLE READ') try { @@ -378,6 +386,7 @@ export class TransactionLinkResolver { transactionLink.memo, linkedUser, user, + logger, transactionLink, ) await EVENT_TRANSACTION_LINK_REDEEM( @@ -407,6 +416,8 @@ export class TransactionLinkResolver { @Arg('alias', { nullable: true }) alias?: string, @Arg('validUntil', { nullable: true }) validUntil?: string, ): Promise { + const logger = createLogger() + logger.addContext('code', code.substring(0, 6)) logger.debug('TransactionLinkResolver.queryRedeemJwt... args=', { gradidoId, senderCommunityUuid, @@ -455,6 +466,8 @@ export class TransactionLinkResolver { @Arg('validUntil', { nullable: true }) validUntil?: string, @Arg('recipientAlias', { nullable: true }) recipientAlias?: string, ): Promise { + const logger = createLogger() + logger.addContext('code', code.substring(0, 6)) logger.debug('TransactionLinkResolver.disburseTransactionLink... 
args=', { senderGradidoId, senderCommunityUuid, @@ -526,7 +539,7 @@ export class TransactionLinkResolver { return transactionLinkList(paginated, filters, user) } - async queryRedeemJwtLink(code: string): Promise { + async queryRedeemJwtLink(code: string, logger: Logger): Promise { logger.debug('TransactionLinkResolver.queryRedeemJwtLink... redeem jwt-token found') // decode token first to get the senderCommunityUuid as input for verify token const decodedPayload = decode(code) @@ -651,6 +664,8 @@ export class TransactionLinkResolver { validUntil: string, recipientAlias: string, ): Promise { + const logger = createLogger() + logger.addContext('code', code.substring(0, 6)) logger.debug('TransactionLinkResolver.createDisburseJwt... args=', { senderCommunityUuid, senderGradidoId, diff --git a/backend/src/graphql/resolver/TransactionResolver.test.ts b/backend/src/graphql/resolver/TransactionResolver.test.ts index d164cd5a8..97b3084d9 100644 --- a/backend/src/graphql/resolver/TransactionResolver.test.ts +++ b/backend/src/graphql/resolver/TransactionResolver.test.ts @@ -8,13 +8,13 @@ import { User, } from 'database' import { GraphQLError } from 'graphql' -import { Connection, In } from 'typeorm' +import { DataSource, In } from 'typeorm' import { v4 as uuidv4 } from 'uuid' import { cleanDB, testEnvironment } from '@test/helpers' -import { logger } from '@test/testSetup' import { CONFIG } from '@/config' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { EventType } from '@/event/Events' import { SendCoinsClient } from '@/federation/client/1_0/SendCoinsClient' import { SendCoinsArgs } from '@/federation/client/1_0/model/SendCoinsArgs' @@ -32,16 +32,19 @@ import { bobBaumeister } from '@/seeds/users/bob-baumeister' import { garrickOllivander } from '@/seeds/users/garrick-ollivander' import { peterLustig } from '@/seeds/users/peter-lustig' import { stephenHawking } from '@/seeds/users/stephen-hawking' +import { getLogger } from 'config-schema/test/testSetup' jest.mock('@/password/EncryptorUtils') +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.LogError`) + let mutate: ApolloServerTestClient['mutate'] let query: ApolloServerTestClient['query'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { @@ -477,8 +480,6 @@ describe('send coins', () => { }) it('has wait till sendTransactionsToDltConnector created all dlt-transactions', () => { - expect(logger.info).toBeCalledWith('sendTransactionsToDltConnector...') - expect(dltTransactions).toEqual( expect.arrayContaining([ expect.objectContaining({ diff --git a/backend/src/graphql/resolver/TransactionResolver.ts b/backend/src/graphql/resolver/TransactionResolver.ts index d92d24638..511c7167d 100644 --- a/backend/src/graphql/resolver/TransactionResolver.ts +++ b/backend/src/graphql/resolver/TransactionResolver.ts @@ -1,4 +1,5 @@ import { + AppDatabase, Community as DbCommunity, PendingTransaction as DbPendingTransaction, Transaction as dbTransaction, @@ -7,7 +8,7 @@ import { } from 'database' import { Decimal } from 'decimal.js-light' import { Args, Authorized, Ctx, Mutation, Query, Resolver } from 'type-graphql' -import { In, IsNull, getConnection } from 'typeorm' +import { In, IsNull } from 'typeorm' import { Paginated } from '@arg/Paginated' import { TransactionSendArgs } from '@arg/TransactionSendArgs' @@ -28,13 +29,14 @@ import { EVENT_TRANSACTION_RECEIVE, EVENT_TRANSACTION_SEND 
} from '@/event/Event import { SendCoinsResult } from '@/federation/client/1_0/model/SendCoinsResult' import { LogError } from '@/server/LogError' import { Context, getUser } from '@/server/context' -import { backendLogger as logger } from '@/server/logger' import { TRANSACTIONS_LOCK } from '@/util/TRANSACTIONS_LOCK' import { communityUser } from '@/util/communityUser' import { fullName } from '@/util/utilities' import { calculateBalance } from '@/util/validate' import { virtualDecayTransaction, virtualLinkTransaction } from '@/util/virtualTransactions' +import { Logger, getLogger } from 'log4js' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.' import { BalanceResolver } from './BalanceResolver' import { GdtResolver } from './GdtResolver' import { getCommunityByIdentifier, getCommunityName, isHomeCommunity } from './util/communities' @@ -49,15 +51,20 @@ import { sendTransactionsToDltConnector } from './util/sendTransactionsToDltConn import { storeForeignUser } from './util/storeForeignUser' import { transactionLinkSummary } from './util/transactionLinkSummary' +const db = AppDatabase.getInstance() +const createLogger = () => getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.TransactionResolver`) + export const executeTransaction = async ( amount: Decimal, memo: string, sender: dbUser, recipient: dbUser, + logger: Logger, transactionLink?: dbTransactionLink | null, ): Promise => { // acquire lock const releaseLock = await TRANSACTIONS_LOCK.acquire() + try { logger.info('executeTransaction', amount, memo, sender, recipient) @@ -96,7 +103,7 @@ export const executeTransaction = async ( throw new LogError('User has not enough GDD or amount is < 0', sendBalance) } - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() await queryRunner.startTransaction('REPEATABLE READ') logger.debug(`open Transaction to write...`) @@ -225,9 +232,9 @@ export class TransactionResolver { ): Promise { const now = new Date() const user = getUser(context) - + const logger = createLogger() logger.addContext('user', user.id) - logger.info(`transactionList(user=${user.firstName}.${user.lastName}, ${user.emailId})`) + logger.info(`transactionList`) let balanceGDTPromise: Promise = Promise.resolve(null) if (CONFIG.GDT_ACTIVE) { @@ -237,7 +244,7 @@ export class TransactionResolver { // find current balance const lastTransaction = await getLastTransaction(user.id) - logger.debug(`lastTransaction=${lastTransaction}`) + logger.debug(`lastTransaction=${lastTransaction?.id}`) const balanceResolver = new BalanceResolver() context.lastTransaction = lastTransaction @@ -285,10 +292,10 @@ export class TransactionResolver { }, ], }) - logger.debug('found dbRemoteUser:', dbRemoteUser) + logger.debug(`found dbRemoteUser: ${dbRemoteUser?.id}`) const remoteUser = new User(dbRemoteUser) if (dbRemoteUser === null) { - logger.debug('no dbRemoteUser found, init from tx:', transaction) + logger.debug(`no dbRemoteUser found, init from tx: ${transaction.id}`) if (transaction.linkedUserCommunityUuid !== null) { remoteUser.communityUuid = transaction.linkedUserCommunityUuid } @@ -309,7 +316,10 @@ export class TransactionResolver { } } logger.debug(`involvedUserIds=`, involvedUserIds) - logger.debug(`involvedRemoteUsers=`, involvedRemoteUsers) + logger.debug( + `involvedRemoteUsers=`, + involvedRemoteUsers.map((u) => u.id), + ) // We need to show the name for deleted users for old transactions const involvedDbUsers = await dbUser.find({ @@ -318,7 +328,10 @@ 
export class TransactionResolver { relations: ['emailContact'], }) const involvedUsers = involvedDbUsers.map((u) => new User(u)) - logger.debug(`involvedUsers=`, involvedUsers) + logger.debug( + `involvedUsers=`, + involvedUsers.map((u) => u.id), + ) const self = new User(user) const transactions: Transaction[] = [] @@ -329,11 +342,11 @@ export class TransactionResolver { context.linkCount = transactionLinkcount logger.debug(`transactionLinkcount=${transactionLinkcount}`) context.sumHoldAvailableAmount = sumHoldAvailableAmount - logger.debug(`sumHoldAvailableAmount=${sumHoldAvailableAmount}`) + logger.debug(`sumHoldAvailableAmount=${sumHoldAvailableAmount.toString()}`) // decay & link transactions if (currentPage === 1 && order === Order.DESC) { - logger.debug(`currentPage == 1: transactions=${transactions}`) + logger.debug(`currentPage == 1: transactions=${transactions.map((t) => t.id)}`) // The virtual decay is always on the booked amount, not including the generated, not yet booked links, // since the decay is substantially different when the amount is less transactions.push( @@ -345,7 +358,7 @@ export class TransactionResolver { sumHoldAvailableAmount, ), ) - logger.debug(`transactions=${transactions}`) + logger.debug(`transactions=${transactions.map((t) => t.id)}`) // virtual transaction for pending transaction-links sum if (sumHoldAvailableAmount.isZero()) { @@ -370,7 +383,7 @@ export class TransactionResolver { ) } } else if (sumHoldAvailableAmount.greaterThan(0)) { - logger.debug(`sumHoldAvailableAmount > 0: transactions=${transactions}`) + logger.debug(`sumHoldAvailableAmount > 0: transactions=${transactions.map((t) => t.id)}`) transactions.push( virtualLinkTransaction( lastTransaction.balance.minus(sumHoldAvailableAmount.toString()), @@ -383,7 +396,7 @@ export class TransactionResolver { (userTransactions.length && userTransactions[0].balance) || new Decimal(0), ), ) - logger.debug(`transactions=`, transactions) + logger.debug(`transactions=${transactions.map((t) => t.id)}`) } } @@ -398,19 +411,22 @@ export class TransactionResolver { let linkedUser: User | undefined if ((userTransaction.typeId as TransactionTypeId) === TransactionTypeId.CREATION) { linkedUser = communityUser - logger.debug('CREATION-linkedUser=', linkedUser) + logger.debug(`CREATION-linkedUser=${linkedUser.id}`) } else if (userTransaction.linkedUserId) { linkedUser = involvedUsers.find((u) => u.id === userTransaction.linkedUserId) - logger.debug('local linkedUser=', linkedUser) + logger.debug(`local linkedUser=${linkedUser?.id}`) } else if (userTransaction.linkedUserCommunityUuid) { linkedUser = involvedRemoteUsers.find( (u) => u.gradidoID === userTransaction.linkedUserGradidoID, ) - logger.debug('remote linkedUser=', linkedUser) + logger.debug(`remote linkedUser=${linkedUser?.id}`) } transactions.push(new Transaction(userTransaction, self, linkedUser)) }) - logger.debug(`TransactionTypeId.CREATION: transactions=`, transactions) + logger.debug( + `TransactionTypeId.CREATION: transactions=`, + transactions.map((t) => t.id), + ) transactions.forEach((transaction: Transaction) => { if (transaction.typeId !== TransactionTypeId.DECAY) { @@ -436,6 +452,9 @@ export class TransactionResolver { { recipientCommunityIdentifier, recipientIdentifier, amount, memo }: TransactionSendArgs, @Ctx() context: Context, ): Promise { + const logger = createLogger() + logger.addContext('from', context.user?.id) + logger.addContext('amount', amount.toString()) logger.debug( 
`sendCoins(recipientCommunityIdentifier=${recipientCommunityIdentifier}, recipientIdentifier=${recipientIdentifier}, amount=${amount}, memo=${memo})`, ) @@ -451,28 +470,28 @@ export class TransactionResolver { if (!recipientUser) { throw new LogError('The recipient user was not found', recipientUser) } + logger.addContext('to', recipientUser?.id) if (recipientUser.foreign) { throw new LogError('Found foreign recipient user for a local transaction:', recipientUser) } - await executeTransaction(amount, memo, senderUser, recipientUser) - logger.info('successful executeTransaction', amount, memo, senderUser, recipientUser) + await executeTransaction(amount, memo, senderUser, recipientUser, logger) + logger.info('successful executeTransaction') } else { // processing a x-community sendCoins - logger.debug('X-Com: processing a x-community transaction...') + logger.info('X-Com: processing a x-community transaction...') if (!CONFIG.FEDERATION_XCOM_SENDCOINS_ENABLED) { throw new LogError('X-Community sendCoins disabled per configuration!') } const recipCom = await getCommunityByIdentifier(recipientCommunityIdentifier) - logger.debug('recipient commuity: ', recipCom) + logger.debug('recipient community: ', recipCom?.id) if (recipCom === null) { throw new LogError( - 'no recipient commuity found for identifier:', - recipientCommunityIdentifier, + `no recipient community found for identifier: ${recipientCommunityIdentifier}`, ) } if (recipCom !== null && recipCom.authenticatedAt === null) { - throw new LogError('recipient commuity is connected, but still not authenticated yet!') + throw new LogError('recipient community is connected, but still not authenticated yet!') } let pendingResult: SendCoinsResult let committingResult: SendCoinsResult diff --git a/backend/src/graphql/resolver/UserResolver.test.ts b/backend/src/graphql/resolver/UserResolver.test.ts index e7b64213f..9a8c223f1 100644 --- a/backend/src/graphql/resolver/UserResolver.test.ts +++ b/backend/src/graphql/resolver/UserResolver.test.ts @@ -9,7 +9,7 @@ import { UserRole, } from 'database' import { GraphQLError } from 'graphql' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { v4 as uuidv4, validate as validateUUID, version as versionUUID } from 'uuid' import { GmsPublishLocationType } from '@enum/GmsPublishLocationType' @@ -20,7 +20,7 @@ import { UserContactType } from '@enum/UserContactType' import { ContributionLink } from '@model/ContributionLink' import { Location } from '@model/Location' import { cleanDB, headerPushMock, resetToken, testEnvironment } from '@test/helpers' -import { i18n as localization, logger } from '@test/testSetup' +import { i18n as localization } from '@test/testSetup' import { subscribe } from '@/apis/KlicktippController' import { CONFIG } from '@/config' @@ -67,6 +67,9 @@ import { stephenHawking } from '@/seeds/users/stephen-hawking' import { printTimeDuration } from '@/util/time' import { objectValuesToArray } from '@/util/utilities' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' +import { clearLogs, getLogger, printLogs } from 'config-schema/test/testSetup' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '.' 
import { Location2Point } from './util/Location2Point' jest.mock('@/apis/humhub/HumHubClient') @@ -93,17 +96,20 @@ jest.mock('@/apis/KlicktippController', () => { } }) +const logger = getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.UserResolver`) +const logErrorLogger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.LogError`) + CONFIG.EMAIL_CODE_REQUEST_TIME = 10 let admin: User let user: User let mutate: ApolloServerTestClient['mutate'] let query: ApolloServerTestClient['query'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { @@ -117,7 +123,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('UserResolver', () => { @@ -275,7 +281,8 @@ describe('UserResolver', () => { }) it('logs an info', () => { - expect(logger.info).toBeCalledWith('User already exists with this email=peter@lustig.de') + expect(logger.info).toBeCalledWith('User already exists') + expect(logger.addContext).toBeCalledWith('user', user[0].id) }) it('sends an account multi registration email', () => { @@ -642,7 +649,7 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'Please enter a valid password with at least 8 characters, upper and lower case letters, at least one number and one special character!', ) }) @@ -672,7 +679,7 @@ describe('UserResolver', () => { }) it('logs the error found', () => { - expect(logger.error).toBeCalledWith('Could not login with emailVerificationCode') + expect(logger.warn).toBeCalledWith('invalid emailVerificationCode=not valid') }) }) }) @@ -691,9 +698,13 @@ describe('UserResolver', () => { }) describe('no users in database', () => { + beforeAll(() => { + clearLogs() + }) it('throws an error', async () => { jest.clearAllMocks() - expect(await mutate({ mutation: login, variables })).toEqual( + const result = await mutate({ mutation: login, variables }) + expect(result).toEqual( expect.objectContaining({ errors: [new GraphQLError('No user with this credentials')], }), @@ -701,7 +712,10 @@ describe('UserResolver', () => { }) it('logs the error found', () => { - expect(logger.error).toBeCalledWith('No user with this credentials', variables.email) + printLogs() + expect(logger.warn).toBeCalledWith( + `findUserByEmail failed, user with email=${variables.email} not found`, + ) }) }) @@ -782,8 +796,8 @@ describe('UserResolver', () => { ) }) - it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('No user with this credentials', variables.email) + it('logs warning before error is thrown', () => { + expect(logger.warn).toBeCalledWith('login failed, wrong password') }) }) @@ -813,14 +827,8 @@ describe('UserResolver', () => { ) }) - it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith( - 'This user was permanently deleted. 
Contact support for questions', - expect.objectContaining({ - firstName: stephenHawking.firstName, - lastName: stephenHawking.lastName, - }), - ) + it('logs warning before error is thrown', () => { + expect(logger.warn).toBeCalledWith('login failed, user was deleted') }) }) @@ -848,14 +856,8 @@ describe('UserResolver', () => { ) }) - it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith( - 'The Users email is not validate yet', - expect.objectContaining({ - firstName: garrickOllivander.firstName, - lastName: garrickOllivander.lastName, - }), - ) + it('logs warning before error is thrown', () => { + expect(logger.warn).toBeCalledWith('login failed, user email not checked') }) }) @@ -881,14 +883,8 @@ describe('UserResolver', () => { ) }) - it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith( - 'The User has not set a password yet', - expect.objectContaining({ - firstName: bibiBloxberg.firstName, - lastName: bibiBloxberg.lastName, - }), - ) + it('logs warning before error is thrown', () => { + expect(logger.warn).toBeCalledWith('login failed, user has not set a password yet') }) }) }) @@ -1114,7 +1110,7 @@ describe('UserResolver', () => { }) describe('request reset password again', () => { - it('thows an error', async () => { + it('throws an error', async () => { CONFIG.EMAIL_CODE_REQUEST_TIME = emailCodeRequestTime await expect(mutate({ mutation: forgotPassword, variables })).resolves.toEqual( expect.objectContaining({ @@ -1123,8 +1119,10 @@ describe('UserResolver', () => { ) }) - it('logs the error found', () => { - expect(logger.error).toBeCalledWith(`Email already sent less than 10 minutes ago`) + it('logs warning before throwing error', () => { + expect(logger.warn).toBeCalledWith( + 'email already sent 0 minutes ago, min wait time: 10 minutes', + ) }) }) }) @@ -1374,13 +1372,13 @@ describe('UserResolver', () => { }), ).resolves.toEqual( expect.objectContaining({ - errors: [new GraphQLError('Given language is not a valid language')], + errors: [new GraphQLError('Given language is not a valid language or not supported')], }), ) }) it('logs the error found', () => { - expect(logger.error).toBeCalledWith('Given language is not a valid language', 'not-valid') + expect(logger.warn).toBeCalledWith('try to set unsupported language', 'not-valid') }) }) @@ -1403,8 +1401,8 @@ describe('UserResolver', () => { ) }) - it('logs the error found', () => { - expect(logger.error).toBeCalledWith(`Old password is invalid`) + it('logs if logger is in debug mode', () => { + expect(logger.debug).toBeCalledWith(`old password is invalid`) }) }) @@ -1430,10 +1428,8 @@ describe('UserResolver', () => { ) }) - it('logs the error found', () => { - expect(logger.error).toBeCalledWith( - 'Please enter a valid password with at least 8 characters, upper and lower case letters, at least one number and one special character!', - ) + it('logs warning', () => { + expect(logger.warn).toBeCalledWith('try to set invalid password') }) }) @@ -1490,10 +1486,8 @@ describe('UserResolver', () => { ) }) - it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith( - 'Please enter a valid password with at least 8 characters, upper and lower case letters, at least one number and one special character!', - ) + it('log warning', () => { + expect(logger.warn).toBeCalledWith('login failed, wrong password') }) }) }) @@ -1776,7 +1770,10 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('Could not find user with given ID', 
admin.id + 1) + expect(logErrorLogger.error).toBeCalledWith( + 'Could not find user with given ID', + admin.id + 1, + ) }) }) @@ -1892,7 +1889,9 @@ describe('UserResolver', () => { ) }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('Administrator can not change his own role') + expect(logErrorLogger.error).toBeCalledWith( + 'Administrator can not change his own role', + ) }) }) @@ -1937,7 +1936,10 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('User already has role=', RoleNames.ADMIN) + expect(logErrorLogger.error).toBeCalledWith( + 'User already has role=', + RoleNames.ADMIN, + ) }) }) @@ -1961,7 +1963,10 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('User already has role=', RoleNames.MODERATOR) + expect(logErrorLogger.error).toBeCalledWith( + 'User already has role=', + RoleNames.MODERATOR, + ) }) }) @@ -1982,7 +1987,7 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('User is already an usual user') + expect(logErrorLogger.error).toBeCalledWith('User is already an usual user') }) }) }) @@ -2055,7 +2060,10 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('Could not find user with given ID', admin.id + 1) + expect(logErrorLogger.error).toBeCalledWith( + 'Could not find user with given ID', + admin.id + 1, + ) }) }) @@ -2072,7 +2080,7 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('Moderator can not delete his own account') + expect(logErrorLogger.error).toBeCalledWith('Moderator can not delete his own account') }) }) @@ -2125,7 +2133,10 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('Could not find user with given ID', user.id) + expect(logErrorLogger.error).toBeCalledWith( + 'Could not find user with given ID', + user.id, + ) }) }) }) @@ -2201,7 +2212,9 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('No user with this credentials', 'invalid') + expect(logger.warn).toBeCalledWith( + 'findUserByEmail failed, user with email=invalid not found', + ) }) }) @@ -2218,11 +2231,8 @@ describe('UserResolver', () => { ) }) - it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith( - 'User with given email contact is deleted', - 'stephen@hawking.uk', - ) + it('log warning', () => { + expect(logger.warn).toBeCalledWith('call for activation of deleted user') }) }) @@ -2348,7 +2358,10 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('Could not find user with given ID', admin.id + 1) + expect(logErrorLogger.error).toBeCalledWith( + 'Could not find user with given ID', + admin.id + 1, + ) }) }) @@ -2369,7 +2382,7 @@ describe('UserResolver', () => { }) it('logs the error thrown', () => { - expect(logger.error).toBeCalledWith('User is not deleted') + expect(logErrorLogger.error).toBeCalledWith('User is not deleted') }) describe('undelete deleted user', () => { @@ -2682,7 +2695,7 @@ describe('UserResolver', () => { errors: [new GraphQLError('401 Unauthorized')], }), ) - expect(logger.error).toBeCalledWith('401 Unauthorized') + expect(logErrorLogger.error).toBeCalledWith('401 Unauthorized') }) }) @@ -2720,7 +2733,7 @@ describe('UserResolver', () => { 
errors: [new GraphQLError('Unknown identifier type')], }), ) - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'Unknown identifier type', 'identifier_is_no_valid_alias!', ) @@ -2742,7 +2755,7 @@ describe('UserResolver', () => { errors: [new GraphQLError('No user found to given identifier(s)')], }), ) - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'No user found to given identifier(s)', uuid, homeCom1.communityUuid, @@ -2765,7 +2778,7 @@ describe('UserResolver', () => { errors: [new GraphQLError('No user with this credentials')], }), ) - expect(logger.error).toBeCalledWith( + expect(logErrorLogger.error).toBeCalledWith( 'No user with this credentials', 'bibi@bloxberg.de', foreignCom1.communityUuid, diff --git a/backend/src/graphql/resolver/UserResolver.ts b/backend/src/graphql/resolver/UserResolver.ts index 063f3d56a..14be59307 100644 --- a/backend/src/graphql/resolver/UserResolver.ts +++ b/backend/src/graphql/resolver/UserResolver.ts @@ -1,4 +1,5 @@ import { + AppDatabase, ContributionLink as DbContributionLink, TransactionLink as DbTransactionLink, User as DbUser, @@ -22,7 +23,7 @@ import { Root, } from 'type-graphql' import { IRestResponse } from 'typed-rest-client' -import { In, Point, getConnection } from 'typeorm' +import { EntityNotFoundError, In, Point } from 'typeorm' import { v4 as uuidv4 } from 'uuid' import { UserArgs } from '@arg//UserArgs' @@ -79,15 +80,16 @@ import { isValidPassword } from '@/password/EncryptorUtils' import { encryptPassword, verifyPassword } from '@/password/PasswordEncryptor' import { LogError } from '@/server/LogError' import { Context, getClientTimezoneOffset, getUser } from '@/server/context' -import { backendLogger as logger } from '@/server/logger' import { communityDbUser } from '@/util/communityUser' import { hasElopageBuys } from '@/util/hasElopageBuys' -import { getTimeDurationObject, printTimeDuration } from '@/util/time' +import { durationInMinutesFromDates, getTimeDurationObject, printTimeDuration } from '@/util/time' import { delay } from '@/util/utilities' import random from 'random-bigint' import { randombytes_random } from 'sodium-native' +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '@/graphql/resolver' +import { Logger, getLogger } from 'log4js' import { FULL_CREATION_AVAILABLE } from './const/const' import { Location2Point, Point2Location } from './util/Location2Point' import { authenticateGmsUserPlayground } from './util/authenticateGmsUserPlayground' @@ -105,11 +107,13 @@ import { validateAlias } from './util/validateAlias' const LANGUAGES = ['de', 'en', 'es', 'fr', 'nl'] const DEFAULT_LANGUAGE = 'de' +const db = AppDatabase.getInstance() +const createLogger = () => getLogger(`${LOG4JS_RESOLVER_CATEGORY_NAME}.UserResolver`) const isLanguage = (language: string): boolean => { return LANGUAGES.includes(language) } -const newEmailContact = (email: string, userId: number): DbUserContact => { +const newEmailContact = (email: string, userId: number, logger: Logger): DbUserContact => { logger.trace(`newEmailContact...`) const emailContact = new DbUserContact() emailContact.email = email @@ -122,12 +126,12 @@ const newEmailContact = (email: string, userId: number): DbUserContact => { return emailContact } -export const activationLink = (verificationCode: string): string => { +export const activationLink = (verificationCode: string, logger: Logger): string => { logger.debug(`activationLink(${verificationCode})...`) return CONFIG.EMAIL_LINK_SETPASSWORD + 
verificationCode.toString() } -const newGradidoID = async (): Promise => { +const newGradidoID = async (logger: Logger): Promise => { let gradidoId: string let countIds: number do { @@ -145,14 +149,16 @@ export class UserResolver { @Authorized([RIGHTS.VERIFY_LOGIN]) @Query(() => User) async verifyLogin(@Ctx() context: Context): Promise { + const logger = createLogger() logger.info('verifyLogin...') // TODO refactor and do not have duplicate code with login(see below) const userEntity = getUser(context) + logger.addContext('user', userEntity.id) const user = new User(userEntity) // Elopage Status & Stored PublisherId user.hasElopage = await this.hasElopage(context) - logger.debug(`verifyLogin... successful: ${user.firstName}.${user.lastName}`) + logger.debug(`verifyLogin... successful`) user.klickTipp = await getKlicktippState(userEntity.emailContact.email) return user } @@ -163,31 +169,39 @@ export class UserResolver { @Args() { email, password, publisherId, project }: UnsecureLoginArgs, @Ctx() context: Context, ): Promise { - logger.info(`login with ${email}, ***, ${publisherId}, project=${project} ...`) + const logger = createLogger() + logger.info(`login with ${email.substring(0, 3)}..., project=${project} ...`) email = email.trim().toLowerCase() let dbUser: DbUser try { dbUser = await findUserByEmail(email) + // add pubKey in logger-context for layout-pattern X{user} to print it in each logging message + logger.addContext('user', dbUser.id) + logger.trace('user before login', new UserLoggingView(dbUser)) } catch (e) { // simulate delay which occur on password encryption 650 ms +- 50 rnd await delay(650 + Math.floor(Math.random() * 101) - 50) throw e } - + // TODO: discuss whether we need to log all of these cases if (dbUser.deletedAt) { - throw new LogError('This user was permanently deleted. Contact support for questions', dbUser) + logger.warn('login failed, user was deleted') + throw new Error('This user was permanently deleted.
Contact support for questions') } if (!dbUser.emailContact.emailChecked) { - throw new LogError('The Users email is not validate yet', dbUser) + logger.warn('login failed, user email not checked') + throw new Error('The Users email is not validate yet') } // TODO: at least in test this does not work since `dbUser.password = 0` and `BigInto(0) = 0n` if (dbUser.password === BigInt(0)) { // TODO we want to catch this on the frontend and ask the user to check his emails or resend code - throw new LogError('The User has not set a password yet', dbUser) + logger.warn('login failed, user has not set a password yet') + throw new Error('The User has not set a password yet') } if (!(await verifyPassword(dbUser, password))) { - throw new LogError('No user with this credentials', dbUser) + logger.warn('login failed, wrong password') + throw new Error('No user with this credentials') } // request to humhub and klicktipp run in parallel @@ -215,17 +229,14 @@ export class UserResolver { dbUser.password = await encryptPassword(dbUser, password) await dbUser.save() } - // add pubKey in logger-context for layout-pattern X{user} to print it in each logging message - logger.addContext('user', dbUser.id) logger.debug('validation of login credentials successful...') const user = new User(dbUser) - logger.debug(`user= ${JSON.stringify(user, null, 2)}`) i18n.setLocale(user.language) // Elopage Status & Stored PublisherId user.hasElopage = await this.hasElopage({ ...context, user: dbUser }) - logger.info('user.hasElopage', user.hasElopage) + logger.debug('user.hasElopage', user.hasElopage) if (!user.hasElopage && publisherId) { user.publisherId = publisherId dbUser.publisherId = publisherId @@ -239,7 +250,7 @@ export class UserResolver { await EVENT_USER_LOGIN(dbUser) const projectBranding = await projectBrandingPromise - logger.debug('project branding: ', projectBranding) + logger.debug('project branding: ', projectBranding?.id) // load humhub state if (humhubUserPromise) { try { @@ -258,7 +269,8 @@ } } user.klickTipp = await klicktippStatePromise - logger.info(`successful Login: ${JSON.stringify(user, null, 2)}`) + logger.info('successful Login') + logger.trace('user after login', new UserLoggingView(dbUser)) return user } @@ -266,8 +278,6 @@ @Mutation(() => Boolean) async logout(@Ctx() context: Context): Promise { await EVENT_USER_LOGOUT(getUser(context)) - // remove user from logger context - logger.addContext('user', 'unknown') return true } @@ -286,10 +296,24 @@ project = null, }: CreateUserArgs, ): Promise { - logger.addContext('user', 'unknown') - logger.info( - `createUser(email=${email}, firstName=${firstName}, lastName=${lastName}, language=${language}, publisherId=${publisherId}, redeemCode=${redeemCode}, project=${project})`, - ) + const logger = createLogger() + const shortEmail = email.substring(0, 3) + logger.addContext('email', shortEmail) + + const shortRedeemCode = redeemCode?.substring(0, 6) + const infos = [] + infos.push(`language=${language}`) + if (publisherId) { + infos.push(`publisherId=${publisherId}`) + } + if (redeemCode) { + infos.push(`redeemCode=${shortRedeemCode}`) + } + if (project) { + infos.push(`project=${project}`) + } + logger.info(`createUser(${infos.join(', ')})`) + // TODO: wrong default value (should be null), how does graphql work here? Is it a required field?
// default int publisher_id = 0; @@ -303,15 +327,16 @@ export class UserResolver { email = email.trim().toLowerCase() if (await checkEmailExists(email)) { const foundUser = await findUserByEmail(email) - logger.info('DbUser.findOne', email, foundUser) + logger.info('DbUser.findOne', foundUser.id) if (foundUser) { + logger.addContext('user', foundUser.id) + logger.removeContext('email') // ATTENTION: this logger-message will be exactly expected during tests, next line - logger.info(`User already exists with this email=${email}`) + logger.info(`User already exists`) logger.info( - `Specified username when trying to register multiple times with this email: firstName=${firstName}, lastName=${lastName}`, + `Specified username when trying to register multiple times with this email: firstName=${firstName.substring(0, 4)}, lastName=${lastName.substring(0, 4)}`, ) - // TODO: this is unsecure, but the current implementation of the login server. This way it can be queried if the user with given EMail is existent. const user = new User(communityDbUser) user.id = randombytes_random() % (2048 * 16) // TODO: for a better faking derive id from email so that it will be always the same id when the same email comes in? @@ -323,7 +348,7 @@ export class UserResolver { if (alias && (await validateAlias(alias))) { user.alias = alias } - logger.debug('partly faked user', user) + logger.debug('partly faked user', { id: user.id, gradidoID: user.gradidoID }) await sendAccountMultiRegistrationEmail({ firstName: foundUser.firstName, // this is the real name of the email owner, but just "firstName" would be the name of the new registrant which shall not be passed to the outside @@ -333,9 +358,6 @@ export class UserResolver { }) await EVENT_EMAIL_ACCOUNT_MULTIREGISTRATION(foundUser) - logger.info( - `sendAccountMultiRegistrationEmail by ${firstName} ${lastName} to ${foundUser.firstName} ${foundUser.lastName} <${email}>`, - ) /* uncomment this, when you need the activation link on the console */ // In case EMails are disabled log the activation link for the user logger.info('createUser() faked and send multi registration mail...') @@ -350,7 +372,7 @@ export class UserResolver { select: { logoUrl: true, spaceId: true }, }) } - const gradidoID = await newGradidoID() + const gradidoID = await newGradidoID(logger) const eventRegisterRedeem = Event( EventType.USER_REGISTER_REDEEM, @@ -373,28 +395,28 @@ export class UserResolver { } dbUser.publisherId = publisherId ?? 
0 dbUser.passwordEncryptionType = PasswordEncryptionType.NO_PASSWORD - logger.debug('new dbUser', dbUser) + logger.debug('new dbUser', new UserLoggingView(dbUser)) if (redeemCode) { if (redeemCode.match(/^CL-/)) { const contributionLink = await DbContributionLink.findOne({ where: { code: redeemCode.replace('CL-', '') }, }) - logger.info('redeemCode found contributionLink', contributionLink) if (contributionLink) { + logger.info('redeemCode found contributionLink', contributionLink.id) dbUser.contributionLinkId = contributionLink.id eventRegisterRedeem.involvedContributionLink = contributionLink } } else { const transactionLink = await DbTransactionLink.findOne({ where: { code: redeemCode } }) - logger.info('redeemCode found transactionLink', transactionLink) if (transactionLink) { + logger.info('redeemCode found transactionLink', transactionLink.id) dbUser.referrerId = transactionLink.userId eventRegisterRedeem.involvedTransactionLink = transactionLink } } } - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() await queryRunner.startTransaction('REPEATABLE READ') let projectBranding: ProjectBranding | null | undefined @@ -402,7 +424,7 @@ export class UserResolver { dbUser = await queryRunner.manager.save(dbUser).catch((error) => { throw new LogError('Error while saving dbUser', error) }) - let emailContact = newEmailContact(email, dbUser.id) + let emailContact = newEmailContact(email, dbUser.id, logger) emailContact = await queryRunner.manager.save(emailContact).catch((error) => { throw new LogError('Error while saving user email contact', error) }) @@ -429,7 +451,7 @@ export class UserResolver { timeDurationObject: getTimeDurationObject(CONFIG.EMAIL_CODE_VALID_TIME), logoUrl: projectBranding?.logoUrl, }) - logger.info(`sendAccountActivationEmail of ${firstName}.${lastName} to ${email}`) + logger.info('sendAccountActivationEmail') await EVENT_EMAIL_CONFIRMATION(dbUser) @@ -483,18 +505,33 @@ export class UserResolver { @Authorized([RIGHTS.SEND_RESET_PASSWORD_EMAIL]) @Mutation(() => Boolean) async forgotPassword(@Arg('email') email: string): Promise { - logger.addContext('user', 'unknown') - logger.info(`forgotPassword(${email})...`) + const logger = createLogger() + const shortEmail = email.substring(0, 3) + logger.addContext('email', shortEmail) + logger.info('forgotPassword...') email = email.trim().toLowerCase() - const user = await findUserByEmail(email).catch((error) => { - logger.warn(`fail on find UserContact per ${email} because: ${error}`) - }) + let user: DbUser + try { + user = await findUserByEmail(email) + logger.removeContext('email') + logger.addContext('user', user.id) + } catch (_e) { + logger.warn(`fail on find UserContact`) + return true + } - if (!user || user.deletedAt) { - logger.warn(`no user found with ${email}`) + if (user.deletedAt) { + logger.warn(`user was deleted`) return true } if (!canEmailResend(user.emailContact.updatedAt || user.emailContact.createdAt)) { + const diff = durationInMinutesFromDates( + user.emailContact.updatedAt || user.emailContact.createdAt, + new Date(), + ) + logger.warn( + `email already sent ${printTimeDuration(diff)} ago, min wait time: ${printTimeDuration(CONFIG.EMAIL_CODE_REQUEST_TIME)}`, + ) throw new LogError( `Email already sent less than ${printTimeDuration(CONFIG.EMAIL_CODE_REQUEST_TIME)} ago`, ) @@ -505,21 +542,19 @@ export class UserResolver { user.emailContact.emailVerificationCode = random(64).toString() 
user.emailContact.emailOptInTypeId = OptInType.EMAIL_OPT_IN_RESET_PASSWORD await user.emailContact.save().catch(() => { - throw new LogError('Unable to save email verification code', user.emailContact) + throw new LogError('Unable to save email verification code', user.emailContact.id) }) - logger.info('optInCode for', email, user.emailContact) - await sendResetPasswordEmail({ firstName: user.firstName, lastName: user.lastName, email, language: user.language, - resetLink: activationLink(user.emailContact.emailVerificationCode), + resetLink: activationLink(user.emailContact.emailVerificationCode, logger), timeDurationObject: getTimeDurationObject(CONFIG.EMAIL_CODE_VALID_TIME), }) - logger.info(`forgotPassword(${email}) successful...`) + logger.info(`forgotPassword successful...`) await EVENT_EMAIL_FORGOT_PASSWORD(user) return true @@ -531,7 +566,8 @@ export class UserResolver { @Arg('code') code: string, @Arg('password') password: string, ): Promise { - logger.info(`setPassword(${code}, ***)...`) + const logger = createLogger() + logger.info(`setPassword...`) // Validate Password if (!isValidPassword(password)) { throw new LogError( @@ -543,8 +579,11 @@ export class UserResolver { where: { emailVerificationCode: code }, relations: ['user'], }).catch(() => { - throw new LogError('Could not login with emailVerificationCode') + // code wasn't in db, so we can write it into log without hesitation + logger.warn(`invalid emailVerificationCode=${code}`) + throw new Error('Could not login with emailVerificationCode') }) + logger.addContext('user', userContact.user.id) logger.debug('userContact loaded...') // Code is only valid for `CONFIG.EMAIL_CODE_VALID_TIME` minutes if (!isEmailVerificationCodeValid(userContact.updatedAt || userContact.createdAt)) { @@ -566,7 +605,7 @@ export class UserResolver { user.password = await encryptPassword(user, password) logger.debug('User credentials updated ...') - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() await queryRunner.startTransaction('REPEATABLE READ') @@ -594,9 +633,7 @@ export class UserResolver { if ((userContact.emailOptInTypeId as OptInType) === OptInType.EMAIL_OPT_IN_REGISTER) { try { await subscribe(userContact.email, user.language, user.firstName, user.lastName) - logger.debug( - `subscribe(${userContact.email}, ${user.language}, ${user.firstName}, ${user.lastName})`, - ) + logger.debug('Success subscribe to klicktipp') } catch (e) { logger.error('Error subscribing to klicktipp', e) } @@ -609,18 +646,21 @@ export class UserResolver { @Authorized([RIGHTS.QUERY_OPT_IN]) @Query(() => Boolean) async queryOptIn(@Arg('optIn') optIn: string): Promise { - logger.info(`queryOptIn(${optIn})...`) + const logger = createLogger() + logger.addContext('optIn', optIn.substring(0, 4)) + logger.info(`queryOptIn...`) const userContact = await DbUserContact.findOneOrFail({ where: { emailVerificationCode: optIn }, }) - logger.debug('found optInCode', userContact) + logger.addContext('user', userContact.userId) + logger.debug('found optInCode', userContact.id) // Code is only valid for `CONFIG.EMAIL_CODE_VALID_TIME` minutes if (!isEmailVerificationCodeValid(userContact.updatedAt || userContact.createdAt)) { throw new LogError( `Email was sent more than ${printTimeDuration(CONFIG.EMAIL_CODE_VALID_TIME)} ago`, ) } - logger.info(`queryOptIn(${optIn}) successful...`) + logger.info(`queryOptIn successful...`) return true } @@ -657,10 +697,27 @@ export class UserResolver { 
gmsLocation, gmsPublishLocation, } = updateUserInfosArgs - logger.info( - `updateUserInfos(${firstName}, ${lastName}, ${alias}, ${language}, ***, ***, ${hideAmountGDD}, ${hideAmountGDT}, ${gmsAllowed}, ${gmsPublishName}, ${gmsLocation}, ${gmsPublishLocation})...`, - ) const user = getUser(context) + const logger = createLogger() + logger.addContext('user', user.id) + // log only if a value is set + logger.info(`updateUserInfos...`, { + firstName: firstName !== undefined, + lastName: lastName !== undefined, + alias: alias !== undefined, + language: language !== undefined, + password: password !== undefined, + passwordNew: passwordNew !== undefined, + hideAmountGDD: hideAmountGDD !== undefined, + hideAmountGDT: hideAmountGDT !== undefined, + humhubAllowed: humhubAllowed !== undefined, + gmsAllowed: gmsAllowed !== undefined, + gmsPublishName: gmsPublishName !== undefined, + humhubPublishName: humhubPublishName !== undefined, + gmsLocation: gmsLocation !== undefined, + gmsPublishLocation: gmsPublishLocation !== undefined, + }) + const updateUserInGMS = compareGmsRelevantUserSettings(user, updateUserInfosArgs) const publishNameLogic = new PublishNameLogic(user) const oldHumhubUsername = publishNameLogic.getUserIdentifier( @@ -683,7 +740,8 @@ export class UserResolver { if (language) { if (!isLanguage(language)) { - throw new LogError('Given language is not a valid language', language) + logger.warn('try to set unsupported language', language) + throw new LogError('Given language is not a valid language or not supported') } user.language = language i18n.setLocale(language) @@ -692,12 +750,15 @@ export class UserResolver { if (password && passwordNew) { // Validate Password if (!isValidPassword(passwordNew)) { - throw new LogError( + // TODO: log which rule(s) wasn't met + logger.warn('try to set invalid password') + throw new Error( 'Please enter a valid password with at least 8 characters, upper and lower case letters, at least one number and one special character!', ) } if (!(await verifyPassword(user, password))) { + logger.debug('old password is invalid') throw new LogError(`Old password is invalid`) } @@ -735,7 +796,7 @@ export class UserResolver { // } catch (err) { // console.log('error:', err) // } - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() await queryRunner.startTransaction('REPEATABLE READ') @@ -761,7 +822,7 @@ export class UserResolver { logger.debug(`changed user-settings relevant for gms-user update...`) const homeCom = await getHomeCommunity() if (homeCom.gmsApiKey !== null) { - logger.debug(`send User to Gms...`, user) + logger.debug(`send User to Gms...`) await sendUserToGms(user, homeCom) logger.debug(`sendUserToGms successfully.`) } @@ -783,18 +844,22 @@ export class UserResolver { @Authorized([RIGHTS.HAS_ELOPAGE]) @Query(() => Boolean) async hasElopage(@Ctx() context: Context): Promise { - logger.info(`hasElopage()...`) - const userEntity = getUser(context) - const elopageBuys = hasElopageBuys(userEntity.emailContact.email) - logger.debug('has ElopageBuys', elopageBuys) + const dbUser = getUser(context) + const logger = createLogger() + logger.addContext('user', dbUser.id) + const elopageBuys = await hasElopageBuys(dbUser.emailContact.email) + logger.info(`has Elopage (ablify): ${elopageBuys}`) return elopageBuys } @Authorized([RIGHTS.GMS_USER_PLAYGROUND]) @Query(() => GmsUserAuthenticationResult) async authenticateGmsUserSearch(@Ctx() context: Context): Promise { - 
logger.info(`authenticateGmsUserSearch()...`) const dbUser = getUser(context) + const logger = createLogger() + logger.addContext('user', dbUser.id) + logger.info(`authenticateGmsUserSearch()...`) + let result = new GmsUserAuthenticationResult() if (context.token) { const homeCom = await getHomeCommunity() @@ -813,8 +878,11 @@ export class UserResolver { @Authorized([RIGHTS.GMS_USER_PLAYGROUND]) @Query(() => UserLocationResult) async userLocation(@Ctx() context: Context): Promise { - logger.info(`userLocation()...`) const dbUser = getUser(context) + const logger = createLogger() + logger.addContext('user', dbUser.id) + logger.info(`userLocation()...`) + const result = new UserLocationResult() if (context.token) { const homeCom = await getHomeCommunity() @@ -833,8 +901,11 @@ export class UserResolver { @Ctx() context: Context, @Arg('project', () => String, { nullable: true }) project?: string | null, ): Promise { - logger.info(`authenticateHumhubAutoLogin()...`) const dbUser = getUser(context) + const logger = createLogger() + logger.addContext('user', dbUser.id) + logger.info(`authenticateHumhubAutoLogin()...`) + const humhubClient = HumHubClient.getInstance() if (!humhubClient) { throw new LogError('cannot create humhub client') @@ -1027,13 +1098,15 @@ export class UserResolver { @Arg('email') email: string, @Ctx() context: Context, ): Promise { + const logger = createLogger() email = email.trim().toLowerCase() - // const user = await dbUser.findOne({ id: emailContact.userId }) const user = await findUserByEmail(email) + logger.addContext('user', user.id) + logger.info('sendActivationEmail...') if (user.deletedAt || user.emailContact.deletedAt) { - throw new LogError('User with given email contact is deleted', email) + logger.warn('call for activation of deleted user') + throw new Error('User with given email contact is deleted') } - user.emailContact.emailResendCount++ await user.emailContact.save() @@ -1042,7 +1115,7 @@ export class UserResolver { lastName: user.lastName, email, language: user.language, - activationLink: activationLink(user.emailContact.emailVerificationCode), + activationLink: activationLink(user.emailContact.emailVerificationCode, logger), timeDurationObject: getTimeDurationObject(CONFIG.EMAIL_CODE_VALID_TIME), }) @@ -1088,16 +1161,25 @@ export class UserResolver { } export async function findUserByEmail(email: string): Promise { - const dbUser = await DbUser.findOneOrFail({ - where: { - emailContact: { email }, - }, - withDeleted: true, - relations: { userRoles: true, emailContact: true }, - }).catch(() => { - throw new LogError('No user with this credentials', email) - }) - return dbUser + try { + const dbUser = await DbUser.findOneOrFail({ + where: { + emailContact: { email }, + }, + withDeleted: true, + relations: { userRoles: true, emailContact: true }, + }) + return dbUser + } catch (e) { + const logger = createLogger() + if (e instanceof EntityNotFoundError || (e as Error).name === 'EntityNotFoundError') { + // TODO: discuss if it is ok to print email in log for this case + logger.warn(`findUserByEmail failed, user with email=${email} not found`) + } else { + logger.error(`findUserByEmail failed, unknown error: ${e}`) + } + throw new Error('No user with this credentials') + } } async function checkEmailExists(email: string): Promise { diff --git a/backend/src/graphql/resolver/const/const.ts b/backend/src/graphql/resolver/const/const.ts index b97694221..2d4532cf5 100644 --- a/backend/src/graphql/resolver/const/const.ts +++ 
b/backend/src/graphql/resolver/const/const.ts @@ -10,3 +10,5 @@ export const CONTRIBUTIONLINK_NAME_MAX_CHARS = 100 export const CONTRIBUTIONLINK_NAME_MIN_CHARS = 5 export const MEMO_MAX_CHARS = 255 export const MEMO_MIN_CHARS = 5 +export const DEFAULT_PAGINATION_PAGE_SIZE = 25 +export const FRONTEND_CONTRIBUTIONS_ITEM_ANCHOR_PREFIX = 'contributionListItem-' diff --git a/backend/src/graphql/resolver/index.ts b/backend/src/graphql/resolver/index.ts new file mode 100644 index 000000000..0ee59f7f6 --- /dev/null +++ b/backend/src/graphql/resolver/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' + +export const LOG4JS_RESOLVER_CATEGORY_NAME = `${LOG4JS_BASE_CATEGORY_NAME}.graphql.resolver` diff --git a/backend/src/graphql/resolver/semaphore.test.ts b/backend/src/graphql/resolver/semaphore.test.ts index 7ed1ea40e..3917efd31 100644 --- a/backend/src/graphql/resolver/semaphore.test.ts +++ b/backend/src/graphql/resolver/semaphore.test.ts @@ -2,7 +2,7 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { Community as DbCommunity } from 'database' import { Decimal } from 'decimal.js-light' import { GraphQLError } from 'graphql' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { v4 as uuidv4 } from 'uuid' import { cleanDB, contributionDateFormatter, testEnvironment } from '@test/helpers' @@ -25,11 +25,11 @@ import { peterLustig } from '@/seeds/users/peter-lustig' jest.mock('@/password/EncryptorUtils') let mutate: ApolloServerTestClient['mutate'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { @@ -41,7 +41,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('semaphore', () => { diff --git a/backend/src/graphql/resolver/util/authenticateGmsUserPlayground.ts b/backend/src/graphql/resolver/util/authenticateGmsUserPlayground.ts index 5d7b9f723..01d423993 100644 --- a/backend/src/graphql/resolver/util/authenticateGmsUserPlayground.ts +++ b/backend/src/graphql/resolver/util/authenticateGmsUserPlayground.ts @@ -3,8 +3,13 @@ import { User as DbUser } from 'database' import { verifyAuthToken } from '@/apis/gms/GmsClient' import { CONFIG } from '@/config' import { GmsUserAuthenticationResult } from '@/graphql/model/GmsUserAuthenticationResult' -import { backendLogger as logger } from '@/server/logger' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' import { ensureUrlEndsWithSlash } from '@/util/utilities' +import { getLogger } from 'log4js' + +const logger = getLogger( + `${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.authenticateGmsUserPlayground`, +) export async function authenticateGmsUserPlayground( _apiKey: string, diff --git a/backend/src/graphql/resolver/util/communities.ts b/backend/src/graphql/resolver/util/communities.ts index 31189bebc..028116c99 100644 --- a/backend/src/graphql/resolver/util/communities.ts +++ b/backend/src/graphql/resolver/util/communities.ts @@ -1,10 +1,13 @@ -import { Community as DbCommunity, FederatedCommunity as DbFederatedCommunity } from 'database' +import { + AppDatabase, + Community as DbCommunity, + FederatedCommunity as DbFederatedCommunity, +} from 'database' import { FindOneOptions, IsNull, Not } from 'typeorm' import { Paginated } from '@arg/Paginated' import { LogError } from '@/server/LogError' -import { Connection } from 
'@/typeorm/connection' function findWithCommunityIdentifier(communityIdentifier: string): FindOneOptions { return { @@ -115,14 +118,15 @@ export async function getAllCommunities({ pageSize = 25, currentPage = 1, }: Paginated): Promise { - const connection = await Connection.getInstance() - if (!connection) { + const connection = AppDatabase.getInstance() + if (!connection.isConnected()) { throw new LogError('Cannot connect to db') } // foreign: 'ASC', // createdAt: 'DESC', // lastAnnouncedAt: 'DESC', const result = await connection + .getDataSource() .getRepository(DbFederatedCommunity) .createQueryBuilder('federatedCommunity') .leftJoinAndSelect('federatedCommunity.community', 'community') diff --git a/backend/src/graphql/resolver/util/compareGmsRelevantUserSettings.ts b/backend/src/graphql/resolver/util/compareGmsRelevantUserSettings.ts index cc53ff5a1..80a1e608a 100644 --- a/backend/src/graphql/resolver/util/compareGmsRelevantUserSettings.ts +++ b/backend/src/graphql/resolver/util/compareGmsRelevantUserSettings.ts @@ -5,10 +5,15 @@ import { UpdateUserInfosArgs } from '@/graphql/arg/UpdateUserInfosArgs' import { GmsPublishLocationType } from '@/graphql/enum/GmsPublishLocationType' import { PublishNameType } from '@/graphql/enum/PublishNameType' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '.' import { Point2Location } from './Location2Point' +const logger = getLogger( + `${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.compareGmsRelevantUserSettings`, +) + export function compareGmsRelevantUserSettings( orgUser: DbUser, updateUserInfosArgs: UpdateUserInfosArgs, diff --git a/backend/src/graphql/resolver/util/contributions.ts b/backend/src/graphql/resolver/util/contributions.ts index 58e0914ba..adf9da800 100644 --- a/backend/src/graphql/resolver/util/contributions.ts +++ b/backend/src/graphql/resolver/util/contributions.ts @@ -1,7 +1,12 @@ +import { CONFIG } from '@/config' import { Order } from '@/graphql/enum/Order' +import { + DEFAULT_PAGINATION_PAGE_SIZE, + FRONTEND_CONTRIBUTIONS_ITEM_ANCHOR_PREFIX, +} from '@/graphql/resolver/const/const' import { Paginated } from '@arg/Paginated' import { Contribution as DbContribution } from 'database' -import { FindManyOptions, In } from 'typeorm' +import { FindManyOptions, In, MoreThan } from 'typeorm' // TODO: combine with Pagination class for all queries to use function buildPaginationOptions(paginated: Paginated): FindManyOptions { @@ -91,3 +96,18 @@ export const loadAllContributions = async ( }) */ } + +export const contributionFrontendLink = async ( + contributionId: number, + _createdAt: Date, +): Promise => { + // TODO: page is sometimes wrong, use page 1 for now, and fix later with more time at hand + // simplified, don't account for order by id, so when the nearly impossible case occur that createdAt is the same for two contributions, + // maybe it is the wrong page + //const countBefore = await DbContribution.count({ + // where: { createdAt: MoreThan(createdAt) }, + //}) + // const page = Math.floor(countBefore / DEFAULT_PAGINATION_PAGE_SIZE) + 1 + const anchor = `${FRONTEND_CONTRIBUTIONS_ITEM_ANCHOR_PREFIX}${contributionId}` + return `${CONFIG.COMMUNITY_URL}/contributions/own-contributions/1#${anchor}` +} diff --git a/backend/src/graphql/resolver/util/creations.test.ts b/backend/src/graphql/resolver/util/creations.test.ts index 9a1fddd22..26c09fb79 100644 --- 
a/backend/src/graphql/resolver/util/creations.test.ts +++ b/backend/src/graphql/resolver/util/creations.test.ts @@ -1,6 +1,6 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { Contribution, User } from 'database' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { cleanDB, contributionDateFormatter, testEnvironment } from '@test/helpers' @@ -17,11 +17,11 @@ jest.mock('@/password/EncryptorUtils') CONFIG.HUMHUB_ACTIVE = false let mutate: ApolloServerTestClient['mutate'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { @@ -33,7 +33,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) const setZeroHours = (date: Date): Date => { diff --git a/backend/src/graphql/resolver/util/creations.ts b/backend/src/graphql/resolver/util/creations.ts index 74c2987cf..02439e10a 100644 --- a/backend/src/graphql/resolver/util/creations.ts +++ b/backend/src/graphql/resolver/util/creations.ts @@ -1,13 +1,17 @@ import { Contribution } from 'database' import { Decimal } from 'decimal.js-light' -import { getConnection } from 'typeorm' import { OpenCreation } from '@model/OpenCreation' import { FULL_CREATION_AVAILABLE, MAX_CREATION_AMOUNT } from '@/graphql/resolver/const/const' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { getFirstDayOfPreviousNMonth } from '@/util/utilities' +import { AppDatabase } from 'database' +import { getLogger } from 'log4js' + +const db = AppDatabase.getInstance() +const logger = getLogger(`${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.creations`) interface CreationMap { id: number @@ -46,7 +50,7 @@ export const getUserCreations = async ( const months = getCreationMonths(timezoneOffset) logger.trace('getUserCreations months', months) - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() const dateFilter = 'last_day(curdate() - interval 3 month) + interval 1 day' diff --git a/backend/src/graphql/resolver/util/findContributions.ts b/backend/src/graphql/resolver/util/findContributions.ts index 1334f468b..dd89b2199 100644 --- a/backend/src/graphql/resolver/util/findContributions.ts +++ b/backend/src/graphql/resolver/util/findContributions.ts @@ -1,9 +1,8 @@ -import { Contribution as DbContribution } from 'database' +import { AppDatabase, Contribution as DbContribution } from 'database' import { Brackets, In, IsNull, LessThanOrEqual, Like, Not, SelectQueryBuilder } from 'typeorm' import { Paginated } from '@arg/Paginated' import { SearchContributionsFilterArgs } from '@arg/SearchContributionsFilterArgs' -import { Connection } from '@typeorm/connection' import { LogError } from '@/server/LogError' @@ -32,11 +31,14 @@ export const findContributions = async ( relations: Relations | undefined = undefined, countOnly = false, ): Promise<[DbContribution[], number]> => { - const connection = await Connection.getInstance() - if (!connection) { + const connection = AppDatabase.getInstance() + if (!connection.isConnected()) { throw new LogError('Cannot connect to db') } - const queryBuilder = connection.getRepository(DbContribution).createQueryBuilder('Contribution') + const queryBuilder = connection + 
.getDataSource() + .getRepository(DbContribution) + .createQueryBuilder('Contribution') if (relations) { joinRelationsRecursive(relations, queryBuilder, 'Contribution') } diff --git a/backend/src/graphql/resolver/util/findUserByIdentifiers.test.ts b/backend/src/graphql/resolver/util/findUserByIdentifiers.test.ts index 74e9d27e8..94010b846 100644 --- a/backend/src/graphql/resolver/util/findUserByIdentifiers.test.ts +++ b/backend/src/graphql/resolver/util/findUserByIdentifiers.test.ts @@ -1,6 +1,6 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { Community as DbCommunity, User as DbUser } from 'database' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { cleanDB, testEnvironment } from '@test/helpers' @@ -14,11 +14,11 @@ import { findUserByIdentifier } from './findUserByIdentifier' jest.mock('@/password/EncryptorUtils') -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { @@ -29,7 +29,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('graphql/resolver/util/findUserByIdentifier', () => { diff --git a/backend/src/graphql/resolver/util/getKlicktippState.ts b/backend/src/graphql/resolver/util/getKlicktippState.ts index e006ff20a..719286223 100644 --- a/backend/src/graphql/resolver/util/getKlicktippState.ts +++ b/backend/src/graphql/resolver/util/getKlicktippState.ts @@ -1,7 +1,10 @@ import { KlickTipp } from '@model/KlickTipp' import { getKlickTippUser } from '@/apis/KlicktippController' -import { klickTippLogger as logger } from '@/server/logger' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' +import { getLogger } from 'log4js' + +const logger = getLogger(`${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.getKlicktippState`) export const getKlicktippState = async (email: string): Promise => { try { @@ -10,7 +13,7 @@ export const getKlicktippState = async (email: string): Promise => { return new KlickTipp(klickTippUser.status === 'Subscribed') } } catch (err) { - logger.error('There is no klicktipp user for email', email, err) + logger.error('There is no klicktipp user for email', email.substring(0, 3), '...', err) } return new KlickTipp(false) } diff --git a/backend/src/graphql/resolver/util/index.ts b/backend/src/graphql/resolver/util/index.ts new file mode 100644 index 000000000..bf0fba6d1 --- /dev/null +++ b/backend/src/graphql/resolver/util/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_RESOLVER_CATEGORY_NAME } from '@/graphql/resolver' + +export const LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME = `${LOG4JS_RESOLVER_CATEGORY_NAME}.util` diff --git a/backend/src/graphql/resolver/util/processXComSendCoins.ts b/backend/src/graphql/resolver/util/processXComSendCoins.ts index 67b6d71bd..572a50f97 100644 --- a/backend/src/graphql/resolver/util/processXComSendCoins.ts +++ b/backend/src/graphql/resolver/util/processXComSendCoins.ts @@ -14,13 +14,16 @@ import { SendCoinsResult } from '@/federation/client/1_0/model/SendCoinsResult' import { SendCoinsClientFactory } from '@/federation/client/SendCoinsClientFactory' import { PendingTransactionState } from '@/graphql/enum/PendingTransactionState' import { TransactionTypeId } from '@/graphql/enum/TransactionTypeId' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' import { LogError } from '@/server/LogError' -import { 
backendLogger as logger } from '@/server/logger' import { calculateSenderBalance } from '@/util/calculateSenderBalance' import { fullName } from '@/util/utilities' +import { getLogger } from 'log4js' import { settlePendingSenderTransaction } from './settlePendingSenderTransaction' +const logger = getLogger(`${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.processXComSendCoins`) + export async function processXComPendingSendCoins( receiverCom: DbCommunity, senderCom: DbCommunity, @@ -63,11 +66,11 @@ export async function processXComPendingSendCoins( if (!senderBalance) { throw new LogError('User has not enough GDD or amount is < 0', senderBalance) } - logger.debug(`X-Com: calculated senderBalance = `, senderBalance) + logger.debug(`calculated senderBalance = `, senderBalance) const receiverFCom = await DbFederatedCommunity.findOneOrFail({ where: { - publicKey: receiverCom.publicKey, + publicKey: Buffer.from(receiverCom.publicKey), apiVersion: CONFIG.FEDERATION_BACKEND_SEND_ON_API, }, }) @@ -88,11 +91,11 @@ export async function processXComPendingSendCoins( args.senderUserUuid = sender.gradidoID args.senderUserName = fullName(sender.firstName, sender.lastName) args.senderAlias = sender.alias - logger.debug(`X-Com: ready for voteForSendCoins with args=`, args) + logger.debug(`ready for voteForSendCoins with args=`, args) voteResult = await client.voteForSendCoins(args) - logger.debug(`X-Com: returned from voteForSendCoins:`, voteResult) + logger.debug(`returned from voteForSendCoins:`, voteResult) if (voteResult.vote) { - logger.debug(`X-Com: prepare pendingTransaction for sender...`) + logger.debug(`prepare pendingTransaction for sender...`) // writing the pending transaction on receiver-side was successfull, so now write the sender side try { const pendingTx = DbPendingTransaction.create() @@ -120,20 +123,20 @@ export async function processXComPendingSendCoins( pendingTx.userId = sender.id pendingTx.userGradidoID = sender.gradidoID pendingTx.userName = fullName(sender.firstName, sender.lastName) - logger.debug(`X-Com: initialized sender pendingTX=`, pendingTx) + logger.debug(`initialized sender pendingTX=`, pendingTx) await DbPendingTransaction.insert(pendingTx) - logger.debug(`X-Com: sender pendingTx successfully inserted...`) + logger.debug(`sender pendingTx successfully inserted...`) } catch (err) { logger.error(`Error in writing sender pending transaction: `, err) // revert the existing pending transaction on receiver side let revertCount = 0 - logger.debug(`X-Com: first try to revertSendCoins of receiver`) + logger.debug(`first try to revertSendCoins of receiver`) do { if (await client.revertSendCoins(args)) { logger.debug(`revertSendCoins()-1_0... successfull after revertCount=`, revertCount) // treat revertingSendCoins as an error of the whole sendCoins-process - throw new LogError('Error in writing sender pending transaction: `, err') + throw new LogError('Error in writing sender pending transaction: ', err) } } while (CONFIG.FEDERATION_XCOM_MAXREPEAT_REVERTSENDCOINS > revertCount++) throw new LogError( @@ -143,10 +146,7 @@ export async function processXComPendingSendCoins( } logger.debug(`voteForSendCoins()-1_0... 
successfull`) } else { - logger.error( - `X-Com: break with error on writing pendingTransaction for recipient...`, - voteResult, - ) + logger.error(`break with error on writing pendingTransaction for recipient...`, voteResult) } return voteResult } @@ -168,7 +168,7 @@ export async function processXComCommittingSendCoins( const sendCoinsResult = new SendCoinsResult() try { logger.debug( - `XCom: processXComCommittingSendCoins...`, + `processXComCommittingSendCoins...`, receiverCom, senderCom, creationDate, @@ -191,10 +191,10 @@ memo, }) if (pendingTx) { - logger.debug(`X-Com: find pending Tx for settlement:`, pendingTx) + logger.debug('find pending Tx for settlement:', pendingTx) const receiverFCom = await DbFederatedCommunity.findOneOrFail({ where: { - publicKey: receiverCom.publicKey, + publicKey: Buffer.from(receiverCom.publicKey), apiVersion: CONFIG.FEDERATION_BACKEND_SEND_ON_API, }, @@ -218,9 +218,9 @@ args.senderUserName = pendingTx.userName } args.senderAlias = sender.alias - logger.debug(`X-Com: ready for settleSendCoins with args=`, args) + logger.debug('ready for settleSendCoins with args=', args) const acknowledge = await client.settleSendCoins(args) - logger.debug(`X-Com: returnd from settleSendCoins:`, acknowledge) + logger.debug('returned from settleSendCoins:', acknowledge) if (acknowledge) { // settle the pending transaction on receiver-side was successfull, so now settle the sender side try { @@ -244,22 +244,22 @@ sendCoinsResult.recipAlias = recipient.recipAlias } } catch (err) { - logger.error(`Error in writing sender pending transaction: `, err) + logger.error('Error in writing sender pending transaction: ', err) // revert the existing pending transaction on receiver side let revertCount = 0 - logger.debug(`X-Com: first try to revertSetteledSendCoins of receiver`) + logger.debug('first try to revertSettledSendCoins of receiver') do { if (await client.revertSettledSendCoins(args)) { logger.debug( - `revertSettledSendCoins()-1_0... successfull after revertCount=`, + 'revertSettledSendCoins()-1_0... 
successful after revertCount=', revertCount, ) // treat revertingSettledSendCoins as an error of the whole sendCoins-process - throw new LogError('Error in settle sender pending transaction: `, err') + throw new LogError('Error in settle sender pending transaction: ', err) } } while (CONFIG.FEDERATION_XCOM_MAXREPEAT_REVERTSENDCOINS > revertCount++) throw new LogError( - `Error in reverting receiver pending transaction even after revertCount=`, + 'Error in reverting receiver pending transaction even after revertCount=', revertCount, ) } @@ -267,7 +267,7 @@ } } } catch (err) { - logger.error(`Error:`, err) + logger.error('Error: ', err) sendCoinsResult.vote = false } return sendCoinsResult diff --git a/backend/src/graphql/resolver/util/sendTransactionsToDltConnector.test.ts b/backend/src/graphql/resolver/util/sendTransactionsToDltConnector.test.ts index 151b583f5..4dd1672af 100644 --- a/backend/src/graphql/resolver/util/sendTransactionsToDltConnector.test.ts +++ b/backend/src/graphql/resolver/util/sendTransactionsToDltConnector.test.ts @@ -5,11 +5,11 @@ import { Decimal } from 'decimal.js-light' // import { Response } from 'graphql-request/dist/types' import { GraphQLClient } from 'graphql-request' import { Response } from 'graphql-request/dist/types' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { v4 as uuidv4 } from 'uuid' import { cleanDB, testEnvironment } from '@test/helpers' -import { i18n as localization, logger } from '@test/testSetup' +import { i18n as localization } from '@test/testSetup' import { CONFIG } from '@/config' import { TransactionTypeId } from '@/graphql/enum/TransactionTypeId' @@ -20,11 +20,17 @@ import { bibiBloxberg } from '@/seeds/users/bibi-bloxberg' import { bobBaumeister } from '@/seeds/users/bob-baumeister' import { peterLustig } from '@/seeds/users/peter-lustig' import { raeuberHotzenplotz } from '@/seeds/users/raeuber-hotzenplotz' +import { getLogger } from 'config-schema/test/testSetup' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '.' 
import { sendTransactionsToDltConnector } from './sendTransactionsToDltConnector' jest.mock('@/password/EncryptorUtils') +const logger = getLogger( + `${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.sendTransactionsToDltConnector`, +) + /* // Mock the GraphQLClient jest.mock('graphql-request', () => { @@ -328,11 +334,11 @@ async function createTxReceive1FromSend3(verified: boolean): Promise { @@ -343,7 +349,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('create and send Transactions to DltConnector', () => { @@ -417,7 +423,7 @@ describe('create and send Transactions to DltConnector', () => { ]), ) - expect(logger.info).nthCalledWith(3, 'sending to DltConnector currently not configured...') + expect(logger.info).nthCalledWith(2, 'sending to DltConnector currently not configured...') }) }) diff --git a/backend/src/graphql/resolver/util/sendTransactionsToDltConnector.ts b/backend/src/graphql/resolver/util/sendTransactionsToDltConnector.ts index 010188dca..98e53456e 100644 --- a/backend/src/graphql/resolver/util/sendTransactionsToDltConnector.ts +++ b/backend/src/graphql/resolver/util/sendTransactionsToDltConnector.ts @@ -3,8 +3,13 @@ import { IsNull } from 'typeorm' import { DltConnectorClient } from '@dltConnector/DltConnectorClient' -import { backendLogger as logger } from '@/server/logger' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' import { Monitor, MonitorNames } from '@/util/Monitor' +import { getLogger } from 'log4js' + +const logger = getLogger( + `${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.sendTransactionsToDltConnector`, +) export async function sendTransactionsToDltConnector(): Promise { logger.info('sendTransactionsToDltConnector...') @@ -35,7 +40,7 @@ export async function sendTransactionsToDltConnector(): Promise { if (result) { dltTx.messageId = 'sended' await DltTransaction.save(dltTx) - logger.info('store messageId=%s in dltTx=%d', dltTx.messageId, dltTx.id) + logger.info(`store messageId=${dltTx.messageId} in dltTx=${dltTx.id}`) } } catch (e) { logger.error( diff --git a/backend/src/graphql/resolver/util/sendUserToGms.ts b/backend/src/graphql/resolver/util/sendUserToGms.ts index 26ac070d5..835d36343 100644 --- a/backend/src/graphql/resolver/util/sendUserToGms.ts +++ b/backend/src/graphql/resolver/util/sendUserToGms.ts @@ -3,8 +3,11 @@ import { Community as DbCommunity, User as DbUser } from 'database' import { createGmsUser, updateGmsUser } from '@/apis/gms/GmsClient' import { GmsUser } from '@/apis/gms/model/GmsUser' import { CONFIG } from '@/config' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' + +const logger = getLogger(`${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.sendUserToGms`) export async function sendUserToGms( user: DbUser, diff --git a/backend/src/graphql/resolver/util/settlePendingSenderTransaction.ts b/backend/src/graphql/resolver/util/settlePendingSenderTransaction.ts index 429c9c08b..81f1d5042 100644 --- a/backend/src/graphql/resolver/util/settlePendingSenderTransaction.ts +++ b/backend/src/graphql/resolver/util/settlePendingSenderTransaction.ts @@ -1,20 +1,25 @@ import { + AppDatabase, Community as DbCommunity, PendingTransaction as DbPendingTransaction, User as DbUser, Transaction as dbTransaction, } from 'database' import { Decimal } from 'decimal.js-light' 
-import { getConnection } from 'typeorm' import { PendingTransactionState } from '@/graphql/enum/PendingTransactionState' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { TRANSACTIONS_LOCK } from '@/util/TRANSACTIONS_LOCK' import { calculateSenderBalance } from '@/util/calculateSenderBalance' - +import { getLogger } from 'log4js' import { getLastTransaction } from './getLastTransaction' +const db = AppDatabase.getInstance() +const logger = getLogger( + `${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.settlePendingSenderTransaction`, +) + export async function settlePendingSenderTransaction( homeCom: DbCommunity, senderUser: DbUser, @@ -23,13 +28,13 @@ export async function settlePendingSenderTransaction( // TODO: synchronisation with TRANSACTION_LOCK of federation-modul necessary!!! // acquire lock const releaseLock = await TRANSACTIONS_LOCK.acquire() - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() await queryRunner.startTransaction('REPEATABLE READ') logger.debug(`start Transaction for write-access...`) try { - logger.info('X-Com: settlePendingSenderTransaction:', homeCom, senderUser, pendingTx) + logger.info('settlePendingSenderTransaction:', homeCom, senderUser, pendingTx) // ensure that no other pendingTx with the same sender or recipient exists const openSenderPendingTx = await DbPendingTransaction.count({ diff --git a/backend/src/graphql/resolver/util/storeForeignUser.ts b/backend/src/graphql/resolver/util/storeForeignUser.ts index 929aa9cca..bd0eeb68f 100644 --- a/backend/src/graphql/resolver/util/storeForeignUser.ts +++ b/backend/src/graphql/resolver/util/storeForeignUser.ts @@ -1,7 +1,10 @@ import { Community as DbCommunity, User as DbUser } from 'database' import { SendCoinsResult } from '@/federation/client/1_0/model/SendCoinsResult' -import { backendLogger as logger } from '@/server/logger' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' +import { getLogger } from 'log4js' + +const logger = getLogger(`${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.storeForeignUser`) export async function storeForeignUser( recipCom: DbCommunity, @@ -18,7 +21,7 @@ export async function storeForeignUser( }) if (!user) { logger.debug( - 'X-Com: no foreignUser found for:', + 'no foreignUser found for:', recipCom.communityUuid, committingResult.recipGradidoID, ) @@ -36,7 +39,7 @@ export async function storeForeignUser( } foreignUser.gradidoID = committingResult.recipGradidoID foreignUser = await DbUser.save(foreignUser) - logger.debug('X-Com: new foreignUser inserted:', foreignUser) + logger.debug('new foreignUser inserted:', foreignUser) return true } else if ( @@ -45,7 +48,7 @@ export async function storeForeignUser( user.alias !== committingResult.recipAlias ) { logger.warn( - 'X-Com: foreignUser still exists, but with different name or alias:', + 'foreignUser still exists, but with different name or alias:', user, committingResult, ) @@ -62,11 +65,11 @@ export async function storeForeignUser( logger.debug('update recipient successful.', user) return true } else { - logger.debug('X-Com: foreignUser still exists...:', user) + logger.debug('foreignUser still exists...:', user) return true } } catch (err) { - logger.error('X-Com: error in storeForeignUser;', err) + logger.error('error in storeForeignUser;', err) return 
false } } diff --git a/backend/src/graphql/resolver/util/syncHumhub.test.ts b/backend/src/graphql/resolver/util/syncHumhub.test.ts index 594f5fdfd..2eba3be40 100644 --- a/backend/src/graphql/resolver/util/syncHumhub.test.ts +++ b/backend/src/graphql/resolver/util/syncHumhub.test.ts @@ -4,8 +4,9 @@ import { HumHubClient } from '@/apis/humhub/HumHubClient' import { GetUser } from '@/apis/humhub/model/GetUser' import { UpdateUserInfosArgs } from '@/graphql/arg/UpdateUserInfosArgs' import { PublishNameType } from '@/graphql/enum/PublishNameType' -import { backendLogger as logger } from '@/server/logger' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' +import { getLogger } from 'config-schema/test/testSetup' import { syncHumhub } from './syncHumhub' jest.mock('@/apis/humhub/HumHubClient') @@ -19,6 +20,8 @@ mockUser.humhubPublishName = PublishNameType.PUBLISH_NAME_FULL const mockUpdateUserInfosArg = new UpdateUserInfosArgs() const mockHumHubUser = new GetUser(mockUser, 1) +const logger = getLogger(`${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.syncHumhub`) + describe('syncHumhub', () => { beforeEach(() => { jest.spyOn(logger, 'debug').mockImplementation() @@ -33,8 +36,7 @@ describe('syncHumhub', () => { it('Should not sync if no relevant changes', async () => { await syncHumhub(mockUpdateUserInfosArg, new User(), 'username') expect(HumHubClient.getInstance).not.toBeCalled() - // language logging from some other place - expect(logger.debug).toBeCalledTimes(5) + expect(logger.debug).toBeCalledTimes(1) expect(logger.info).toBeCalledTimes(0) }) @@ -42,7 +44,7 @@ describe('syncHumhub', () => { mockUpdateUserInfosArg.firstName = 'New' // Relevant changes mockUser.firstName = 'New' await syncHumhub(mockUpdateUserInfosArg, mockUser, 'username') - expect(logger.debug).toHaveBeenCalledTimes(8) // Four language logging calls, two debug calls in function, one for not syncing + expect(logger.debug).toHaveBeenCalledTimes(4) // Four language logging calls, two debug calls in function, one for not syncing expect(logger.info).toHaveBeenLastCalledWith('finished sync user with humhub', { localId: mockUser.id, externId: mockHumHubUser.id, diff --git a/backend/src/graphql/resolver/util/syncHumhub.ts b/backend/src/graphql/resolver/util/syncHumhub.ts index b483af1ce..c866daf5a 100644 --- a/backend/src/graphql/resolver/util/syncHumhub.ts +++ b/backend/src/graphql/resolver/util/syncHumhub.ts @@ -7,7 +7,10 @@ import { ExecutedHumhubAction, syncUser } from '@/apis/humhub/syncUser' import { PublishNameLogic } from '@/data/PublishName.logic' import { UpdateUserInfosArgs } from '@/graphql/arg/UpdateUserInfosArgs' import { PublishNameType } from '@/graphql/enum/PublishNameType' -import { backendLogger as logger } from '@/server/logger' +import { LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME } from '@/graphql/resolver/util' +import { getLogger } from 'log4js' + +const createLogger = () => getLogger(`${LOG4JS_GRAPHQL_RESOLVER_UTIL_CATEGORY_NAME}.syncHumhub`) /** * Syncs the user with humhub @@ -21,6 +24,8 @@ export async function syncHumhub( oldHumhubUsername: string, spaceId?: number | null, ): Promise { + const logger = createLogger() + logger.addContext('user', user.id) // check for humhub relevant changes if ( updateUserInfosArg && diff --git a/backend/src/graphql/resolver/util/transactionLinkSummary.ts b/backend/src/graphql/resolver/util/transactionLinkSummary.ts index 86382fc96..74cac7f32 100644 --- a/backend/src/graphql/resolver/util/transactionLinkSummary.ts +++ 
b/backend/src/graphql/resolver/util/transactionLinkSummary.ts @@ -1,9 +1,10 @@ -import { TransactionLink as DbTransactionLink } from 'database' +import { AppDatabase, TransactionLink as DbTransactionLink } from 'database' import { Decimal } from 'decimal.js-light' -import { getConnection } from 'typeorm' import { LogError } from '@/server/LogError' +const db = AppDatabase.getInstance() + export const transactionLinkSummary = async ( userId: number, date: Date, @@ -14,7 +15,7 @@ export const transactionLinkSummary = async ( firstDate: Date | null transactionLinkcount: number }> => { - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() try { await queryRunner.connect() const { sumHoldAvailableAmount, sumAmount, lastDate, firstDate, count } = diff --git a/backend/src/graphql/resolver/util/validateAlias.test.ts b/backend/src/graphql/resolver/util/validateAlias.test.ts index 75e599e07..f10f54886 100644 --- a/backend/src/graphql/resolver/util/validateAlias.test.ts +++ b/backend/src/graphql/resolver/util/validateAlias.test.ts @@ -1,31 +1,35 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { User } from 'database' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { cleanDB, testEnvironment } from '@test/helpers' -import { i18n as localization, logger } from '@test/testSetup' +import { i18n as localization } from '@test/testSetup' +import { getLogger } from 'config-schema/test/testSetup' import { userFactory } from '@/seeds/factory/user' import { bibiBloxberg } from '@/seeds/users/bibi-bloxberg' import { validateAlias } from './validateAlias' -let con: Connection +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.LogError`) + +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { - testEnv = await testEnvironment(logger, localization) + testEnv = await testEnvironment(getLogger('apollo'), localization) con = testEnv.con await cleanDB() }) afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('validate alias', () => { diff --git a/backend/src/index.ts b/backend/src/index.ts index 01f7f47a3..6197f39df 100644 --- a/backend/src/index.ts +++ b/backend/src/index.ts @@ -1,10 +1,14 @@ import 'reflect-metadata' +import 'source-map-support/register' +import { getLogger } from 'log4js' import { CONFIG } from './config' import { startValidateCommunities } from './federation/validateCommunities' import { createServer } from './server/createServer' +import { initLogging } from './server/logger' async function main() { - const { app } = await createServer() + initLogging() + const { app } = await createServer(getLogger('apollo')) app.listen(CONFIG.PORT, () => { // biome-ignore lint/suspicious/noConsole: no need for logging the start message diff --git a/backend/src/interactions/index.ts b/backend/src/interactions/index.ts new file mode 100644 index 000000000..3aad98ba5 --- /dev/null +++ b/backend/src/interactions/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' + +export const LOG4JS_INTERACTION_CATEGORY_NAME = `${LOG4JS_BASE_CATEGORY_NAME}.interaction` diff --git a/backend/src/interactions/updateUnconfirmedContribution/AbstractUnconfirmedContribution.role.ts 
b/backend/src/interactions/updateUnconfirmedContribution/AbstractUnconfirmedContribution.role.ts index 36369987b..b713aeefd 100644 --- a/backend/src/interactions/updateUnconfirmedContribution/AbstractUnconfirmedContribution.role.ts +++ b/backend/src/interactions/updateUnconfirmedContribution/AbstractUnconfirmedContribution.role.ts @@ -5,13 +5,16 @@ import { Role } from '@/auth/Role' import { ContributionLogic } from '@/data/Contribution.logic' import { ContributionMessageBuilder } from '@/data/ContributionMessage.builder' import { ContributionStatus } from '@/graphql/enum/ContributionStatus' +import { LOG4JS_INTERACTION_CATEGORY_NAME } from '@/interactions' import { LogError } from '@/server/LogError' import { Context, getClientTimezoneOffset } from '@/server/context' +import { Logger, getLogger } from 'log4js' export abstract class AbstractUnconfirmedContributionRole { private availableCreationSums?: Decimal[] protected changed = true private currentStep = 0 + protected logger: Logger public constructor( protected self: Contribution, @@ -21,6 +24,8 @@ export abstract class AbstractUnconfirmedContributionRole { if (self.confirmedAt || self.deniedAt) { throw new LogError("this contribution isn't unconfirmed!") } + this.logger = getLogger(`${LOG4JS_INTERACTION_CATEGORY_NAME}.updateUnconfirmedContribution`) + this.logger.addContext('contribution', this.self.id) } public isChanged(): boolean { diff --git a/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionAdmin.role.ts b/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionAdmin.role.ts index a216a8919..f160966d3 100644 --- a/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionAdmin.role.ts +++ b/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionAdmin.role.ts @@ -6,8 +6,6 @@ import { ContributionMessageBuilder } from '@/data/ContributionMessage.builder' import { AdminUpdateContributionArgs } from '@/graphql/arg/AdminUpdateContributionArgs' import { ContributionStatus } from '@/graphql/enum/ContributionStatus' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' - import { AbstractUnconfirmedContributionRole } from './AbstractUnconfirmedContribution.role' /** @@ -25,7 +23,7 @@ export class UnconfirmedContributionAdminRole extends AbstractUnconfirmedContrib updateData.amount ?? contribution.amount, updateData.creationDate ? 
new Date(updateData.creationDate) : contribution.contributionDate, ) - logger.debug('use UnconfirmedContributionAdminRole') + this.logger.debug('use UnconfirmedContributionAdminRole') } /** diff --git a/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionAdminAddMessage.role.ts b/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionAdminAddMessage.role.ts index 42bb75e00..dcc10091f 100644 --- a/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionAdminAddMessage.role.ts +++ b/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionAdminAddMessage.role.ts @@ -7,7 +7,6 @@ import { ContributionMessageArgs } from '@/graphql/arg/ContributionMessageArgs' import { ContributionMessageType } from '@/graphql/enum/ContributionMessageType' import { ContributionStatus } from '@/graphql/enum/ContributionStatus' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { AbstractUnconfirmedContributionRole } from './AbstractUnconfirmedContribution.role' @@ -21,7 +20,7 @@ export class UnconfirmedContributionAdminAddMessageRole extends AbstractUnconfir private updateData: ContributionMessageArgs, ) { super(contribution, contribution.amount, contribution.contributionDate) - logger.debug('use UnconfirmedContributionAdminAddMessageRole') + this.logger.debug('use UnconfirmedContributionAdminAddMessageRole') } protected update(): void { diff --git a/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionUser.role.ts b/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionUser.role.ts index dedc7a1d1..6955cbfa7 100644 --- a/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionUser.role.ts +++ b/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionUser.role.ts @@ -4,7 +4,6 @@ import { ContributionMessageBuilder } from '@/data/ContributionMessage.builder' import { ContributionArgs } from '@/graphql/arg/ContributionArgs' import { ContributionStatus } from '@/graphql/enum/ContributionStatus' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { AbstractUnconfirmedContributionRole } from './AbstractUnconfirmedContribution.role' @@ -18,7 +17,7 @@ export class UnconfirmedContributionUserRole extends AbstractUnconfirmedContribu private updateData: ContributionArgs, ) { super(contribution, updateData.amount, new Date(updateData.contributionDate)) - logger.debug('use UnconfirmedContributionUserRole') + this.logger.debug('use UnconfirmedContributionUserRole') } protected update(): void { diff --git a/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionUserAddMessage.role.ts b/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionUserAddMessage.role.ts index 77d89ac2b..0440762f3 100644 --- a/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionUserAddMessage.role.ts +++ b/backend/src/interactions/updateUnconfirmedContribution/UnconfirmedContributionUserAddMessage.role.ts @@ -5,7 +5,6 @@ import { ContributionMessageArgs } from '@/graphql/arg/ContributionMessageArgs' import { ContributionMessageType } from '@/graphql/enum/ContributionMessageType' import { ContributionStatus } from '@/graphql/enum/ContributionStatus' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { 
AbstractUnconfirmedContributionRole } from './AbstractUnconfirmedContribution.role' @@ -19,7 +18,7 @@ export class UnconfirmedContributionUserAddMessageRole extends AbstractUnconfirm private updateData: ContributionMessageArgs, ) { super(contribution, contribution.amount, contribution.contributionDate) - logger.debug('use UnconfirmedContributionUserAddMessageRole') + this.logger.debug('use UnconfirmedContributionUserAddMessageRole') } protected update(): void { diff --git a/backend/src/locales/de.json b/backend/src/locales/de.json index 8c0c8ff0c..8b34f61f4 100644 --- a/backend/src/locales/de.json +++ b/backend/src/locales/de.json @@ -21,7 +21,8 @@ "readMessage": "Nachricht lesen und beantworten", "subject": "Nachricht zu deinem Gemeinwohl-Beitrag", "title": "Nachricht zu deinem Gemeinwohl-Beitrag", - "toSeeAndAnswerMessage": "Um die Nachricht zu sehen und darauf zu antworten, gehe in deinem Gradido-Konto ins Menü „Schöpfen“ auf den Tab „Meine Beiträge“." + "message": "„{message}“", + "toSeeAndAnswerMessage": "Um auf die Nachricht zu antworten, gehe in deinem Gradido-Konto ins Menü „Schöpfen“ auf den Tab „Meine Beiträge“." }, "contribution": { "toSeeContributionsAndMessages": "Um deine Gemeinwohl-Beiträge und dazugehörige Nachrichten zu sehen, gehe in deinem Gradido-Konto ins Menü „Schöpfen“ auf den Tab „Meine Beiträge“." diff --git a/backend/src/locales/en.json b/backend/src/locales/en.json index 9b6b2a4cf..fb90a0334 100644 --- a/backend/src/locales/en.json +++ b/backend/src/locales/en.json @@ -21,7 +21,8 @@ "readMessage": "Read and reply to message", "subject": "Message about your common good contribution", "title": "Message about your common good contribution", - "toSeeAndAnswerMessage": "To view and reply to the message, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab." + "message": "„{message}“", + "toSeeAndAnswerMessage": "To reply to the message, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab." }, "contribution": { "toSeeContributionsAndMessages": "To see your common good contributions and related messages, go to the “Creation” menu in your Gradido account and click on the “My contributions” tab." 
diff --git a/backend/src/password/EncryptorUtils.ts b/backend/src/password/EncryptorUtils.ts index 7da8bc9c1..20ff202b7 100644 --- a/backend/src/password/EncryptorUtils.ts +++ b/backend/src/password/EncryptorUtils.ts @@ -8,7 +8,6 @@ import { PasswordEncryptionType } from '@enum/PasswordEncryptionType' import { CONFIG } from '@/config' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' import { crypto_shorthash_KEYBYTES } from 'sodium-native' @@ -40,7 +39,6 @@ export const SecretKeyCryptographyCreateKey = async ( password: string, ): Promise => { try { - logger.trace('call worker for: SecretKeyCryptographyCreateKey') if (configLoginServerKey.length !== crypto_shorthash_KEYBYTES) { throw new LogError( 'ServerKey has an invalid size', diff --git a/backend/src/password/PasswordEncryptor.ts b/backend/src/password/PasswordEncryptor.ts index 1131b97ef..9efe4e60e 100644 --- a/backend/src/password/PasswordEncryptor.ts +++ b/backend/src/password/PasswordEncryptor.ts @@ -1,6 +1,5 @@ import { User } from 'database' -// import { logger } from '@test/testSetup' getting error "jest is not defined" import { SecretKeyCryptographyCreateKey, getUserCryptographicSalt } from './EncryptorUtils' export const encryptPassword = async (dbUser: User, password: string): Promise => { diff --git a/backend/src/password/__mocks__/EncryptorUtils.ts b/backend/src/password/__mocks__/EncryptorUtils.ts index 5575f72f7..1bac06239 100644 --- a/backend/src/password/__mocks__/EncryptorUtils.ts +++ b/backend/src/password/__mocks__/EncryptorUtils.ts @@ -4,8 +4,9 @@ import { PasswordEncryptionType } from '@enum/PasswordEncryptionType' import { CONFIG } from '@/config' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { crypto_box_SEEDBYTES, crypto_hash_sha512_BYTES, @@ -22,6 +23,8 @@ import { crypto_shorthash_KEYBYTES, } from 'sodium-native' +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.password.EncryptorUtils`) + const SecretKeyCryptographyCreateKeyMock = ( salt: string, password: string, diff --git a/backend/src/seeds/index.ts b/backend/src/seeds/index.ts index b21370833..506c93d41 100644 --- a/backend/src/seeds/index.ts +++ b/backend/src/seeds/index.ts @@ -4,8 +4,9 @@ import { datatype, internet, name } from 'faker' import { CONFIG } from '@/config' import { createServer } from '@/server/createServer' -import { backendLogger as logger } from '@/server/logger' +import { initLogging } from '@/server/logger' +import { getLogger } from 'log4js' import { writeHomeCommunityEntry } from './community' import { contributionLinks } from './contributionLink/index' import { creations } from './creation/index' @@ -17,6 +18,7 @@ import { transactionLinks } from './transactionLink/index' import { users } from './users/index' CONFIG.EMAIL = false +const logger = getLogger('seed') const context = { token: '', @@ -48,7 +50,8 @@ const resetEntity = async (entity: any) => { } const run = async () => { - const server = await createServer(context) + initLogging() + const server = await createServer(getLogger('apollo'), context) const seedClient = createTestClient(server.apollo) const { con } = server await cleanDB() @@ -93,7 +96,7 @@ const run = async () => { } logger.info('##seed## seeding all contributionLinks successful...') - await con.close() + await con.destroy() } run().catch((err) => { diff --git 
a/backend/src/server/LogError.test.ts b/backend/src/server/LogError.test.ts index 431b60e6e..3b98aea89 100644 --- a/backend/src/server/LogError.test.ts +++ b/backend/src/server/LogError.test.ts @@ -1,7 +1,10 @@ -import { logger } from '@test/testSetup' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' +import { getLogger } from 'config-schema/test/testSetup' import { LogError } from './LogError' +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.LogError`) + describe('LogError', () => { it('logs an Error when created', () => { new LogError('new LogError') diff --git a/backend/src/server/LogError.ts b/backend/src/server/LogError.ts index 346923019..c455eaa15 100644 --- a/backend/src/server/LogError.ts +++ b/backend/src/server/LogError.ts @@ -1,5 +1,21 @@ -import { backendLogger as logger } from './logger' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' +import { getLogger } from 'log4js' +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.LogError`) + +/** + * A custom Error that logs itself immediately upon instantiation. + * + * TODO: Anti-pattern warning: + * Logging inside the constructor introduces side effects during object creation, + * which breaks separation of concerns and can lead to duplicate or unwanted logs. + * It is generally better to log errors where they are caught, not where they are thrown. + * + * @class LogError + * @extends {Error} + * @param {string} msg - The error message. + * @param {...any} details - Additional details passed to the logger. + */ export class LogError extends Error { constructor(msg: string, ...details: any[]) { super(msg) diff --git a/backend/src/server/createServer.ts b/backend/src/server/createServer.ts index b87f4cb24..60f6fc31e 100644 --- a/backend/src/server/createServer.ts +++ b/backend/src/server/createServer.ts @@ -1,46 +1,42 @@ +import { CONFIG } from '@/config' +import { schema } from '@/graphql/schema' +import { elopageWebhook } from '@/webhook/elopage' +import { gmsWebhook } from '@/webhook/gms' import { ApolloServer } from 'apollo-server-express' import express, { Express, json, urlencoded } from 'express' import { slowDown } from 'express-slow-down' import helmet from 'helmet' -import { Logger } from 'log4js' -import { Connection as DbConnection } from 'typeorm' - -import { CONFIG } from '@/config' -import { schema } from '@/graphql/schema' -import { checkDBVersionUntil } from '@/typeorm/DBVersion' -import { elopageWebhook } from '@/webhook/elopage' -import { gmsWebhook } from '@/webhook/gms' +import { Logger, getLogger } from 'log4js' +import { DataSource } from 'typeorm' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' +import { AppDatabase } from 'database' import { context as serverContext } from './context' import { cors } from './cors' import { i18n } from './localization' -import { apolloLogger } from './logger' import { plugins } from './plugins' - // TODO implement // import queryComplexity, { simpleEstimator, fieldConfigEstimator } from "graphql-query-complexity"; interface ServerDef { apollo: ApolloServer app: Express - con: DbConnection + con: DataSource } export const createServer = async ( + apolloLogger: Logger, context: any = serverContext, - logger: Logger = apolloLogger, localization: i18n.I18n = i18n, ): Promise<ServerDef> => { - logger.addContext('user', 'unknown') + const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.createServer`) logger.debug('createServer...') // open mariadb connection, retry connecting with mariadb // check for correct database version 
// retry max CONFIG.DB_CONNECT_RETRY_COUNT times, wait CONFIG.DB_CONNECT_RETRY_DELAY ms between tries - const con = await checkDBVersionUntil( - CONFIG.DB_CONNECT_RETRY_COUNT, - CONFIG.DB_CONNECT_RETRY_DELAY_MS, - ) + const db = AppDatabase.getInstance() + await db.init() // Express Server const app = express() @@ -95,7 +91,7 @@ export const createServer = async ( introspection: CONFIG.GRAPHIQL, context, plugins, - logger, + logger: apolloLogger, }) apollo.applyMiddleware({ app, path: '/' }) logger.info( @@ -103,5 +99,5 @@ export const createServer = async ( ) logger.debug('createServer...successful') - return { apollo, app, con } + return { apollo, app, con: db.getDataSource() } } diff --git a/backend/src/server/localization.ts b/backend/src/server/localization.ts index 8e533576a..1e587104a 100644 --- a/backend/src/server/localization.ts +++ b/backend/src/server/localization.ts @@ -1,7 +1,9 @@ import path from 'node:path' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import i18n from 'i18n' +import { getLogger } from 'log4js' -import { backendLogger } from './logger' +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.server.localization`) i18n.configure({ locales: ['en', 'de'], @@ -11,9 +13,9 @@ i18n.configure({ // autoReload: true, // if this is activated the seeding hangs at the very end updateFiles: false, objectNotation: true, - logDebugFn: (msg) => backendLogger.debug(msg), - logWarnFn: (msg) => backendLogger.info(msg), - logErrorFn: (msg) => backendLogger.error(msg), + logDebugFn: (msg) => logger.debug(msg), + logWarnFn: (msg) => logger.info(msg), + logErrorFn: (msg) => logger.error(msg), // this api is needed for email-template pug files api: { __: 't', // now req.__ becomes req.t diff --git a/backend/src/server/logger.ts b/backend/src/server/logger.ts index 24ae79c67..be9cababe 100644 --- a/backend/src/server/logger.ts +++ b/backend/src/server/logger.ts @@ -1,21 +1,17 @@ -import { readFileSync } from 'fs' - -import { configure, getLogger } from 'log4js' - import { CONFIG } from '@/config' +import { defaultCategory, initLogger } from 'config-schema' -const options = JSON.parse(readFileSync(CONFIG.LOG4JS_CONFIG, 'utf-8')) - -options.categories.backend.level = CONFIG.LOG_LEVEL -options.categories.apollo.level = CONFIG.LOG_LEVEL - -configure(options) - -const apolloLogger = getLogger('apollo') -const backendLogger = getLogger('backend') -const klickTippLogger = getLogger('klicktipp') -const gmsLogger = getLogger('gms') - -backendLogger.addContext('user', 'unknown') - -export { apolloLogger, backendLogger, klickTippLogger, gmsLogger } +export function initLogging() { + // init logger + initLogger( + [ + defaultCategory('backend', CONFIG.LOG_LEVEL), + defaultCategory('apollo', CONFIG.LOG_LEVEL), + defaultCategory('klicktipp', CONFIG.LOG_LEVEL), + defaultCategory('gms', CONFIG.LOG_LEVEL), + defaultCategory('seed', CONFIG.LOG_LEVEL), + ], + CONFIG.LOG_FILES_BASE_PATH, + CONFIG.LOG4JS_CONFIG, + ) +} diff --git a/backend/src/typeorm/DBVersion.ts b/backend/src/typeorm/DBVersion.ts deleted file mode 100644 index f60af6d9e..000000000 --- a/backend/src/typeorm/DBVersion.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { Migration } from 'database' - -import { backendLogger as logger } from '@/server/logger' - -import { CONFIG } from '@/config' -import { Connection } from '@/typeorm/connection' -import { Connection as DbConnection } from 'typeorm' - -async function checkDBVersionUntil(maxRetries: number, delayMs: number): Promise { - for (let attempt = 1; attempt <= maxRetries; 
attempt++) { - try { - const connection = await Connection.getInstance() - if (connection?.isInitialized) { - const dbVersion = await checkDBVersion(CONFIG.DB_VERSION) - if (dbVersion) { - logger.info('Database connection and version check succeeded.') - return connection - } - } - } catch (err) { - logger.warn(`Attempt ${attempt}: Waiting for DB...`, err) - } - await new Promise((resolve) => setTimeout(resolve, delayMs)) - } - - logger.fatal( - `Fatal: Could not connect to database or version check failed after ${maxRetries} attempts.`, - ) - throw new Error('Fatal: Database not ready.') -} - -const getDBVersion = async (): Promise => { - try { - const [dbVersion] = await Migration.find({ order: { version: 'DESC' }, take: 1 }) - return dbVersion ? dbVersion.fileName : null - } catch (error) { - logger.error(error) - return null - } -} - -const checkDBVersion = async (DB_VERSION: string): Promise => { - const dbVersion = await getDBVersion() - if (!dbVersion?.includes(DB_VERSION)) { - logger.error( - `Wrong database version detected - the backend requires '${DB_VERSION}' but found '${ - dbVersion ?? 'None' - }`, - ) - return false - } - return true -} - -export { checkDBVersion, getDBVersion, checkDBVersionUntil } diff --git a/backend/src/typeorm/connection.ts b/backend/src/typeorm/connection.ts deleted file mode 100644 index d56a95778..000000000 --- a/backend/src/typeorm/connection.ts +++ /dev/null @@ -1,55 +0,0 @@ -// TODO This is super weird - since the entities are defined in another project they have their own globals. -// We cannot use our connection here, but must use the external typeorm installation -import { entities } from 'database' -import { Connection as DbConnection, FileLogger, createConnection } from 'typeorm' - -import { CONFIG } from '@/config' - -export class Connection { - private static instance: DbConnection - - /** - * The Singleton's constructor should always be private to prevent direct - * construction calls with the `new` operator. - */ - private constructor() {} - - /** - * The static method that controls the access to the singleton instance. - * - * This implementation let you subclass the Singleton class while keeping - * just one instance of each subclass around. - */ - public static async getInstance(): Promise { - if (Connection.instance) { - return Connection.instance - } - try { - Connection.instance = await createConnection({ - name: 'default', - type: 'mysql', - legacySpatialSupport: false, - host: CONFIG.DB_HOST, - port: CONFIG.DB_PORT, - username: CONFIG.DB_USER, - password: CONFIG.DB_PASSWORD, - database: CONFIG.DB_DATABASE, - entities, - synchronize: false, - logging: true, - logger: new FileLogger('all', { - // workaround to let previous path working, because with esbuild the script root path has changed - logPath: (CONFIG.PRODUCTION ? 
'../' : '') + CONFIG.TYPEORM_LOGGING_RELATIVE_PATH, - }), - extra: { - charset: 'utf8mb4_unicode_ci', - }, - }) - return Connection.instance - } catch (error) { - // biome-ignore lint/suspicious/noConsole: maybe logger isn't initialized yet - console.log(error) - return null - } - } -} diff --git a/backend/src/util/Monitor.ts b/backend/src/util/Monitor.ts index f592a343d..31d6d8938 100644 --- a/backend/src/util/Monitor.ts +++ b/backend/src/util/Monitor.ts @@ -1,7 +1,8 @@ import { registerEnumType } from 'type-graphql' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { LogError } from '@/server/LogError' -import { backendLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' export enum MonitorNames { SEND_DLT_TRANSACTIONS = 1, @@ -15,6 +16,7 @@ registerEnumType(MonitorNames, { /* @typescript-eslint/no-extraneous-class */ export class Monitor { private static locks = new Map() + private static logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.util.Monitor`) private constructor() {} @@ -25,15 +27,15 @@ export class Monitor { public static isLocked(key: MonitorNames): boolean | undefined { if (this.locks.has(key)) { - logger.debug(`Monitor isLocked key=${key} = `, this.locks.get(key)) + this.logger.debug(`Monitor isLocked key=${key} = `, this.locks.get(key)) return this.locks.get(key) } - logger.debug(`Monitor isLocked key=${key} not exists`) + this.logger.debug(`Monitor isLocked key=${key} not exists`) return false } public static lockIt(key: MonitorNames): void { - logger.debug(`Monitor lockIt key=`, key) + this.logger.debug(`Monitor lockIt key=`, key) if (this.locks.has(key)) { throw new LogError('still existing Monitor with key=', key) } @@ -41,7 +43,7 @@ export class Monitor { } public static releaseIt(key: MonitorNames): void { - logger.debug(`Monitor releaseIt key=`, key) + this.logger.debug(`Monitor releaseIt key=`, key) if (this.locks.has(key)) { this.locks.delete(key) } diff --git a/backend/src/util/decay.ts b/backend/src/util/decay.ts index 1d1775cc9..96c7ddb4b 100644 --- a/backend/src/util/decay.ts +++ b/backend/src/util/decay.ts @@ -2,8 +2,13 @@ import { Decimal } from 'decimal.js-light' import { Decay } from '@model/Decay' -import { CONFIG } from '@/config' import { LogError } from '@/server/LogError' +import { DECAY_START_TIME } from 'config-schema' + +Decimal.set({ + precision: 25, + rounding: Decimal.ROUND_HALF_UP, +}) // TODO: externalize all those definitions and functions into an external decay library @@ -18,7 +23,7 @@ function calculateDecay( amount: Decimal, from: Date, to: Date, - startBlock: Date = CONFIG.DECAY_START_TIME, + startBlock: Date = DECAY_START_TIME, ): Decay { const fromMs = from.getTime() const toMs = to.getTime() diff --git a/backend/src/util/executeKlicktipp.ts b/backend/src/util/executeKlicktipp.ts index ade1c3470..47db2c735 100644 --- a/backend/src/util/executeKlicktipp.ts +++ b/backend/src/util/executeKlicktipp.ts @@ -1,12 +1,15 @@ -import { Connection } from '@/typeorm/connection' +import { AppDatabase } from 'database' import { exportEventDataToKlickTipp } from './klicktipp' +import { initLogging } from '@/server/logger' async function executeKlicktipp(): Promise { - const connection = await Connection.getInstance() - if (connection) { + initLogging() + const connection = AppDatabase.getInstance() + await connection.init() + if (connection.isConnected()) { await exportEventDataToKlickTipp() - await connection.close() + await connection.destroy() return true } else { return false diff --git 
a/backend/src/util/klicktipp.test.ts b/backend/src/util/klicktipp.test.ts index a2d7029d0..497753c9f 100644 --- a/backend/src/util/klicktipp.test.ts +++ b/backend/src/util/klicktipp.test.ts @@ -1,6 +1,6 @@ import { ApolloServerTestClient } from 'apollo-server-testing' import { Event as DbEvent } from 'database' -import { Connection } from 'typeorm' +import { DataSource } from 'typeorm' import { cleanDB, resetToken, testEnvironment } from '@test/helpers' @@ -18,11 +18,11 @@ jest.mock('@/apis/KlicktippController') jest.mock('@/password/EncryptorUtils') let mutate: ApolloServerTestClient['mutate'] -let con: Connection +let con: DataSource let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } beforeAll(async () => { @@ -34,7 +34,7 @@ beforeAll(async () => { afterAll(async () => { await cleanDB() - await con.close() + await con.destroy() }) describe('klicktipp', () => { diff --git a/backend/src/util/time.ts b/backend/src/util/time.ts index 0acaf2501..ccbb91c07 100644 --- a/backend/src/util/time.ts +++ b/backend/src/util/time.ts @@ -1,3 +1,6 @@ +/** + * @param {number} time - in minutes + */ export const getTimeDurationObject = ( time: number, ): { @@ -13,11 +16,27 @@ export const getTimeDurationObject = ( return { minutes: time } } +/** + * @param startDate + * @param endDate + * @returns duration in minutes + */ +export const durationInMinutesFromDates = (startDate: Date, endDate: Date): number => { + const diff = endDate.getTime() - startDate.getTime() + return Math.floor(diff / (1000 * 60)) +} + +/** + * @param duration in minutes + */ export const printTimeDuration = (duration: number): string => { const time = getTimeDurationObject(duration) const result = time.minutes > 0 ? `${time.minutes} minutes` : '' if (time.hours) { return `${time.hours} hours` + (result !== '' ? 
` and ${result}` : '') } + if (result === '') { + return '0 minutes' + } return result } diff --git a/backend/src/webhook/elopage.ts b/backend/src/webhook/elopage.ts index b027633a6..64113062f 100644 --- a/backend/src/webhook/elopage.ts +++ b/backend/src/webhook/elopage.ts @@ -25,10 +25,13 @@ I assume that the webhook arrives via POST and transmits a string as shown above */ -import { backendLogger as logger } from '@/server/logger' import { LoginElopageBuys, UserContact as dbUserContact } from 'database' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { UserResolver } from '@/graphql/resolver/UserResolver' +import { getLogger } from 'log4js' + +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.webhook.elopage`) export const elopageWebhook = async (req: any, res: any): Promise => { logger.info('Elopage Hook received') diff --git a/backend/src/webhook/gms.ts b/backend/src/webhook/gms.ts index 915552390..06f48024d 100644 --- a/backend/src/webhook/gms.ts +++ b/backend/src/webhook/gms.ts @@ -1,7 +1,9 @@ import { User as DbUser } from 'database' import { decode } from '@/auth/JWT' -import { gmsLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' + +const logger = getLogger('gms.GmsWebhook') export const gmsWebhook = async (req: any, res: any): Promise => { logger.info('GMS Hook received') diff --git a/backend/test/helpers.ts b/backend/test/helpers.ts index 6d567f029..a966eccf6 100644 --- a/backend/test/helpers.ts +++ b/backend/test/helpers.ts @@ -3,7 +3,9 @@ import { entities } from 'database' import { createServer } from '@/server/createServer' -import { i18n, logger } from './testSetup' +import { i18n } from './testSetup' + +import { getLogger } from 'log4js' export const headerPushMock = jest.fn((t) => { context.token = t.value @@ -25,8 +27,8 @@ export const cleanDB = async () => { } } -export const testEnvironment = async (testLogger = logger, testI18n = i18n) => { - const server = await createServer(context, testLogger, testI18n) +export const testEnvironment = async (testLogger = getLogger('apollo'), testI18n = i18n) => { + const server = await createServer( testLogger, context, testI18n) const con = server.con const testClient = createTestClient(server.apollo) const mutate = testClient.mutate diff --git a/backend/test/testSetup.ts b/backend/test/testSetup.ts index 02c325794..c010ff705 100644 --- a/backend/test/testSetup.ts +++ b/backend/test/testSetup.ts @@ -1,7 +1,7 @@ import 'openai/shims/node' import { CONFIG } from '@/config' import { i18n } from '@/server/localization' -import { backendLogger as logger } from '@/server/logger' +import { getLogger, printLogs, clearLogs } from 'config-schema/test/testSetup' CONFIG.EMAIL = true CONFIG.EMAIL_TEST_MODUS = false @@ -10,23 +10,6 @@ CONFIG.GMS_ACTIVE = false jest.setTimeout(1000000) -jest.mock('@/server/logger', () => { - const originalModule = jest.requireActual('@/server/logger') - return { - __esModule: true, - ...originalModule, - backendLogger: { - addContext: jest.fn(), - trace: jest.fn(), - debug: jest.fn(), - warn: jest.fn(), - info: jest.fn(), - error: jest.fn(), - fatal: jest.fn(), - }, - } -}) - jest.mock('@/server/localization', () => { const originalModule = jest.requireActual('@/server/localization') return { @@ -41,4 +24,4 @@ jest.mock('@/server/localization', () => { } }) -export { logger, i18n } +export { i18n, getLogger, printLogs, clearLogs as cleanLogs } diff --git a/backend/tsconfig.json b/backend/tsconfig.json index 79bdd2cf3..2152f4b79 100644 --- a/backend/tsconfig.json 
+++ b/backend/tsconfig.json @@ -12,7 +12,7 @@ // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */ // "declaration": true, /* Generates corresponding '.d.ts' file. */ // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ - // "sourceMap": true, /* Generates corresponding '.map' file. */ + "sourceMap": true, /* Generates corresponding '.map' file. */ // "outFile": "./", /* Concatenate and emit output to single file. */ "outDir": "./build", /* Redirect output structure to the directory. */ // "rootDir" : ".", diff --git a/backend/turbo.json b/backend/turbo.json index e4dfea9a8..fa9af6ce2 100644 --- a/backend/turbo.json +++ b/backend/turbo.json @@ -24,6 +24,9 @@ }, "start": { "dependsOn": ["database#up", "build"] + }, + "start:bundle": { + "dependsOn": ["build:bundle"] } } } \ No newline at end of file diff --git a/biome.json b/biome.json index 0f2cc7978..13ccfdbe9 100644 --- a/biome.json +++ b/biome.json @@ -1,16 +1,18 @@ { - "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json", + "$schema": "https://biomejs.dev/schemas/2.0.0/schema.json", "vcs": { "enabled": false, "clientKind": "git", "useIgnoreFile": false }, "files": { "ignoreUnknown": false, - "ignore": ["build", "node_modules", "coverage"], - "include": [ - "package.json", - "./src/**/*.js", - "./src/**/*.ts", - "./entity/**/*.ts", - "./logging/**/*.ts", - "./migrations/**/*.ts" + "includes": [ + "**/package.json", + "src/**/*.js", + "src/**/*.ts", + "entity/**/*.ts", + "logging/**/*.ts", + "migrations/**/*.ts", + "!**/build", + "!**/node_modules", + "!**/coverage" ] }, "formatter": { @@ -24,14 +26,13 @@ "attributePosition": "auto", "bracketSpacing": true }, - "organizeImports": { "enabled": true }, + "assist": { "actions": { "source": { "organizeImports": "on" } } }, "linter": { "enabled": true, "rules": { "recommended": false, "complexity": { "noExtraBooleanCast": "error", - "noMultipleSpacesInRegularExpressionLiterals": "error", "noUselessCatch": "error", "noUselessConstructor": "error", "noUselessLoneBlockStatements": "error", @@ -39,9 +40,10 @@ "noUselessTernary": "error", "noUselessUndefinedInitialization": "error", "noVoid": "error", - "noWith": "error", "useLiteralKeys": "error", - "useRegexLiterals": "error" + "useRegexLiterals": "error", + "noAdjacentSpacesInRegex": "error", + "noCommaOperator": "error" }, "correctness": { "noConstAssign": "error", @@ -52,7 +54,6 @@ "noInnerDeclarations": "error", "noInvalidConstructorSuper": "error", "noInvalidUseBeforeDeclaration": "error", - "noNewSymbol": "error", "noNodejsModules": "off", "noNonoctalDecimalEscape": "error", "noPrecisionLoss": "error", @@ -66,22 +67,22 @@ "noUnsafeOptionalChaining": "error", "noUnusedLabels": "error", "noUnusedVariables": "error", - "useArrayLiterals": "error", "useIsNan": "error", "useValidForDirection": "error", - "useYield": "error" + "useYield": "error", + "noInvalidBuiltinInstantiation": "error", + "useValidTypeof": "error" }, "security": { "noGlobalEval": "error" }, "style": { - "noCommaOperator": "error", "noDefaultExport": "error", - "noVar": "warn", "noYodaExpression": "error", "useBlockStatements": "error", "useConsistentBuiltinInstantiation": "error", "useConst": "error", "useImportType": "off", - "useSingleVarDeclarator": "error" + "useSingleVarDeclarator": "error", + "useArrayLiterals": "error" }, "suspicious": { "noAsyncPromiseExecutor": "error", @@ -110,10 +111,11 @@ "noUnsafeNegation": "error", "useDefaultSwitchClauseLast": 
"error", "useGetterReturn": "error", - "useValidTypeof": "error" + "noWith": "error", + "noVar": "warn" } }, - "ignore": ["**/node_modules", "**/*.min.js", "**/build", "**/coverage"] + "includes": ["**", "!**/node_modules", "!**/*.min.js", "!**/build", "!**/coverage"] }, "javascript": { "formatter": { @@ -147,9 +149,9 @@ }, "overrides": [ { - "include": ["*.ts", "*.tsx"], + "includes": ["**/*.ts", "**/*.tsx"], "linter": { "rules": { "complexity": { "noVoid": "error" } } } }, - { "include": ["*.test.ts"], "linter": { "rules": {} } } + { "includes": ["**/*.test.ts"], "linter": { "rules": {} } } ] } diff --git a/bun.lock b/bun.lock index 8c9420dc5..e5b37a676 100644 --- a/bun.lock +++ b/bun.lock @@ -10,12 +10,12 @@ "uuid": "^8.3.2", }, "devDependencies": { - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", }, }, "admin": { "name": "admin", - "version": "2.5.2", + "version": "2.6.0", "dependencies": { "@iconify/json": "^2.2.228", "@popperjs/core": "^2.11.8", @@ -84,7 +84,7 @@ }, "backend": { "name": "backend", - "version": "2.5.2", + "version": "2.6.0", "dependencies": { "cross-env": "^7.0.3", "email-templates": "^10.0.1", @@ -92,7 +92,7 @@ }, "devDependencies": { "@anatine/esbuild-decorators": "^0.2.19", - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@swc/cli": "^0.7.3", "@swc/core": "^1.11.24", "@swc/helpers": "^0.5.17", @@ -105,6 +105,7 @@ "@types/node": "^17.0.21", "@types/nodemailer": "^6.4.4", "@types/sodium-native": "^2.3.5", + "@types/source-map-support": "^0.5.10", "@types/uuid": "^8.3.4", "apollo-server-express": "^2.25.2", "apollo-server-testing": "^2.25.2", @@ -143,12 +144,13 @@ "random-bigint": "^0.0.1", "reflect-metadata": "^0.1.13", "regenerator-runtime": "^0.14.1", + "source-map-support": "^0.5.21", "ts-jest": "27.0.5", "ts-node": "^10.9.2", "tsconfig-paths": "^4.1.1", "type-graphql": "^1.1.1", "typed-rest-client": "^1.8.11", - "typeorm": "^0.3.16", + "typeorm": "^0.3.22", "typescript": "^4.9.5", "uuid": "^8.3.2", "workerpool": "^9.2.0", @@ -161,16 +163,21 @@ "dependencies": { "esbuild": "^0.25.2", "joi": "^17.13.3", + "log4js": "^6.9.1", + "source-map-support": "^0.5.21", + "yoctocolors-cjs": "^2.1.2", + "zod": "^3.25.61", }, "devDependencies": { - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@types/node": "^17.0.21", + "jest": "27.2.4", "typescript": "^4.9.5", }, }, "database": { "name": "database", - "version": "2.5.2", + "version": "2.6.0", "dependencies": { "@types/uuid": "^8.3.4", "cross-env": "^7.0.3", @@ -178,16 +185,19 @@ "dotenv": "^10.0.0", "esbuild": "^0.25.2", "geojson": "^0.5.0", + "joi-extract-type": "^15.0.8", + "log4js": "^6.9.1", "mysql2": "^2.3.0", "reflect-metadata": "^0.1.13", + "source-map-support": "^0.5.21", "ts-mysql-migrate": "^1.0.2", "tsx": "^4.19.4", - "typeorm": "^0.3.16", + "typeorm": "^0.3.22", "uuid": "^8.3.2", "wkx": "^0.5.0", }, "devDependencies": { - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@types/faker": "^5.5.9", "@types/geojson": "^7946.0.13", "@types/node": "^17.0.21", @@ -196,15 +206,19 @@ }, "dht-node": { "name": "dht-node", - "version": "2.5.2", + "version": "2.6.0", "dependencies": { "cross-env": "^7.0.3", "dht-rpc": "6.18.1", "sodium-universal": "4.0.1", }, "devDependencies": { - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@hyperswarm/dht": "6.5.1", + "@swc/cli": "^0.7.3", + "@swc/core": "^1.11.24", + "@swc/helpers": "^0.5.17", + "@swc/jest": "^0.2.38", "@types/dotenv": "^8.2.3", "@types/jest": "27.5.1", "@types/joi": "^17.2.3", @@ -217,7 +231,9 @@ "jest": "27.5.1", "joi": 
"^17.13.3", "log4js": "^6.9.1", + "nodemon": "^2.0.7", "prettier": "^2.8.8", + "source-map-support": "^0.5.21", "ts-jest": "27.1.4", "tsx": "^4.19.4", "typeorm": "^0.3.22", @@ -227,14 +243,14 @@ }, "federation": { "name": "federation", - "version": "2.5.2", + "version": "2.6.0", "dependencies": { "cross-env": "^7.0.3", "sodium-native": "^3.4.1", }, "devDependencies": { "@anatine/esbuild-decorators": "^0.2.19", - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@swc/cli": "^0.7.3", "@swc/core": "^1.11.24", "@swc/helpers": "^0.5.17", @@ -267,17 +283,18 @@ "nodemon": "^2.0.7", "prettier": "^3.5.3", "reflect-metadata": "^0.1.13", + "source-map-support": "^0.5.21", "ts-jest": "27.0.5", "tsconfig-paths": "^4.1.1", "type-graphql": "^1.1.1", - "typeorm": "^0.3.16", + "typeorm": "^0.3.22", "typescript": "^4.9.5", "uuid": "8.3.2", }, }, "frontend": { "name": "frontend", - "version": "2.5.2", + "version": "2.6.0", "dependencies": { "@morev/vue-transitions": "^3.0.2", "@types/leaflet": "^1.9.12", @@ -476,23 +493,23 @@ "@bcoe/v8-coverage": ["@bcoe/v8-coverage@0.2.3", "", {}, "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw=="], - "@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="], + "@biomejs/biome": ["@biomejs/biome@2.0.0", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.0.0", "@biomejs/cli-darwin-x64": "2.0.0", "@biomejs/cli-linux-arm64": "2.0.0", "@biomejs/cli-linux-arm64-musl": "2.0.0", "@biomejs/cli-linux-x64": "2.0.0", "@biomejs/cli-linux-x64-musl": "2.0.0", "@biomejs/cli-win32-arm64": "2.0.0", "@biomejs/cli-win32-x64": "2.0.0" }, "bin": { "biome": "bin/biome" } }, "sha512-BlUoXEOI/UQTDEj/pVfnkMo8SrZw3oOWBDrXYFT43V7HTkIUDkBRY53IC5Jx1QkZbaB+0ai1wJIfYwp9+qaJTQ=="], - "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.9.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw=="], + "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.0.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QvqWYtFFhhxdf8jMAdJzXW+Frc7X8XsnHQLY+TBM1fnT1TfeV/v9vsFI5L2J7GH6qN1+QEEJ19jHibCY2Ypplw=="], - "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.9.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg=="], + "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.0.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-5JFhls1EfmuIH4QGFPlNpxJQFC6ic3X1ltcoLN+eSRRIPr6H/lUS1ttuD0Fj7rPgPhZqopK/jfH8UVj/1hIsQw=="], - "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g=="], + "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.0.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-BAH4QVi06TzAbVchXdJPsL0Z/P87jOfes15rI+p3EX9/EGTfIjaQ9lBVlHunxcmoptaA5y1Hdb9UYojIhmnjIw=="], - "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.9.4", "", { "os": "linux", "cpu": "arm64" }, 
"sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA=="], + "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.0.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-Bxsz8ki8+b3PytMnS5SgrGV+mbAWwIxI3ydChb/d1rURlJTMdxTTq5LTebUnlsUWAX6OvJuFeiVq9Gjn1YbCyA=="], - "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg=="], + "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.0.0", "", { "os": "linux", "cpu": "x64" }, "sha512-09PcOGYTtkopWRm6mZ/B6Mr6UHdkniUgIG/jLBv+2J8Z61ezRE+xQmpi3yNgUrFIAU4lPA9atg7mhvE/5Bo7Wg=="], - "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg=="], + "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.0.0", "", { "os": "linux", "cpu": "x64" }, "sha512-tiQ0ABxMJb9I6GlfNp0ulrTiQSFacJRJO8245FFwE3ty3bfsfxlU/miblzDIi+qNrgGsLq5wIZcVYGp4c+HXZA=="], - "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.9.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg=="], + "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.0.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-vrTtuGu91xNTEQ5ZcMJBZuDlqr32DWU1r14UfePIGndF//s2WUAmer4FmgoPgruo76rprk37e8S2A2c0psXdxw=="], - "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.9.4", "", { "os": "win32", "cpu": "x64" }, "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA=="], + "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.0.0", "", { "os": "win32", "cpu": "x64" }, "sha512-2USVQ0hklNsph/KIR72ZdeptyXNnQ3JdzPn3NbjI4Sna34CnxeiYAaZcZzXPDl5PYNFBivV4xmvT3Z3rTmyDBg=="], "@cspotcode/source-map-support": ["@cspotcode/source-map-support@0.8.1", "", { "dependencies": { "@jridgewell/trace-mapping": "0.3.9" } }, "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw=="], @@ -574,10 +591,16 @@ "@graphql-typed-document-node/core": ["@graphql-typed-document-node/core@3.2.0", "", { "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ=="], + "@hapi/address": ["@hapi/address@2.1.4", "", {}, "sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ=="], + "@hapi/boom": ["@hapi/boom@10.0.1", "", { "dependencies": { "@hapi/hoek": "^11.0.2" } }, "sha512-ERcCZaEjdH3OgSJlyjVk8pHIFeus91CjKP3v+MpgBNp5IvGzP2l/bRiD78nqYcKPaZdbKkK5vDBVPd2ohHBlsA=="], + "@hapi/bourne": ["@hapi/bourne@1.3.2", "", {}, "sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA=="], + "@hapi/hoek": ["@hapi/hoek@9.3.0", "", {}, "sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ=="], + "@hapi/joi": ["@hapi/joi@15.1.1", "", { "dependencies": { "@hapi/address": "2.x.x", "@hapi/bourne": "1.x.x", "@hapi/hoek": "8.x.x", "@hapi/topo": "3.x.x" } }, "sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ=="], + "@hapi/topo": ["@hapi/topo@5.1.0", "", { "dependencies": { "@hapi/hoek": "^9.0.0" } }, 
"sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg=="], "@humanwhocodes/config-array": ["@humanwhocodes/config-array@0.13.0", "", { "dependencies": { "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" } }, "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw=="], @@ -632,6 +655,8 @@ "@jest/core": ["@jest/core@27.5.1", "", { "dependencies": { "@jest/console": "^27.5.1", "@jest/reporters": "^27.5.1", "@jest/test-result": "^27.5.1", "@jest/transform": "^27.5.1", "@jest/types": "^27.5.1", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", "emittery": "^0.8.1", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "jest-changed-files": "^27.5.1", "jest-config": "^27.5.1", "jest-haste-map": "^27.5.1", "jest-message-util": "^27.5.1", "jest-regex-util": "^27.5.1", "jest-resolve": "^27.5.1", "jest-resolve-dependencies": "^27.5.1", "jest-runner": "^27.5.1", "jest-runtime": "^27.5.1", "jest-snapshot": "^27.5.1", "jest-util": "^27.5.1", "jest-validate": "^27.5.1", "jest-watcher": "^27.5.1", "micromatch": "^4.0.4", "rimraf": "^3.0.0", "slash": "^3.0.0", "strip-ansi": "^6.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"] }, "sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ=="], + "@jest/create-cache-key-function": ["@jest/create-cache-key-function@29.7.0", "", { "dependencies": { "@jest/types": "^29.6.3" } }, "sha512-4QqS3LY5PBmTRHj9sAg1HLoPzqAI0uOX6wI/TRqHIcOxlFidy6YEmCQJk6FSZjNLGCeubDMfmkWL+qaLKhSGQA=="], + "@jest/environment": ["@jest/environment@27.5.1", "", { "dependencies": { "@jest/fake-timers": "^27.5.1", "@jest/types": "^27.5.1", "@types/node": "*", "jest-mock": "^27.5.1" } }, "sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA=="], "@jest/fake-timers": ["@jest/fake-timers@27.5.1", "", { "dependencies": { "@jest/types": "^27.5.1", "@sinonjs/fake-timers": "^8.0.1", "@types/node": "*", "jest-message-util": "^27.5.1", "jest-mock": "^27.5.1", "jest-util": "^27.5.1" } }, "sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ=="], @@ -876,6 +901,8 @@ "@swc/helpers": ["@swc/helpers@0.5.17", "", { "dependencies": { "tslib": "^2.8.0" } }, "sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A=="], + "@swc/jest": ["@swc/jest@0.2.38", "", { "dependencies": { "@jest/create-cache-key-function": "^29.7.0", "@swc/counter": "^0.1.3", "jsonc-parser": "^3.2.0" }, "peerDependencies": { "@swc/core": "*" } }, "sha512-HMoZgXWMqChJwffdDjvplH53g9G2ALQes3HKXDEdliB/b85OQ0CTSbxG8VSeCwiAn7cOaDVEt4mwmZvbHcS52w=="], + "@swc/types": ["@swc/types@0.1.21", "", { "dependencies": { "@swc/counter": "^0.1.3" } }, "sha512-2YEtj5HJVbKivud9N4bpPBAyZhj4S2Ipe5LkUG94alTpr7in/GU/EARgPAd3BwU+YOmFVJC2+kjqhGRi3r0ZpQ=="], "@szmarczak/http-timer": ["@szmarczak/http-timer@5.0.1", "", { "dependencies": { "defer-to-connect": "^2.0.1" } }, "sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw=="], @@ -936,6 +963,8 @@ "@types/graceful-fs": ["@types/graceful-fs@4.1.9", "", { "dependencies": { "@types/node": "*" } }, "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ=="], + "@types/hapi__joi": ["@types/hapi__joi@15.0.4", "", { "dependencies": { "@types/hapi__joi": "*" } }, 
"sha512-VSS6zc7AIOdHVXmqKaGNPYl8eGrMvWi0R5pt3evJL3UdxO8XS28/XAkBXNyLQoymHxhMd4bF3o1U9mZkWDeN8w=="], + "@types/html-to-text": ["@types/html-to-text@9.0.4", "", {}, "sha512-pUY3cKH/Nm2yYrEmDlPR1mR7yszjGx4DrwPjQ702C4/D5CwHuZTgZdIdwPkRbcuhs7BAh2L5rg3CL5cbRiGTCQ=="], "@types/http-assert": ["@types/http-assert@1.5.6", "", {}, "sha512-TTEwmtjgVbYAzZYWyeHPrrtWnfVkm8tQkP8P21uQifPgMRgjrow3XDEYqucuC8SKZJT7pUnhU/JymvjggxO9vw=="], @@ -1000,6 +1029,8 @@ "@types/sodium-native": ["@types/sodium-native@2.3.9", "", { "dependencies": { "@types/node": "*" } }, "sha512-jZIg5ltGH1okmnH3FrLQsgwjcjOVozMSHwSiEm1/LpMekhOMHbQqp21P4H24mizh1BjwI6Q8qmphmD/HJuAqWg=="], + "@types/source-map-support": ["@types/source-map-support@0.5.10", "", { "dependencies": { "source-map": "^0.6.0" } }, "sha512-tgVP2H469x9zq34Z0m/fgPewGhg/MLClalNOiPIzQlXrSS2YrKu/xCdSCKnEDwkFha51VKEKB6A9wW26/ZNwzA=="], + "@types/stack-utils": ["@types/stack-utils@2.0.3", "", {}, "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw=="], "@types/uuid": ["@types/uuid@8.3.4", "", {}, "sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw=="], @@ -2202,6 +2233,8 @@ "joi": ["joi@17.13.3", "", { "dependencies": { "@hapi/hoek": "^9.3.0", "@hapi/topo": "^5.1.0", "@sideway/address": "^4.1.5", "@sideway/formula": "^3.0.1", "@sideway/pinpoint": "^2.0.0" } }, "sha512-otDA4ldcIx+ZXsKHWmp0YizCweVRZG96J10b0FevjfuncLO1oX59THoAmHkNubYJ+9gWsYsp5k8v4ib6oDv1fA=="], + "joi-extract-type": ["joi-extract-type@15.0.8", "", { "dependencies": { "@hapi/joi": "~15", "@types/hapi__joi": "~15" } }, "sha512-Or97aW6QN6YJq0B+x/vYs65+nmcPvYDE7xhlwRl7yHzY+7Z8pVaj0zxjdJlXmIA9zRcbbYQKCGvW+I4g0kUHgA=="], + "jose": ["jose@4.15.9", "", {}, "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA=="], "js-beautify": ["js-beautify@1.15.4", "", { "dependencies": { "config-chain": "^1.1.13", "editorconfig": "^1.0.4", "glob": "^10.4.2", "js-cookie": "^3.0.5", "nopt": "^7.2.1" }, "bin": { "css-beautify": "js/bin/css-beautify.js", "html-beautify": "js/bin/html-beautify.js", "js-beautify": "js/bin/js-beautify.js" } }, "sha512-9/KXeZUKKJwqCXUdBxFJ3vPh467OCckSBmYDwSK/EtV090K+iMJ7zx2S3HLVDIWFQdqMIsZWbnaGiba18aWhaA=="], @@ -2230,6 +2263,8 @@ "jsonc-eslint-parser": ["jsonc-eslint-parser@2.4.0", "", { "dependencies": { "acorn": "^8.5.0", "eslint-visitor-keys": "^3.0.0", "espree": "^9.0.0", "semver": "^7.3.5" } }, "sha512-WYDyuc/uFcGp6YtM2H0uKmUwieOuzeE/5YocFJLnLfclZ4inf3mRn8ZVy1s7Hxji7Jxm6Ss8gqpexD/GlKoGgg=="], + "jsonc-parser": ["jsonc-parser@3.3.1", "", {}, "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ=="], + "jsonfile": ["jsonfile@6.1.0", "", { "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ=="], "jstransformer": ["jstransformer@1.0.0", "", { "dependencies": { "is-promise": "^2.0.0", "promise": "^7.0.1" } }, "sha512-C9YK3Rf8q6VAPDCCU9fnqo3mAfOH6vUGnMcP4AQAYIEpWtfGLpwOTmZ+igtdK5y+VvI2n3CyYSzy4Qh34eq24A=="], @@ -3290,13 +3325,15 @@ "yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], + "yoctocolors-cjs": ["yoctocolors-cjs@2.1.2", "", {}, "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA=="], + "yup": ["yup@1.6.1", "", { "dependencies": { "property-expr": 
"^2.0.5", "tiny-case": "^1.0.3", "toposort": "^2.0.2", "type-fest": "^2.19.0" } }, "sha512-JED8pB50qbA4FOkDol0bYF/p60qSEDQqBD0/qeIrUCG1KbPBIQ776fCUNb9ldbPcSTxA69g/47XTo4TqWiuXOA=="], "zen-observable": ["zen-observable@0.8.15", "", {}, "sha512-PQ2PC7R9rslx84ndNBZB/Dkv8V8fZEpk83RLgXtYd0fwUgEjseMn1Dgajh2x6S8QbZAFa9p2qVCEuYZNgve0dQ=="], "zen-observable-ts": ["zen-observable-ts@1.2.5", "", { "dependencies": { "zen-observable": "0.8.15" } }, "sha512-QZWQekv6iB72Naeake9hS1KxHlotfRpe+WGNbNx5/ta+R3DNjVO2bswf63gXlWDcs+EMd7XY8HfVQyP1X6T4Zg=="], - "zod": ["zod@3.25.20", "", {}, "sha512-z03fqpTMDF1G02VLKUMt6vyACE7rNWkh3gpXVHgPTw28NPtDFRGvcpTtPwn2kMKtQ0idtYJUTxchytmnqYswcw=="], + "zod": ["zod@3.25.61", "", {}, "sha512-fzfJgUw78LTNnHujj9re1Ov/JJQkRZZGDMcYqSx7Hp4rPOkKywaFHq0S6GoHeXs0wGNE/sIOutkXgnwzrVOGCQ=="], "@apollo/protobufjs/@types/node": ["@types/node@10.17.60", "", {}, "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw=="], @@ -3320,6 +3357,10 @@ "@hapi/boom/@hapi/hoek": ["@hapi/hoek@11.0.7", "", {}, "sha512-HV5undWkKzcB4RZUusqOpcgxOaq6VOAH7zhhIr2g3G8NF/MlFO75SjOr2NfuSx0Mh40+1FqCkagKLJRykUWoFQ=="], + "@hapi/joi/@hapi/hoek": ["@hapi/hoek@8.5.1", "", {}, "sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow=="], + + "@hapi/joi/@hapi/topo": ["@hapi/topo@3.1.6", "", { "dependencies": { "@hapi/hoek": "^8.3.0" } }, "sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ=="], + "@hyperswarm/secret-stream/noise-handshake": ["noise-handshake@4.1.0", "", { "dependencies": { "b4a": "^1.1.0", "nanoassert": "^2.0.0", "sodium-universal": "^5.0.0" } }, "sha512-ZHt2+mOXTvjtaWS2h/JPvQjmknfKrEld2xdSsRYWXnYiJmK/N+dtxrDVSt1cr9wGAlhH7Ek43lIZNsL5bVeX9A=="], "@hyperswarm/secret-stream/sodium-universal": ["sodium-universal@5.0.1", "", { "dependencies": { "sodium-native": "^5.0.1" }, "peerDependencies": { "sodium-javascript": "~0.8.0" }, "optionalPeers": ["sodium-javascript"] }, "sha512-rv+aH+tnKB5H0MAc2UadHShLMslpJsc4wjdnHRtiSIEYpOetCgu8MS4ExQRia+GL/MK3uuCyZPeEsi+J3h+Q+Q=="], @@ -3360,6 +3401,8 @@ "@jest/core/@types/node": ["@types/node@18.19.96", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-PzBvgsZ7YdFs/Kng1BSW8IGv68/SPcOxYYhT7luxD7QyzIhFS1xPTpfK3K9eHBa7hVwlW+z8nN0mOd515yaduQ=="], + "@jest/create-cache-key-function/@jest/types": ["@jest/types@29.6.3", "", { "dependencies": { "@jest/schemas": "^29.6.3", "@types/istanbul-lib-coverage": "^2.0.0", "@types/istanbul-reports": "^3.0.0", "@types/node": "*", "@types/yargs": "^17.0.8", "chalk": "^4.0.0" } }, "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw=="], + "@jest/environment/@types/node": ["@types/node@18.19.96", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-PzBvgsZ7YdFs/Kng1BSW8IGv68/SPcOxYYhT7luxD7QyzIhFS1xPTpfK3K9eHBa7hVwlW+z8nN0mOd515yaduQ=="], "@jest/fake-timers/@types/node": ["@types/node@18.19.96", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-PzBvgsZ7YdFs/Kng1BSW8IGv68/SPcOxYYhT7luxD7QyzIhFS1xPTpfK3K9eHBa7hVwlW+z8nN0mOd515yaduQ=="], @@ -3434,6 +3477,8 @@ "@types/sodium-native/@types/node": ["@types/node@18.19.96", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-PzBvgsZ7YdFs/Kng1BSW8IGv68/SPcOxYYhT7luxD7QyzIhFS1xPTpfK3K9eHBa7hVwlW+z8nN0mOd515yaduQ=="], + "@types/source-map-support/source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="], + 
"@types/ws/@types/node": ["@types/node@18.19.96", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-PzBvgsZ7YdFs/Kng1BSW8IGv68/SPcOxYYhT7luxD7QyzIhFS1xPTpfK3K9eHBa7hVwlW+z8nN0mOd515yaduQ=="], "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], @@ -3910,6 +3955,10 @@ "@istanbuljs/load-nyc-config/js-yaml/argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="], + "@jest/create-cache-key-function/@jest/types/@types/node": ["@types/node@18.19.96", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-PzBvgsZ7YdFs/Kng1BSW8IGv68/SPcOxYYhT7luxD7QyzIhFS1xPTpfK3K9eHBa7hVwlW+z8nN0mOd515yaduQ=="], + + "@jest/create-cache-key-function/@jest/types/@types/yargs": ["@types/yargs@17.0.33", "", { "dependencies": { "@types/yargs-parser": "*" } }, "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA=="], + "@jest/reporters/jest-worker/supports-color": ["supports-color@8.1.1", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="], "@jest/transform/write-file-atomic/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], diff --git a/config-schema/biome.json b/config-schema/biome.json deleted file mode 100644 index 52b921260..000000000 --- a/config-schema/biome.json +++ /dev/null @@ -1,131 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json", - "vcs": { "enabled": false, "clientKind": "git", "useIgnoreFile": false }, - "files": { - "ignoreUnknown": false, - "ignore": ["build", "node_modules"], - "include": ["./src/**/*.js", "./src/**/*.ts"] - }, - "formatter": { - "enabled": true, - "useEditorconfig": true, - "formatWithErrors": false, - "indentStyle": "space", - "indentWidth": 2, - "lineEnding": "lf", - "lineWidth": 100, - "attributePosition": "auto", - "bracketSpacing": true - }, - "organizeImports": { "enabled": true }, - "linter": { - "enabled": true, - "rules": { - "recommended": false, - "complexity": { - "noExtraBooleanCast": "error", - "noMultipleSpacesInRegularExpressionLiterals": "error", - "noUselessCatch": "error", - "noUselessConstructor": "error", - "noUselessLoneBlockStatements": "error", - "noUselessRename": "error", - "noUselessTernary": "error", - "noUselessUndefinedInitialization": "error", - "noVoid": "error", - "noWith": "error", - "useLiteralKeys": "error", - "useRegexLiterals": "error" - }, - "correctness": { - "noConstAssign": "error", - "noConstantCondition": "error", - "noEmptyCharacterClassInRegex": "error", - "noEmptyPattern": "error", - "noGlobalObjectCalls": "error", - "noInnerDeclarations": "error", - "noInvalidConstructorSuper": "error", - "noInvalidUseBeforeDeclaration": "error", - "noNewSymbol": "error", - "noNodejsModules": "off", - "noNonoctalDecimalEscape": "error", - "noPrecisionLoss": "error", - "noSelfAssign": "error", - "noSetterReturn": "error", - "noSwitchDeclarations": "error", - "noUndeclaredVariables": "error", - "noUnreachable": "error", - "noUnreachableSuper": "error", - "noUnsafeFinally": "error", - "noUnsafeOptionalChaining": "error", - "noUnusedLabels": "error", - "noUnusedVariables": "error", - 
"useArrayLiterals": "error", - "useIsNan": "error", - "useValidForDirection": "error", - "useYield": "error" - }, - "security": { "noGlobalEval": "error" }, - "style": { - "noCommaOperator": "error", - "noDefaultExport": "error", - "noVar": "warn", - "noYodaExpression": "error", - "useBlockStatements": "error", - "useConsistentBuiltinInstantiation": "error", - "useConst": "error", - "useSingleVarDeclarator": "error" - }, - "suspicious": { - "noAsyncPromiseExecutor": "error", - "noCatchAssign": "error", - "noClassAssign": "error", - "noCompareNegZero": "error", - "noConsole": "error", - "noControlCharactersInRegex": "error", - "noDebugger": "error", - "noDoubleEquals": "error", - "noDuplicateCase": "error", - "noDuplicateClassMembers": "error", - "noDuplicateObjectKeys": "error", - "noDuplicateParameters": "error", - "noEmptyBlockStatements": "error", - "noFallthroughSwitchClause": "error", - "noFunctionAssign": "error", - "noGlobalAssign": "error", - "noImportAssign": "error", - "noMisleadingCharacterClass": "error", - "noPrototypeBuiltins": "error", - "noRedeclare": "error", - "noSelfCompare": "error", - "noShadowRestrictedNames": "error", - "noSparseArray": "error", - "noUnsafeNegation": "error", - "useDefaultSwitchClauseLast": "error", - "useGetterReturn": "error", - "useValidTypeof": "error" - } - }, - "ignore": ["**/node_modules", "**/*.min.js", "**/build", "**/coverage"] - }, - "javascript": { - "formatter": { - "jsxQuoteStyle": "single", - "quoteProperties": "asNeeded", - "trailingCommas": "all", - "semicolons": "asNeeded", - "arrowParentheses": "always", - "bracketSameLine": false, - "quoteStyle": "single", - "attributePosition": "auto", - "bracketSpacing": true - }, - "globals": ["document", "navigator", "window"] - }, - "overrides": [ - { - "include": ["*.ts", "*.tsx"], - "linter": { "rules": { "complexity": { "noVoid": "error" } } } - }, - { "include": ["*.test.ts"], "linter": { "rules": {} } } - ] -} diff --git a/config-schema/package.json b/config-schema/package.json index e0cccb0eb..c127bcd91 100644 --- a/config-schema/package.json +++ b/config-schema/package.json @@ -15,20 +15,26 @@ "license": "Apache-2.0", "private": true, "scripts": { - "build": "esbuild src/index.ts --outdir=build --platform=node --target=node18.20.7 --bundle --packages=external", + "build": "esbuild src/index.ts --outdir=build --sourcemap --platform=node --target=node18.20.7 --bundle --packages=external", "build:bun": "bun build src/index.ts --outdir=build --target=bun --packages=external", "typecheck": "tsc --noEmit", + "test": "bun test", "lint": "biome check --error-on-warnings .", "lint:fix": "biome check --error-on-warnings . 
--write" }, "devDependencies": { - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@types/node": "^17.0.21", + "jest": "27.2.4", "typescript": "^4.9.5" }, "dependencies": { + "esbuild": "^0.25.2", "joi": "^17.13.3", - "esbuild": "^0.25.2" + "log4js": "^6.9.1", + "source-map-support": "^0.5.21", + "yoctocolors-cjs": "^2.1.2", + "zod": "^3.25.61" }, "engines": { "node": ">=18" diff --git a/config-schema/src/DatabaseConfigSchema.ts b/config-schema/src/DatabaseConfigSchema.ts new file mode 100644 index 000000000..59d0101e1 --- /dev/null +++ b/config-schema/src/DatabaseConfigSchema.ts @@ -0,0 +1,87 @@ +import Joi from 'joi' + +export const DatabaseConfigSchema = Joi.object({ + DB_CONNECT_RETRY_COUNT: Joi.number() + .default(15) + .min(1) + .max(1000) + .description('Number of retries to connect to the database') + .optional(), + + DB_CONNECT_RETRY_DELAY_MS: Joi.number() + .default(500) + .min(100) + .max(10000) + .description('Delay in milliseconds between retries to connect to the database') + .optional(), + + TYPEORM_LOGGING_RELATIVE_PATH: Joi.string() + .pattern(/^[a-zA-Z0-9-_\.\/]+\.log$/) + .message('TYPEORM_LOGGING_RELATIVE_PATH must be a valid filename ending with .log') + .description('log file name for logging typeorm activities') + .default('typeorm.log') + .optional(), + + DB_HOST: Joi.string() + .hostname() + .message('must be a valid host with alphanumeric characters, numbers, points and -') + .description("database host like 'localhost' or 'mariadb' in docker setup") + .default('localhost') + .optional(), + + DB_LOGGING_ACTIVE: Joi.boolean() + .default(false) + .description('Enable sql query logging, only for debug, because produce many log entries') + .optional(), + + DB_LOG_LEVEL: Joi.string() + .valid('all', 'query', 'schema', 'error', 'warn', 'info', 'log', 'migration') + .description('set log level') + .default('info') + .optional(), + + DB_PORT: Joi.number() + .integer() + .min(1024) + .max(49151) + .description('database port, default: 3306') + .default(3306) + .optional(), + + DB_USER: Joi.string() + .pattern(/^[A-Za-z0-9]([A-Za-z0-9-_\.]*[A-Za-z0-9])?$/) // Validates MariaDB username rules + .min(1) // Minimum length 1 + .max(16) // Maximum length 16 + .message( + 'Valid database username (letters, numbers, hyphens, underscores, dots allowed; no spaces, must not start or end with hyphen, dot, or underscore)', + ) + .description('database username for mariadb') + .default('root') + .optional(), + + DB_PASSWORD: Joi.string() + .when(Joi.ref('NODE_ENV'), { + is: 'development', + then: Joi.string().allow(''), + otherwise: Joi.string() + .min(8) + .max(32) + .pattern(/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[!@#$%^&*(),.?":{}|<>]).+$/) + .message( + 'Password must be between 8 and 32 characters long, and contain at least one uppercase letter, one lowercase letter, one number, and one special character (e.g., !@#$%^&*).', + ), + }) + .description( + 'Password for the database user. In development mode, an empty password is allowed. 
In other environments, a complex password is required.', + ) + .default('') + .optional(), + + DB_DATABASE: Joi.string() + .pattern(/^[a-zA-Z][a-zA-Z0-9_-]{1,63}$/) + .description( + 'Database name like gradido_community (must start with a letter, and can only contain letters, numbers, underscores, or dashes)', + ) + .default('gradido_community') + .optional(), +}) diff --git a/config-schema/src/commonSchema.ts b/config-schema/src/commonSchema.ts index fa1afb473..12946fdae 100644 --- a/config-schema/src/commonSchema.ts +++ b/config-schema/src/commonSchema.ts @@ -21,36 +21,6 @@ export const browserUrls = Joi.array() .required() .description('All URLs need to have same protocol to prevent mixed block errors') -export const DECAY_START_TIME = Joi.date() - .iso() // ISO 8601 format for date validation - .description('The start time for decay, expected in ISO 8601 format (e.g. 2021-05-13T17:46:31Z)') - .default(new Date('2021-05-13T17:46:31Z')) // default to the specified date if not provided - .required() - -export const DB_VERSION = Joi.string() - .pattern(/^\d{4}-[a-z0-9-_]+$/) - .message( - 'DB_VERSION must be in the format: YYYY-description, e.g. "0087-add_index_on_user_roles".', - ) - .description( - 'db version string, last migration file name without ending or last folder in entity', - ) - .required() - -export const DB_CONNECT_RETRY_COUNT = Joi.number() - .default(15) - .min(1) - .max(1000) - .description('Number of retries to connect to the database') - .optional() - -export const DB_CONNECT_RETRY_DELAY_MS = Joi.number() - .default(500) - .min(100) - .max(10000) - .description('Delay in milliseconds between retries to connect to the database') - .optional() - export const COMMUNITY_URL = Joi.string() .uri({ scheme: ['http', 'https'] }) .custom((value: string, helpers: Joi.CustomHelpers) => { @@ -155,6 +125,22 @@ export const LOG4JS_CONFIG = Joi.string() .default('log4js-config.json') .required() +export const LOG4JS_CONFIG_PLACEHOLDER = Joi.string() + .pattern(/^[a-zA-Z0-9-_]+(%v)?\.json$/) + .message( + 'LOG4JS_CONFIG_PLACEHOLDER must be a valid filename ending with .json can contain %v as API Version placeholder before ending', + ) + .description('config file name for log4js config file') + .default('log4js-config.json') + .required() + +export const LOG_FILES_BASE_PATH = Joi.string() + .pattern(/^[a-zA-Z0-9-_\/\.]+$/) + .message('LOG_FILES_BASE_PATH must be a valid folder name, relative or absolute') + .description('log folder name for module log files') + .default('../logs/backend') + .optional() + export const LOGIN_APP_SECRET = Joi.string() .pattern(/^[a-fA-F0-9]+$/) .message('need to be valid hex') @@ -177,65 +163,6 @@ export const OPENAI_ACTIVE = Joi.boolean() .description('Flag to enable or disable OpenAI API') .required() -export const TYPEORM_LOGGING_RELATIVE_PATH = Joi.string() - .pattern(/^[a-zA-Z0-9-_\.\/]+\.log$/) - .message('TYPEORM_LOGGING_RELATIVE_PATH must be a valid filename ending with .log') - .description('log file name for logging typeorm activities') - .default('typeorm.log') - .required() - -export const DB_HOST = Joi.string() - .hostname() - .message('must be a valid host with alphanumeric characters, numbers, points and -') - .description("database host like 'localhost' or 'mariadb' in docker setup") - .default('localhost') - .required() - -export const DB_PORT = Joi.number() - .integer() - .min(1024) - .max(49151) - .description('database port, default: 3306') - .default(3306) - .required() - -export const DB_USER = Joi.string() - 
.pattern(/^[A-Za-z0-9]([A-Za-z0-9-_\.]*[A-Za-z0-9])?$/) // Validates MariaDB username rules - .min(1) // Minimum length 1 - .max(16) // Maximum length 16 - .message( - 'Valid database username (letters, numbers, hyphens, underscores, dots allowed; no spaces, must not start or end with hyphen, dot, or underscore)', - ) - .description('database username for mariadb') - .default('root') - .required() - -export const DB_PASSWORD = Joi.string() - .when(Joi.ref('NODE_ENV'), { - is: 'development', - then: Joi.string().allow(''), - otherwise: Joi.string() - .min(8) - .max(32) - .pattern(/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[!@#$%^&*(),.?":{}|<>]).+$/) - .message( - 'Password must be between 8 and 32 characters long, and contain at least one uppercase letter, one lowercase letter, one number, and one special character (e.g., !@#$%^&*).', - ), - }) - .description( - 'Password for the database user. In development mode, an empty password is allowed. In other environments, a complex password is required.', - ) - .default('') - .required() - -export const DB_DATABASE = Joi.string() - .pattern(/^[a-zA-Z][a-zA-Z0-9_-]{1,63}$/) - .description( - 'Database name like gradido_community (must start with a letter, and can only contain letters, numbers, underscores, or dashes)', - ) - .default('gradido_community') - .required() - export const APP_VERSION = Joi.string() .pattern(/^\d+\.\d+\.\d+$/) .message('Version must be in the format "major.minor.patch" (e.g., "2.4.1")') diff --git a/config-schema/src/const.ts b/config-schema/src/const.ts new file mode 100644 index 000000000..1a593f101 --- /dev/null +++ b/config-schema/src/const.ts @@ -0,0 +1 @@ +export const DECAY_START_TIME = new Date('2021-05-13T17:46:31Z') diff --git a/config-schema/src/index.ts b/config-schema/src/index.ts index 3da59fa79..9c7c32653 100644 --- a/config-schema/src/index.ts +++ b/config-schema/src/index.ts @@ -1,35 +1,7 @@ -import { ObjectSchema } from 'joi' +import 'source-map-support/register' export * from './commonSchema' - -export function validate(schema: ObjectSchema, data: any) { - const { error } = schema.validate(data) - const schemaJson = schema.describe() - if (error) { - error.details.forEach((err) => { - const details = JSON.stringify(err, null, 2) - if (!err.context) { - throw new Error('missing context in config validation with joi: ' + details) - } - if (!schemaJson) { - throw new Error('invalid schema in config validation with joi: ' + details) - } - const key = err.context.key - if (key === undefined) { - throw new Error('missing key in config validation with joi: ' + details) - } - const value = err.context.value - const description = schemaJson.keys[key] - ? schema.describe().keys[key].flags.description - : 'No description available' - if (data[key] === undefined) { - throw new Error( - `Environment Variable '${key}' is missing. ${description}, details: ${details}`, - ) - } else { - throw new Error( - `Error on Environment Variable ${key} with value = ${value}: ${err.message}. 
${description}`, - ) - } - }) - } -} +export { DatabaseConfigSchema } from './DatabaseConfigSchema' +export { validate } from './validate' +export type { LogLevel, Category } from './log4js-config' +export { createLog4jsConfig, initLogger, defaultCategory } from './log4js-config' +export { DECAY_START_TIME } from './const' diff --git a/config-schema/src/log4js-config/appenders.ts b/config-schema/src/log4js-config/appenders.ts new file mode 100644 index 000000000..19f74dccd --- /dev/null +++ b/config-schema/src/log4js-config/appenders.ts @@ -0,0 +1,76 @@ +import type { + Appender, + DateFileAppender, + LogLevelFilterAppender, + StandardOutputAppender, +} from 'log4js' +import { CustomFileAppender } from './types' + +const fileAppenderTemplate = { + type: 'dateFile' as const, + pattern: 'yyyy-MM-dd', + compress: true, + keepFileExt: true, + fileNameSep: '_', + numBackups: 30, +} + +const defaultAppenders = { + errorFile: { + type: 'dateFile' as const, + filename: 'errors.log', + pattern: 'yyyy-MM-dd', + layout: { type: 'coloredContext' as const, withStack: true }, + compress: true, + keepFileExt: true, + fileNameSep: '_', + numBackups: 30, + } as DateFileAppender, + errors: { + type: 'logLevelFilter' as const, + level: 'error' as const, + appender: 'errorFile' as const, + } as LogLevelFilterAppender, + out: { + type: 'stdout' as const, + layout: { type: 'coloredContext' as const, withStack: 'error' }, + } as StandardOutputAppender, +} + +/** + * Creates the appender configuration for log4js. + * + * @param {CustomFileAppender[]} fileAppenders + * the list of custom file appenders to add to the standard + * appenders. + * @param {string} [basePath] + * the base path for all log files. + * @param {boolean} [stacktraceOnStdout=false] + * whether to show the stacktrace on the standard output + * appender. + * @returns {Object} + * the appender configuration as a map + */ +export function createAppenderConfig( + fileAppenders: CustomFileAppender[], + basePath?: string, +): { [name: string]: Appender } { + if (basePath) { + defaultAppenders.errorFile.filename = `${basePath}/errors.log` + } + const customAppender: { [name: string]: Appender } = { ...defaultAppenders } + + fileAppenders.forEach((appender) => { + const filename = appender.filename ?? `${appender.name}.log` + const dateFile: DateFileAppender = { + ...fileAppenderTemplate, + filename: basePath ? 
`${basePath}/${filename}` : filename, + } + dateFile.layout = { + type: 'coloredContext', + ...appender.layout, + } + customAppender[appender.name] = dateFile + }) + return customAppender +} diff --git a/config-schema/src/log4js-config/coloredContext.test.ts b/config-schema/src/log4js-config/coloredContext.test.ts new file mode 100644 index 000000000..56a7b7b64 --- /dev/null +++ b/config-schema/src/log4js-config/coloredContext.test.ts @@ -0,0 +1,135 @@ +import { LoggingEvent, levels } from 'log4js' +import colors from 'yoctocolors-cjs' +import { createColoredContextLayout } from './coloredContext' + +let defaultLogEvent: LoggingEvent +let colorFn: (input: string) => string +const startTime = new Date() +const startTimeString = startTime.toISOString() + +describe('createColoredContextLayout', () => { + beforeEach(() => { + defaultLogEvent = { + level: levels.INFO, + categoryName: 'config', + data: ['message'], + context: { user: 1 }, + startTime, + fileName: 'file', + lineNumber: 1, + callStack: 'stack', + pid: 1, + serialise: () => { + throw new Error('Function not implemented.') + }, + } + }) + it('returns a function', () => { + expect(typeof createColoredContextLayout({})).toBe('function') + }) + describe('level:info, color:green', () => { + beforeEach(() => { + defaultLogEvent.level = levels.INFO + colorFn = colors.green + }) + it('format with all undefined', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.INFO}] config -`) + expect(createColoredContextLayout({})(defaultLogEvent)).toBe( + `${coloredString} user=1 message`, + ) + }) + it('format with stack', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.INFO}] config -`) + expect(createColoredContextLayout({ withStack: true })(defaultLogEvent)).toBe( + `${coloredString} user=1 message \nstack`, + ) + }) + it('format with file', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.INFO}] config -`) + expect(createColoredContextLayout({ withFile: true })(defaultLogEvent)).toBe( + `${coloredString} user=1 message \n at file:1`, + ) + }) + it('format with file only if it where level:warn', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.INFO}] config -`) + expect(createColoredContextLayout({ withFile: 'warn' })(defaultLogEvent)).toBe( + `${coloredString} user=1 message`, + ) + }) + it('format with line', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.INFO}] config:1 -`) + expect(createColoredContextLayout({ withLine: true })(defaultLogEvent)).toBe( + `${coloredString} user=1 message`, + ) + }) + it('format with line only if it where level:warn', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.INFO}] config -`) + expect(createColoredContextLayout({ withLine: 'warn' })(defaultLogEvent)).toBe( + `${coloredString} user=1 message`, + ) + }) + it('format with file and line', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.INFO}] config -`) + expect(createColoredContextLayout({ withFile: true, withLine: true })(defaultLogEvent)).toBe( + `${coloredString} user=1 message \n at file:1`, + ) + }) + it('format withStack: error, withLine: true, withFile: warn', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.INFO}] config:1 -`) + expect( + createColoredContextLayout({ + withStack: 'error', + withFile: 'warn', + withLine: true, + })(defaultLogEvent), + ).toBe(`${coloredString} user=1 message`) + }) + }) + + describe('level:error, color:red', () => { + beforeEach(() => 
{ + defaultLogEvent.level = levels.ERROR + colorFn = colors.redBright + }) + it('format with all undefined', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.ERROR}] config -`) + expect(createColoredContextLayout({})(defaultLogEvent)).toBe( + `${coloredString} user=1 message`, + ) + }) + it('format with stack', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.ERROR}] config -`) + expect(createColoredContextLayout({ withStack: true })(defaultLogEvent)).toBe( + `${coloredString} user=1 message \nstack`, + ) + }) + it('format with file', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.ERROR}] config -`) + expect(createColoredContextLayout({ withFile: true })(defaultLogEvent)).toBe( + `${coloredString} user=1 message \n at file:1`, + ) + }) + it('format with line', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.ERROR}] config:1 -`) + expect(createColoredContextLayout({ withLine: true })(defaultLogEvent)).toBe( + `${coloredString} user=1 message`, + ) + }) + it('format with file and line', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.ERROR}] config -`) + expect(createColoredContextLayout({ withFile: true, withLine: true })(defaultLogEvent)).toBe( + `${coloredString} user=1 message \n at file:1`, + ) + }) + it('format withStack: error, withLine: true, withFile: warn', () => { + const coloredString = colorFn(`[${startTimeString}] [${levels.ERROR}] config -`) + expect( + createColoredContextLayout({ + withStack: 'error', + withFile: 'warn', + withLine: true, + })(defaultLogEvent), + ).toBe(`${coloredString} user=1 message \nstack`) + }) + }) +}) diff --git a/config-schema/src/log4js-config/coloredContext.ts b/config-schema/src/log4js-config/coloredContext.ts new file mode 100644 index 000000000..1ebb6b219 --- /dev/null +++ b/config-schema/src/log4js-config/coloredContext.ts @@ -0,0 +1,115 @@ +import { Level, LoggingEvent } from 'log4js' +import colors from 'yoctocolors-cjs' +import { ColoredContextLayoutConfig, LogLevel } from './types' +import { inspect } from 'node:util' + +function colorize(str: string, level: Level): string { + switch (level.colour) { + case 'white': + return colors.white(str) + case 'grey': + return colors.gray(str) + case 'black': + return colors.black(str) + case 'blue': + return colors.blue(str) + case 'cyan': + return colors.cyan(str) + case 'green': + return colors.green(str) + case 'magenta': + return colors.magenta(str) + case 'red': + return colors.redBright(str) + case 'yellow': + return colors.yellow(str) + default: + return colors.gray(str) + } +} + +// distinguish between objects with valid toString function (for examples classes derived from AbstractLoggingView) and other objects +function composeDataString(data: (string | Object)[]): string { + return data + .map((d) => { + // if it is a object and his toString function return only garbage + if (typeof d === 'object' && d.toString() === '[object Object]') { + return inspect(d, ) + } + if (d) { + return d.toString() + } + }) + .join(' ') +} + +// automatic detect context objects and list them in logfmt style +function composeContextString(data: Object): string { + return Object.entries(data) + .map(([key, value]) => { + return `${key}=${value} ` + }) + .join(' ') + .trimEnd() +} + +// check if option is enabled, either if option is them self a boolean or a valid log level and <= eventLogLevel +function isEnabledByLogLevel(eventLogLevel: Level, targetLogLevel?: LogLevel | boolean): boolean { + 
if (!targetLogLevel) { + return false + } + if (typeof targetLogLevel === 'boolean') { + return targetLogLevel + } + return eventLogLevel.isGreaterThanOrEqualTo(targetLogLevel) +} + +enum DetailKind { + Callstack = 'callstack', + File = 'file', + Line = 'line', +} +function resolveDetailKind( + logEvent: LoggingEvent, + config: ColoredContextLayoutConfig, +): DetailKind | undefined { + if (logEvent.callStack && isEnabledByLogLevel(logEvent.level, config.withStack)) { + return DetailKind.Callstack + } + if (isEnabledByLogLevel(logEvent.level, config.withFile)) { + return DetailKind.File + } + if (isEnabledByLogLevel(logEvent.level, config.withLine)) { + return DetailKind.Line + } + return undefined +} + +export function createColoredContextLayout(config: ColoredContextLayoutConfig) { + return (logEvent: LoggingEvent) => { + const result: string[] = [] + const detailKind = resolveDetailKind(logEvent, config) + let categoryName = logEvent.categoryName + if (detailKind === DetailKind.Line) { + categoryName += `:${logEvent.lineNumber}` + } + result.push( + colorize( + `[${logEvent.startTime.toISOString()}] [${logEvent.level}] ${categoryName} -`, + logEvent.level, + ), + ) + if (Object.keys(logEvent.context).length > 0) { + result.push(composeContextString(logEvent.context)) + } + result.push(composeDataString(logEvent.data)) + + if (detailKind === DetailKind.File) { + result.push(`\n at ${logEvent.fileName}:${logEvent.lineNumber}`) + } + if (detailKind === DetailKind.Callstack) { + result.push(`\n${logEvent.callStack}`) + } + return result.join(' ') + } +} diff --git a/config-schema/src/log4js-config/index.test.ts b/config-schema/src/log4js-config/index.test.ts new file mode 100644 index 000000000..fbc722c0a --- /dev/null +++ b/config-schema/src/log4js-config/index.test.ts @@ -0,0 +1,26 @@ +import { createLog4jsConfig, defaultCategory } from '.' + +describe('createLog4jsConfig', () => { + it('should create a log4js config', () => { + const config = createLog4jsConfig([defaultCategory('test', 'debug')]) + expect(config).toBeDefined() + expect(config.appenders).toBeDefined() + expect(config.categories).toBeDefined() + expect(config.appenders).toHaveProperty('test') + expect(config.categories).toHaveProperty('test') + expect(config.appenders.test).toMatchObject({ + type: 'dateFile', + pattern: 'yyyy-MM-dd', + compress: true, + keepFileExt: true, + fileNameSep: '_', + numBackups: 30, + filename: 'test.log', + layout: { + type: 'coloredContext', + withStack: 'error', + withLine: true, + }, + }) + }) +}) diff --git a/config-schema/src/log4js-config/index.ts b/config-schema/src/log4js-config/index.ts new file mode 100644 index 000000000..94721d384 --- /dev/null +++ b/config-schema/src/log4js-config/index.ts @@ -0,0 +1,81 @@ +import { readFileSync, writeFileSync } from 'node:fs' +import { Configuration, LoggingEvent, addLayout, configure } from 'log4js' +import { createAppenderConfig } from './appenders' +import { createColoredContextLayout } from './coloredContext' +import type { Category, CustomFileAppender, LogLevel } from './types' +import { defaultCategory } from './types' + +export type { Category, LogLevel } +export { defaultCategory } + +/** + * Creates the log4js configuration. 
+ * + * @param {Category[]} categories - the categories to add to the configuration + * @param {string} [basePath] - the base path for log files + * @returns {Configuration} the log4js configuration + */ + +addLayout('json', function () { + return function (logEvent: LoggingEvent) { + return JSON.stringify(logEvent) + } +}) + +addLayout('coloredContext', createColoredContextLayout) + +export function createLog4jsConfig(categories: Category[], basePath?: string): Configuration { + const customFileAppenders: CustomFileAppender[] = [] + const result: Configuration = { + appenders: {}, + categories: {}, + } + + categories.forEach((category: Category) => { + customFileAppenders.push({ + name: category.name, + filename: category.filename, + layout: category.layout, + }) + // needed by log4js, show all error message accidentally without (proper) Category + result.categories.default = { + level: 'debug', + appenders: ['out', 'errors'], + enableCallStack: true, + } + const appenders = [category.name, 'out'] + if (category.additionalErrorsFile) { + appenders.push('errors') + } + result.categories[category.name] = { + level: category.level, + appenders, + enableCallStack: true, + } + }) + + result.appenders = createAppenderConfig(customFileAppenders, basePath) + return result +} + +/** + * Initializes the logger. + * + * @param {Category[]} categories - the categories to add to the configuration + * @param {string} logFilesPath - the base path for log files + * @param {string} [log4jsConfigFileName] - the name of the log4js config file + */ +export function initLogger( + categories: Category[], + logFilesPath: string, + log4jsConfigFileName: string = 'log4js-config.json', +): void { + // if not log4js config file exists, create a default one + try { + configure(JSON.parse(readFileSync(log4jsConfigFileName, 'utf-8'))) + } catch (_e) { + const options = createLog4jsConfig(categories, logFilesPath) + writeFileSync(log4jsConfigFileName, JSON.stringify(options, null, 2), { encoding: 'utf-8' }) + configure(options) + } +} diff --git a/config-schema/src/log4js-config/types/Category.ts b/config-schema/src/log4js-config/types/Category.ts new file mode 100644 index 000000000..3be4e24b3 --- /dev/null +++ b/config-schema/src/log4js-config/types/Category.ts @@ -0,0 +1,35 @@ +import { ColoredContextLayoutConfig } from './ColoredContextLayoutConfig' +import { LogLevel } from './LogLevel' + +/** + * Configuration for a log4js category. + * + * @property {string} name - The name of the category. + * @property {string} [filename] - The filename for the category, use name if not set. + * @property {boolean} [stdout] - Whether to log to stdout. + * @property {boolean} [additionalErrorsFile] - Whether to log errors additional to the default error file. + * @property {LogLevel} level - The logging level. + * @property {ColoredContextLayoutConfig} [layout] - The layout for the category. 
+ */ +export type Category = { + name: string + filename?: string + stdout?: boolean + additionalErrorsFile?: boolean + level: LogLevel + layout?: ColoredContextLayoutConfig +} + +export function defaultCategory(name: string, level: LogLevel): Category { + return { + name, + level, + stdout: true, + additionalErrorsFile: true, + layout: { + withStack: 'error', + withFile: 'warn', + withLine: true, + }, + } +} diff --git a/config-schema/src/log4js-config/types/ColoredContextLayoutConfig.ts b/config-schema/src/log4js-config/types/ColoredContextLayoutConfig.ts new file mode 100644 index 000000000..1a01f666f --- /dev/null +++ b/config-schema/src/log4js-config/types/ColoredContextLayoutConfig.ts @@ -0,0 +1,7 @@ +import { LogLevel } from './LogLevel' + +export type ColoredContextLayoutConfig = { + withStack?: LogLevel | boolean + withFile?: LogLevel | boolean + withLine?: LogLevel | boolean +} diff --git a/config-schema/src/log4js-config/types/CustomFileAppender.ts b/config-schema/src/log4js-config/types/CustomFileAppender.ts new file mode 100644 index 000000000..eab3250a3 --- /dev/null +++ b/config-schema/src/log4js-config/types/CustomFileAppender.ts @@ -0,0 +1,34 @@ +import { ColoredContextLayoutConfig } from './ColoredContextLayoutConfig' +import { LogLevel } from './LogLevel' +/** + * use default dateFile Template for custom file appenders + * + * @example use name for key and filename, add .log to name + * ``` + * const appenderConfig = createAppenderConfig([ + * { name: 'info' }, + * ]) + * ``` + * + * @example if log file should contain the stacktrace + * ``` + * const appenderConfig = createAppenderConfig([ + * { name: 'warn', filename: 'warn.log', withStack: true }, + * ]) + * ``` + * + * @example if log file should contain the stacktrace only from log level debug and higher + * ``` + * const appenderConfig = createAppenderConfig([ + * { name: 'warn', filename: 'warn.log', withStack: 'debug' }, + * ]) + * ``` + * if stack is shown, no file and no line is shown, because it is already in the stack trace + * if file:line is shown, no extra line is shown + * line will be shown after category name:line + */ +export type CustomFileAppender = { + name: string + filename?: string + layout?: ColoredContextLayoutConfig +} diff --git a/config-schema/src/log4js-config/types/LogLevel.ts b/config-schema/src/log4js-config/types/LogLevel.ts new file mode 100644 index 000000000..d669c0c96 --- /dev/null +++ b/config-schema/src/log4js-config/types/LogLevel.ts @@ -0,0 +1,15 @@ +import { z } from 'zod' + +export const LOG_LEVEL = z.enum([ + 'all', + 'mark', + 'trace', + 'debug', + 'info', + 'warn', + 'error', + 'fatal', + 'off', +]) + +export type LogLevel = z.infer diff --git a/config-schema/src/log4js-config/types/index.ts b/config-schema/src/log4js-config/types/index.ts new file mode 100644 index 000000000..67279ff57 --- /dev/null +++ b/config-schema/src/log4js-config/types/index.ts @@ -0,0 +1,4 @@ +export * from './Category' +export * from './CustomFileAppender' +export * from './LogLevel' +export * from './ColoredContextLayoutConfig' diff --git a/config-schema/src/validate.ts b/config-schema/src/validate.ts new file mode 100644 index 000000000..c566dfab7 --- /dev/null +++ b/config-schema/src/validate.ts @@ -0,0 +1,34 @@ +import { ObjectSchema } from 'joi' + +export function validate(schema: ObjectSchema, data: any) { + const { error } = schema.validate(data) + const schemaJson = schema.describe() + if (error) { + error.details.forEach((err) => { + const details = JSON.stringify(err, null, 2) + if 
(!err.context) { + throw new Error('missing context in config validation with joi: ' + details) + } + if (!schemaJson) { + throw new Error('invalid schema in config validation with joi: ' + details) + } + const key = err.context.key + if (key === undefined) { + throw new Error('missing key in config validation with joi: ' + details) + } + const value = err.context.value + const description = schemaJson.keys[key] + ? schema.describe().keys[key].flags.description + : 'No description available' + if (data[key] === undefined) { + throw new Error( + `Environment Variable '${key}' is missing. ${description}, details: ${details}`, + ) + } else { + throw new Error( + `Error on Environment Variable ${key} with value = ${value}: ${err.message}. ${description}`, + ) + } + }) + } +} diff --git a/config-schema/test/testSetup.ts b/config-schema/test/testSetup.ts new file mode 100644 index 000000000..ef471cc03 --- /dev/null +++ b/config-schema/test/testSetup.ts @@ -0,0 +1,106 @@ +/* + * This file is used to mock the log4js logger in the tests. + * It is used to collect all log entries in the logs array. + * If you want to debug your test, you can use `printLogs()` to print all log entries collected through the tests. + * To have only the relevant logs, call `clearLogs()` before your calling the methods you like to test and `printLogs()` after. + */ + +jest.setTimeout(1000000) + +type LogEntry = { + level: string; + message: string; + logger: string; + context: string; + additional: any[]; +} + +const loggers: { [key: string]: any } = {} +const logs: LogEntry[] = [] + +function addLog(level: string, message: string, logger: string, context: Map, additional: any[]) { + logs.push({ + level, + context: [...context.entries()].map(([key, value]) => `${key}=${value}`).join(' ').trimEnd(), + message, + logger, + additional + }) +} + +export function printLogs() { + for (const log of logs) { + const messages = [] + messages.push(log.message) + messages.push(log.additional.map((d) => { + if (typeof d === 'object' && d.toString() === '[object Object]') { + return JSON.stringify(d) + } + if (d) { + return d.toString() + } + }).filter((d) => d)) + process.stdout.write(`${log.logger} [${log.level}] ${log.context} ${messages.join(' ')}\n`) + } +} + +export function clearLogs(): void { + logs.length = 0 +} + +const getLoggerMocked = jest.fn().mockImplementation((param: any) => { + if (loggers[param]) { + // TODO: check if it is working when tests run in parallel + loggers[param].clearContext() + return loggers[param] + } + // console.log('getLogger called with: ', param) + const fakeLogger = { + context: new Map(), + addContext: jest.fn((key: string, value: string) => { + fakeLogger.context.set(key, value) + }), + trace: jest.fn((message: string, ...args: any[]) => { + addLog('trace', message, param, fakeLogger.context, args) + }), + debug: jest.fn((message: string, ...args: any[]) => { + addLog('debug', message, param, fakeLogger.context, args) + }), + warn: jest.fn((message: string, ...args: any[]) => { + addLog('warn', message, param, fakeLogger.context, args) + }), + info: jest.fn((message: string, ...args: any[]) => { + addLog('info', message, param, fakeLogger.context, args) + }), + error: jest.fn((message: string, ...args: any[]) => { + addLog('error', message, param, fakeLogger.context, args) + }), + fatal: jest.fn((message: string, ...args: any[]) => { + addLog('fatal', message, param, fakeLogger.context, args) + }), + removeContext: jest.fn((key: string) => { + fakeLogger.context.delete(key) + }), + 
clearContext: jest.fn(() => { + fakeLogger.context.clear() + }) + } + loggers[param] = fakeLogger + return fakeLogger +}) + +jest.mock('log4js', () => { + const originalModule = jest.requireActual('log4js') + return { + __esModule: true, + ...originalModule, + getLogger: getLoggerMocked + } +}) + +export function getLogger(name: string) { + if (!loggers[name]) { + return getLoggerMocked(name) + } + return loggers[name] +} diff --git a/config-schema/tsconfig.json b/config-schema/tsconfig.json index 04306edad..d65460927 100644 --- a/config-schema/tsconfig.json +++ b/config-schema/tsconfig.json @@ -1,7 +1,6 @@ { "compilerOptions": { /* Visit https://aka.ms/tsconfig.json to read more about this file */ - /* Basic Options */ // "incremental": true, /* Enable incremental compilation */ "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */ @@ -69,5 +68,6 @@ "skipLibCheck": true, /* Skip type checking of declaration files. */ "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */ }, - "references": [] /* Any project that is referenced must itself have a `references` array (which may be empty). */ + "references": [], /* Any project that is referenced must itself have a `references` array (which may be empty). */ + "exclude": ["**/*.test.ts", "**/*.spec.ts", "test/*"], } diff --git a/database/biome.json b/database/biome.json index 786069e9b..d27c2426d 100644 --- a/database/biome.json +++ b/database/biome.json @@ -1,10 +1,18 @@ { - "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json", + "root": false, + "$schema": "https://biomejs.dev/schemas/2.0.0/schema.json", "vcs": { "enabled": false, "clientKind": "git", "useIgnoreFile": false }, "files": { "ignoreUnknown": false, - "ignore": ["build", "node_modules", "coverage"], - "include": ["./src/**/*.ts", "./entity/**/*.ts", "./logging/**/*.ts", "./migrations/**/*.ts"] + "includes": [ + "src/**/*.ts", + "entity/**/*.ts", + "logging/**/*.ts", + "migrations/**/*.ts", + "!**/build", + "!**/node_modules", + "!**/coverage" + ] }, "formatter": { "enabled": true, @@ -17,14 +25,13 @@ "attributePosition": "auto", "bracketSpacing": true }, - "organizeImports": { "enabled": true }, + "assist": { "actions": { "source": { "organizeImports": "on" } } }, "linter": { "enabled": true, "rules": { "recommended": false, "complexity": { "noExtraBooleanCast": "error", - "noMultipleSpacesInRegularExpressionLiterals": "error", "noUselessCatch": "error", "noUselessConstructor": "error", "noUselessLoneBlockStatements": "error", @@ -32,10 +39,11 @@ "noUselessTernary": "error", "noUselessUndefinedInitialization": "error", "noVoid": "error", - "noWith": "error", "useArrowFunction": "off", "useLiteralKeys": "error", - "useRegexLiterals": "error" + "useRegexLiterals": "error", + "noAdjacentSpacesInRegex": "error", + "noCommaOperator": "error" }, "correctness": { "noConstAssign": "error", @@ -46,7 +54,6 @@ "noInnerDeclarations": "error", "noInvalidConstructorSuper": "error", "noInvalidUseBeforeDeclaration": "error", - "noNewSymbol": "error", "noNodejsModules": "off", "noNonoctalDecimalEscape": "error", "noPrecisionLoss": "error", @@ -60,22 +67,22 @@ "noUnsafeOptionalChaining": "error", "noUnusedLabels": "error", "noUnusedVariables": "error", - "useArrayLiterals": "error", "useIsNan": "error", "useValidForDirection": "error", - "useYield": "error" + "useYield": "error", + "noInvalidBuiltinInstantiation": "error", + 
"useValidTypeof": "error" }, "security": { "noGlobalEval": "error" }, "style": { - "noCommaOperator": "error", "noDefaultExport": "error", - "noVar": "warn", "noYodaExpression": "error", "useBlockStatements": "error", "useConsistentBuiltinInstantiation": "error", "useConst": "error", "useSingleVarDeclarator": "error", - "useThrowOnlyError": "error" + "useThrowOnlyError": "error", + "useArrayLiterals": "error" }, "suspicious": { "noAssignInExpressions": "error", @@ -106,10 +113,11 @@ "noUnsafeNegation": "error", "useDefaultSwitchClauseLast": "error", "useGetterReturn": "error", - "useValidTypeof": "error" + "noWith": "error", + "noVar": "warn" } }, - "ignore": ["**/node_modules", "**/*.min.js", "**/build"] + "includes": ["**", "!**/node_modules", "!**/*.min.js", "!**/build"] }, "javascript": { "formatter": { @@ -127,7 +135,7 @@ }, "overrides": [ { - "include": ["*.ts", "*.tsx"], + "includes": ["**/*.ts", "**/*.tsx"], "linter": { "rules": { "complexity": { "noVoid": "error" } } } } ] diff --git a/database/esbuild.config.ts b/database/esbuild.config.ts index ebdb10e2a..078228f7d 100644 --- a/database/esbuild.config.ts +++ b/database/esbuild.config.ts @@ -1,14 +1,15 @@ import { build } from 'esbuild' import fs from 'node:fs' -import { latestDbVersion } from './src/config/detectLastDBVersion' +import { latestDbVersion } from './src/detectLastDBVersion' build({ - entryPoints: ['entity/index.ts'], + entryPoints: ['src/index.ts'], bundle: true, target: 'node18.20.7', platform: 'node', packages: 'external', outdir: './build', + sourcemap: true, plugins: [ { // hardcode last db version string into index.ts, before parsing diff --git a/database/src/clear.ts b/database/migration/clear.ts similarity index 91% rename from database/src/clear.ts rename to database/migration/clear.ts index e2999bbf9..a78d35890 100644 --- a/database/src/clear.ts +++ b/database/migration/clear.ts @@ -1,6 +1,7 @@ import { Connection } from 'mysql2/promise' -import { CONFIG } from './config' +import { CONFIG } from '../src/config' import { connectToDatabaseServer } from './prepare' +import { MIGRATIONS_TABLE } from '../src/config/const' export async function truncateTables(connection: Connection) { const [tables] = await connection.query('SHOW TABLES') @@ -16,7 +17,7 @@ export async function truncateTables(connection: Connection) { // Truncating all tables... 
for (const tableName of tableNames) { - if (tableName === CONFIG.MIGRATIONS_TABLE) { + if (tableName === MIGRATIONS_TABLE) { continue } await connection.query(`TRUNCATE TABLE \`${tableName}\``) diff --git a/database/migration/index.ts b/database/migration/index.ts new file mode 100644 index 000000000..2c84fb594 --- /dev/null +++ b/database/migration/index.ts @@ -0,0 +1,103 @@ +import { CONFIG } from '../src/config' +import { DatabaseState, getDatabaseState } from './prepare' + +import path from 'node:path' +import { createPool } from 'mysql' +import { Migration } from 'ts-mysql-migrate' +import { clearDatabase } from './clear' +import { latestDbVersion } from '../src/detectLastDBVersion' +import { MIGRATIONS_TABLE } from '../src/config/const' + +const run = async (command: string) => { + if (command === 'clear') { + if (CONFIG.NODE_ENV === 'production') { + throw new Error('Clearing database in production is not allowed') + } + await clearDatabase() + return + } + // Database actions not supported by our migration library + // await createDatabase() + const state = await getDatabaseState() + if (state === DatabaseState.NOT_CONNECTED) { + throw new Error( + `Database not connected, is database server running? + host: ${CONFIG.DB_HOST} + port: ${CONFIG.DB_PORT} + user: ${CONFIG.DB_USER} + password: ${CONFIG.DB_PASSWORD.slice(-2)} + database: ${CONFIG.DB_DATABASE}`, + ) + } + if (state === DatabaseState.HIGHER_VERSION) { + throw new Error('Database version is higher than required, please switch to the correct branch') + } + if (state === DatabaseState.SAME_VERSION) { + if (command === 'up') { + // biome-ignore lint/suspicious/noConsole: no logger present + console.log('Database is up to date') + return + } + } + // Initialize Migrations + const pool = createPool({ + host: CONFIG.DB_HOST, + port: CONFIG.DB_PORT, + user: CONFIG.DB_USER, + password: CONFIG.DB_PASSWORD, + database: CONFIG.DB_DATABASE, + }) + const migration = new Migration({ + conn: pool, + tableName: MIGRATIONS_TABLE, + silent: true, + dir: path.join(__dirname, 'migrations'), + }) + await migration.initialize() + + // Execute command + switch (command) { + case 'up': + await migration.up() // use for upgrade script + break + case 'down': + await migration.down() // use for downgrade script + break + case 'reset': + if (CONFIG.NODE_ENV === 'production') { + throw new Error('Resetting database in production is not allowed') + } + await migration.reset() + break + default: + throw new Error(`Unsupported command ${command}`) + } + if (command === 'reset') { + // biome-ignore lint/suspicious/noConsole: no logger present + console.log('Database was reset') + } else { + const currentDbVersion = await migration.getLastVersion() + // biome-ignore lint/suspicious/noConsole: no logger present + console.log(`Database was ${command} migrated to version: ${currentDbVersion.fileName}`) + if (latestDbVersion === currentDbVersion.fileName.split('.')[0]) { + // biome-ignore lint/suspicious/noConsole: no logger present + console.log('Database is now up to date') + } else { + // biome-ignore lint/suspicious/noConsole: no logger present + console.log('The latest database version is: ', latestDbVersion) + } + } + + // Terminate connections gracefully + pool.end() +} + +run(process.argv[2]) + .catch((err) => { + // biome-ignore lint/suspicious/noConsole: no logger present + console.log(err) + process.exit(1) + }) + .then(() => { + process.exit() + }) diff --git a/database/migrations/0001-init_db.ts b/database/migration/migrations/0001-init_db.ts 
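The new database/migration/index.ts above is a thin CLI around ts-mysql-migrate: it takes the command from process.argv[2] and supports up, down, reset and clear, with reset and clear refused when NODE_ENV is production. A minimal sketch of how it might be driven from another script; the helper below is hypothetical and assumes the yarn workspace scripts defined further down in database/package.json (e.g. "up": "cross-env TZ=UTC tsx migration/index.ts up"):

// hypothetical helper, not part of this change set
import { execFileSync } from 'node:child_process'

type MigrationCommand = 'up' | 'down' | 'reset' | 'clear'

// runs the corresponding script of the `database` workspace,
// which in turn executes `tsx migration/index.ts <command>`
function migrate(command: MigrationCommand): void {
  execFileSync('yarn', ['workspace', 'database', command], { stdio: 'inherit' })
}

migrate('up')
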
similarity index 100% rename from database/migrations/0001-init_db.ts rename to database/migration/migrations/0001-init_db.ts diff --git a/database/migrations/0002-add_settings.ts b/database/migration/migrations/0002-add_settings.ts similarity index 100% rename from database/migrations/0002-add_settings.ts rename to database/migration/migrations/0002-add_settings.ts diff --git a/database/migrations/0003-login_server_tables.ts b/database/migration/migrations/0003-login_server_tables.ts similarity index 100% rename from database/migrations/0003-login_server_tables.ts rename to database/migration/migrations/0003-login_server_tables.ts diff --git a/database/migrations/0004-login_server_data.ts b/database/migration/migrations/0004-login_server_data.ts similarity index 100% rename from database/migrations/0004-login_server_data.ts rename to database/migration/migrations/0004-login_server_data.ts diff --git a/database/migrations/0005-admin_tables.ts b/database/migration/migrations/0005-admin_tables.ts similarity index 100% rename from database/migrations/0005-admin_tables.ts rename to database/migration/migrations/0005-admin_tables.ts diff --git a/database/migrations/0006-login_users_collation.ts b/database/migration/migrations/0006-login_users_collation.ts similarity index 100% rename from database/migrations/0006-login_users_collation.ts rename to database/migration/migrations/0006-login_users_collation.ts diff --git a/database/migrations/0007-login_pending_tasks_delete.ts b/database/migration/migrations/0007-login_pending_tasks_delete.ts similarity index 100% rename from database/migrations/0007-login_pending_tasks_delete.ts rename to database/migration/migrations/0007-login_pending_tasks_delete.ts diff --git a/database/migrations/0008-state_users_plug_holes.ts b/database/migration/migrations/0008-state_users_plug_holes.ts similarity index 100% rename from database/migrations/0008-state_users_plug_holes.ts rename to database/migration/migrations/0008-state_users_plug_holes.ts diff --git a/database/migrations/0009-login_users_plug_holes.ts b/database/migration/migrations/0009-login_users_plug_holes.ts similarity index 100% rename from database/migrations/0009-login_users_plug_holes.ts rename to database/migration/migrations/0009-login_users_plug_holes.ts diff --git a/database/migrations/0010-login_users_state_users_sync.ts b/database/migration/migrations/0010-login_users_state_users_sync.ts similarity index 100% rename from database/migrations/0010-login_users_state_users_sync.ts rename to database/migration/migrations/0010-login_users_state_users_sync.ts diff --git a/database/migrations/0011-login_user_backups_plug_holes.ts b/database/migration/migrations/0011-login_user_backups_plug_holes.ts similarity index 100% rename from database/migrations/0011-login_user_backups_plug_holes.ts rename to database/migration/migrations/0011-login_user_backups_plug_holes.ts diff --git a/database/migrations/0012-login_user_backups_unify_wordlist.ts b/database/migration/migrations/0012-login_user_backups_unify_wordlist.ts similarity index 88% rename from database/migrations/0012-login_user_backups_unify_wordlist.ts rename to database/migration/migrations/0012-login_user_backups_unify_wordlist.ts index ad8244f66..914197baf 100644 --- a/database/migrations/0012-login_user_backups_unify_wordlist.ts +++ b/database/migration/migrations/0012-login_user_backups_unify_wordlist.ts @@ -11,15 +11,15 @@ import path from 'path' const TARGET_MNEMONIC_TYPE = 2 const PHRASE_WORD_COUNT = 24 const WORDS_MNEMONIC_0 = fs - 
.readFileSync(path.resolve(__dirname, '../src/config/mnemonic.uncompressed_buffer18112.txt')) + .readFileSync(path.resolve(__dirname, '../../src/config/mnemonic.uncompressed_buffer18112.txt')) .toString() .split(',') const WORDS_MNEMONIC_1 = fs - .readFileSync(path.resolve(__dirname, '../src/config/mnemonic.uncompressed_buffer18113.txt')) + .readFileSync(path.resolve(__dirname, '../../src/config/mnemonic.uncompressed_buffer18113.txt')) .toString() .split(',') const WORDS_MNEMONIC_2 = fs - .readFileSync(path.resolve(__dirname, '../src/config/mnemonic.uncompressed_buffer13116.txt')) + .readFileSync(path.resolve(__dirname, '../../src/config/mnemonic.uncompressed_buffer13116.txt')) .toString() .split(',') const WORDS_MNEMONIC = [WORDS_MNEMONIC_0, WORDS_MNEMONIC_1, WORDS_MNEMONIC_2] diff --git a/database/migrations/0013-drop_unused_tables.ts b/database/migration/migrations/0013-drop_unused_tables.ts similarity index 100% rename from database/migrations/0013-drop_unused_tables.ts rename to database/migration/migrations/0013-drop_unused_tables.ts diff --git a/database/migrations/0014-drop_unused_tables_with_data.ts b/database/migration/migrations/0014-drop_unused_tables_with_data.ts similarity index 100% rename from database/migrations/0014-drop_unused_tables_with_data.ts rename to database/migration/migrations/0014-drop_unused_tables_with_data.ts diff --git a/database/migrations/0015-admin_pending_creations.ts b/database/migration/migrations/0015-admin_pending_creations.ts similarity index 100% rename from database/migrations/0015-admin_pending_creations.ts rename to database/migration/migrations/0015-admin_pending_creations.ts diff --git a/database/migrations/0016-transaction_signatures.ts b/database/migration/migrations/0016-transaction_signatures.ts similarity index 100% rename from database/migrations/0016-transaction_signatures.ts rename to database/migration/migrations/0016-transaction_signatures.ts diff --git a/database/migrations/0017-combine_user_tables.ts b/database/migration/migrations/0017-combine_user_tables.ts similarity index 100% rename from database/migrations/0017-combine_user_tables.ts rename to database/migration/migrations/0017-combine_user_tables.ts diff --git a/database/migrations/0018-combine_login_user_backups_and_user_table.ts b/database/migration/migrations/0018-combine_login_user_backups_and_user_table.ts similarity index 100% rename from database/migrations/0018-combine_login_user_backups_and_user_table.ts rename to database/migration/migrations/0018-combine_login_user_backups_and_user_table.ts diff --git a/database/migrations/0019-replace_login_user_id_with_state_user_id.ts b/database/migration/migrations/0019-replace_login_user_id_with_state_user_id.ts similarity index 100% rename from database/migrations/0019-replace_login_user_id_with_state_user_id.ts rename to database/migration/migrations/0019-replace_login_user_id_with_state_user_id.ts diff --git a/database/migrations/0020-rename_and_clean_state_users.ts b/database/migration/migrations/0020-rename_and_clean_state_users.ts similarity index 100% rename from database/migrations/0020-rename_and_clean_state_users.ts rename to database/migration/migrations/0020-rename_and_clean_state_users.ts diff --git a/database/migrations/0021-elopagebuys_fields_nullable.ts b/database/migration/migrations/0021-elopagebuys_fields_nullable.ts similarity index 100% rename from database/migrations/0021-elopagebuys_fields_nullable.ts rename to database/migration/migrations/0021-elopagebuys_fields_nullable.ts diff --git 
a/database/migrations/0022-delete_decay_start_block.ts b/database/migration/migrations/0022-delete_decay_start_block.ts similarity index 100% rename from database/migrations/0022-delete_decay_start_block.ts rename to database/migration/migrations/0022-delete_decay_start_block.ts diff --git a/database/migrations/0023-users_disabled_soft_delete.ts b/database/migration/migrations/0023-users_disabled_soft_delete.ts similarity index 100% rename from database/migrations/0023-users_disabled_soft_delete.ts rename to database/migration/migrations/0023-users_disabled_soft_delete.ts diff --git a/database/migrations/0024-combine_transaction_tables.ts b/database/migration/migrations/0024-combine_transaction_tables.ts similarity index 100% rename from database/migrations/0024-combine_transaction_tables.ts rename to database/migration/migrations/0024-combine_transaction_tables.ts diff --git a/database/migrations/0025-emails_to_lower.ts b/database/migration/migrations/0025-emails_to_lower.ts similarity index 100% rename from database/migrations/0025-emails_to_lower.ts rename to database/migration/migrations/0025-emails_to_lower.ts diff --git a/database/migrations/0026-combine_transaction_tables2.ts b/database/migration/migrations/0026-combine_transaction_tables2.ts similarity index 100% rename from database/migrations/0026-combine_transaction_tables2.ts rename to database/migration/migrations/0026-combine_transaction_tables2.ts diff --git a/database/migrations/0027-clean_transaction_table.ts b/database/migration/migrations/0027-clean_transaction_table.ts similarity index 100% rename from database/migrations/0027-clean_transaction_table.ts rename to database/migration/migrations/0027-clean_transaction_table.ts diff --git a/database/migrations/0028-decimal_types.ts b/database/migration/migrations/0028-decimal_types.ts similarity index 100% rename from database/migrations/0028-decimal_types.ts rename to database/migration/migrations/0028-decimal_types.ts diff --git a/database/migrations/0029-clean_transaction_table.ts b/database/migration/migrations/0029-clean_transaction_table.ts similarity index 100% rename from database/migrations/0029-clean_transaction_table.ts rename to database/migration/migrations/0029-clean_transaction_table.ts diff --git a/database/migrations/0030-transaction_link.ts b/database/migration/migrations/0030-transaction_link.ts similarity index 100% rename from database/migrations/0030-transaction_link.ts rename to database/migration/migrations/0030-transaction_link.ts diff --git a/database/migrations/0031-remove_sendEmail_from_transaction_link.ts b/database/migration/migrations/0031-remove_sendEmail_from_transaction_link.ts similarity index 100% rename from database/migrations/0031-remove_sendEmail_from_transaction_link.ts rename to database/migration/migrations/0031-remove_sendEmail_from_transaction_link.ts diff --git a/database/migrations/0032-add-transaction-link-to-transaction.ts b/database/migration/migrations/0032-add-transaction-link-to-transaction.ts similarity index 100% rename from database/migrations/0032-add-transaction-link-to-transaction.ts rename to database/migration/migrations/0032-add-transaction-link-to-transaction.ts diff --git a/database/migrations/0033-add_referrer_id.ts b/database/migration/migrations/0033-add_referrer_id.ts similarity index 100% rename from database/migrations/0033-add_referrer_id.ts rename to database/migration/migrations/0033-add_referrer_id.ts diff --git a/database/migrations/0034-drop_server_user_table.ts 
b/database/migration/migrations/0034-drop_server_user_table.ts similarity index 100% rename from database/migrations/0034-drop_server_user_table.ts rename to database/migration/migrations/0034-drop_server_user_table.ts diff --git a/database/migrations/0035-admin_pending_creations_decimal.ts b/database/migration/migrations/0035-admin_pending_creations_decimal.ts similarity index 100% rename from database/migrations/0035-admin_pending_creations_decimal.ts rename to database/migration/migrations/0035-admin_pending_creations_decimal.ts diff --git a/database/migrations/0036-unique_previous_in_transactions.ts b/database/migration/migrations/0036-unique_previous_in_transactions.ts similarity index 100% rename from database/migrations/0036-unique_previous_in_transactions.ts rename to database/migration/migrations/0036-unique_previous_in_transactions.ts diff --git a/database/migrations/0037-drop_user_setting_table.ts b/database/migration/migrations/0037-drop_user_setting_table.ts similarity index 100% rename from database/migrations/0037-drop_user_setting_table.ts rename to database/migration/migrations/0037-drop_user_setting_table.ts diff --git a/database/migrations/0038-add_contribution_links_table.ts b/database/migration/migrations/0038-add_contribution_links_table.ts similarity index 100% rename from database/migrations/0038-add_contribution_links_table.ts rename to database/migration/migrations/0038-add_contribution_links_table.ts diff --git a/database/migrations/0039-contributions_table.ts b/database/migration/migrations/0039-contributions_table.ts similarity index 100% rename from database/migrations/0039-contributions_table.ts rename to database/migration/migrations/0039-contributions_table.ts diff --git a/database/migrations/0040-add_contribution_link_id_to_user.ts b/database/migration/migrations/0040-add_contribution_link_id_to_user.ts similarity index 100% rename from database/migrations/0040-add_contribution_link_id_to_user.ts rename to database/migration/migrations/0040-add_contribution_link_id_to_user.ts diff --git a/database/migrations/0041-move_users_creation_date.ts b/database/migration/migrations/0041-move_users_creation_date.ts similarity index 100% rename from database/migrations/0041-move_users_creation_date.ts rename to database/migration/migrations/0041-move_users_creation_date.ts diff --git a/database/migrations/0042-update_transactions_for_blockchain.ts b/database/migration/migrations/0042-update_transactions_for_blockchain.ts similarity index 100% rename from database/migrations/0042-update_transactions_for_blockchain.ts rename to database/migration/migrations/0042-update_transactions_for_blockchain.ts diff --git a/database/migrations/0043-add_event_protocol_table.ts b/database/migration/migrations/0043-add_event_protocol_table.ts similarity index 100% rename from database/migrations/0043-add_event_protocol_table.ts rename to database/migration/migrations/0043-add_event_protocol_table.ts diff --git a/database/migrations/0044-insert_missing_contributions.ts b/database/migration/migrations/0044-insert_missing_contributions.ts similarity index 100% rename from database/migrations/0044-insert_missing_contributions.ts rename to database/migration/migrations/0044-insert_missing_contributions.ts diff --git a/database/migrations/0045-add_denied_type_and_status_to_contributions.ts b/database/migration/migrations/0045-add_denied_type_and_status_to_contributions.ts similarity index 100% rename from database/migrations/0045-add_denied_type_and_status_to_contributions.ts rename to 
database/migration/migrations/0045-add_denied_type_and_status_to_contributions.ts diff --git a/database/migrations/0046-adapt_users_table_for_gradidoid.ts b/database/migration/migrations/0046-adapt_users_table_for_gradidoid.ts similarity index 100% rename from database/migrations/0046-adapt_users_table_for_gradidoid.ts rename to database/migration/migrations/0046-adapt_users_table_for_gradidoid.ts diff --git a/database/migrations/0047-messages_tables.ts b/database/migration/migrations/0047-messages_tables.ts similarity index 100% rename from database/migrations/0047-messages_tables.ts rename to database/migration/migrations/0047-messages_tables.ts diff --git a/database/migrations/0048-add_is_moderator_to_contribution_messages.ts b/database/migration/migrations/0048-add_is_moderator_to_contribution_messages.ts similarity index 100% rename from database/migrations/0048-add_is_moderator_to_contribution_messages.ts rename to database/migration/migrations/0048-add_is_moderator_to_contribution_messages.ts diff --git a/database/migrations/0049-add_user_contacts_table.ts b/database/migration/migrations/0049-add_user_contacts_table.ts similarity index 100% rename from database/migrations/0049-add_user_contacts_table.ts rename to database/migration/migrations/0049-add_user_contacts_table.ts diff --git a/database/migrations/0050-add_messageId_to_event_protocol.ts b/database/migration/migrations/0050-add_messageId_to_event_protocol.ts similarity index 100% rename from database/migrations/0050-add_messageId_to_event_protocol.ts rename to database/migration/migrations/0050-add_messageId_to_event_protocol.ts diff --git a/database/migrations/0051-add_delete_by_to_contributions.ts b/database/migration/migrations/0051-add_delete_by_to_contributions.ts similarity index 100% rename from database/migrations/0051-add_delete_by_to_contributions.ts rename to database/migration/migrations/0051-add_delete_by_to_contributions.ts diff --git a/database/migrations/0052-add_updated_at_to_contributions.ts b/database/migration/migrations/0052-add_updated_at_to_contributions.ts similarity index 100% rename from database/migrations/0052-add_updated_at_to_contributions.ts rename to database/migration/migrations/0052-add_updated_at_to_contributions.ts diff --git a/database/migrations/0053-change_password_encryption.ts b/database/migration/migrations/0053-change_password_encryption.ts similarity index 100% rename from database/migrations/0053-change_password_encryption.ts rename to database/migration/migrations/0053-change_password_encryption.ts diff --git a/database/migrations/0054-recalculate_balance_and_decay.ts b/database/migration/migrations/0054-recalculate_balance_and_decay.ts similarity index 100% rename from database/migrations/0054-recalculate_balance_and_decay.ts rename to database/migration/migrations/0054-recalculate_balance_and_decay.ts diff --git a/database/migrations/0055-consistent_deleted_users.ts b/database/migration/migrations/0055-consistent_deleted_users.ts similarity index 100% rename from database/migrations/0055-consistent_deleted_users.ts rename to database/migration/migrations/0055-consistent_deleted_users.ts diff --git a/database/migrations/0056-consistent_transactions_table.ts b/database/migration/migrations/0056-consistent_transactions_table.ts similarity index 100% rename from database/migrations/0056-consistent_transactions_table.ts rename to database/migration/migrations/0056-consistent_transactions_table.ts diff --git a/database/migrations/0057-clear_old_password_junk.ts 
b/database/migration/migrations/0057-clear_old_password_junk.ts similarity index 100% rename from database/migrations/0057-clear_old_password_junk.ts rename to database/migration/migrations/0057-clear_old_password_junk.ts diff --git a/database/migrations/0058-add_communities_table.ts b/database/migration/migrations/0058-add_communities_table.ts similarity index 100% rename from database/migrations/0058-add_communities_table.ts rename to database/migration/migrations/0058-add_communities_table.ts diff --git a/database/migrations/0059-add_hide_amount_to_users.ts b/database/migration/migrations/0059-add_hide_amount_to_users.ts similarity index 100% rename from database/migrations/0059-add_hide_amount_to_users.ts rename to database/migration/migrations/0059-add_hide_amount_to_users.ts diff --git a/database/migrations/0060-update_communities_table.ts b/database/migration/migrations/0060-update_communities_table.ts similarity index 100% rename from database/migrations/0060-update_communities_table.ts rename to database/migration/migrations/0060-update_communities_table.ts diff --git a/database/migrations/0061-event_refactoring.ts b/database/migration/migrations/0061-event_refactoring.ts similarity index 100% rename from database/migrations/0061-event_refactoring.ts rename to database/migration/migrations/0061-event_refactoring.ts diff --git a/database/migrations/0062-event_contribution_confirm.ts b/database/migration/migrations/0062-event_contribution_confirm.ts similarity index 100% rename from database/migrations/0062-event_contribution_confirm.ts rename to database/migration/migrations/0062-event_contribution_confirm.ts diff --git a/database/migrations/0063-event_link_fields.ts b/database/migration/migrations/0063-event_link_fields.ts similarity index 100% rename from database/migrations/0063-event_link_fields.ts rename to database/migration/migrations/0063-event_link_fields.ts diff --git a/database/migrations/0064-event_rename.ts b/database/migration/migrations/0064-event_rename.ts similarity index 100% rename from database/migrations/0064-event_rename.ts rename to database/migration/migrations/0064-event_rename.ts diff --git a/database/migrations/0065-refactor_communities_table.ts b/database/migration/migrations/0065-refactor_communities_table.ts similarity index 100% rename from database/migrations/0065-refactor_communities_table.ts rename to database/migration/migrations/0065-refactor_communities_table.ts diff --git a/database/migrations/0066-x-community-sendcoins-transactions_table.ts b/database/migration/migrations/0066-x-community-sendcoins-transactions_table.ts similarity index 100% rename from database/migrations/0066-x-community-sendcoins-transactions_table.ts rename to database/migration/migrations/0066-x-community-sendcoins-transactions_table.ts diff --git a/database/migrations/0067-private_key_in_community_table.ts b/database/migration/migrations/0067-private_key_in_community_table.ts similarity index 100% rename from database/migrations/0067-private_key_in_community_table.ts rename to database/migration/migrations/0067-private_key_in_community_table.ts diff --git a/database/migrations/0068-community_tables_public_key_length.ts b/database/migration/migrations/0068-community_tables_public_key_length.ts similarity index 100% rename from database/migrations/0068-community_tables_public_key_length.ts rename to database/migration/migrations/0068-community_tables_public_key_length.ts diff --git a/database/migrations/0069-add_user_roles_table.ts 
b/database/migration/migrations/0069-add_user_roles_table.ts similarity index 100% rename from database/migrations/0069-add_user_roles_table.ts rename to database/migration/migrations/0069-add_user_roles_table.ts diff --git a/database/migrations/0070-add_dlt_transactions_table.ts b/database/migration/migrations/0070-add_dlt_transactions_table.ts similarity index 100% rename from database/migrations/0070-add_dlt_transactions_table.ts rename to database/migration/migrations/0070-add_dlt_transactions_table.ts diff --git a/database/migrations/0071-add-pending_transactions-table.ts b/database/migration/migrations/0071-add-pending_transactions-table.ts similarity index 100% rename from database/migrations/0071-add-pending_transactions-table.ts rename to database/migration/migrations/0071-add-pending_transactions-table.ts diff --git a/database/migrations/0072-add_communityuuid_to_transactions_table.ts b/database/migration/migrations/0072-add_communityuuid_to_transactions_table.ts similarity index 100% rename from database/migrations/0072-add_communityuuid_to_transactions_table.ts rename to database/migration/migrations/0072-add_communityuuid_to_transactions_table.ts diff --git a/database/migrations/0073-introduce_foreign_user_in_users_table.ts b/database/migration/migrations/0073-introduce_foreign_user_in_users_table.ts similarity index 100% rename from database/migrations/0073-introduce_foreign_user_in_users_table.ts rename to database/migration/migrations/0073-introduce_foreign_user_in_users_table.ts diff --git a/database/migrations/0074-insert_communityuuid in_existing_users.ts b/database/migration/migrations/0074-insert_communityuuid in_existing_users.ts similarity index 100% rename from database/migrations/0074-insert_communityuuid in_existing_users.ts rename to database/migration/migrations/0074-insert_communityuuid in_existing_users.ts diff --git a/database/migrations/0075-contribution_message_add_index.ts b/database/migration/migrations/0075-contribution_message_add_index.ts similarity index 100% rename from database/migrations/0075-contribution_message_add_index.ts rename to database/migration/migrations/0075-contribution_message_add_index.ts diff --git a/database/migrations/0076-add_updated_by_contribution.ts b/database/migration/migrations/0076-add_updated_by_contribution.ts similarity index 100% rename from database/migrations/0076-add_updated_by_contribution.ts rename to database/migration/migrations/0076-add_updated_by_contribution.ts diff --git a/database/migrations/0077-add_resubmission_date_contribution_message.ts b/database/migration/migrations/0077-add_resubmission_date_contribution_message.ts similarity index 100% rename from database/migrations/0077-add_resubmission_date_contribution_message.ts rename to database/migration/migrations/0077-add_resubmission_date_contribution_message.ts diff --git a/database/migrations/0078-move_resubmission_date.ts b/database/migration/migrations/0078-move_resubmission_date.ts similarity index 100% rename from database/migrations/0078-move_resubmission_date.ts rename to database/migration/migrations/0078-move_resubmission_date.ts diff --git a/database/migrations/0079-fill_linked_user_id_of_contributions.ts b/database/migration/migrations/0079-fill_linked_user_id_of_contributions.ts similarity index 100% rename from database/migrations/0079-fill_linked_user_id_of_contributions.ts rename to database/migration/migrations/0079-fill_linked_user_id_of_contributions.ts diff --git 
a/database/migrations/0080-fill_linked_user_gradidoId_of_contributions.ts b/database/migration/migrations/0080-fill_linked_user_gradidoId_of_contributions.ts similarity index 100% rename from database/migrations/0080-fill_linked_user_gradidoId_of_contributions.ts rename to database/migration/migrations/0080-fill_linked_user_gradidoId_of_contributions.ts diff --git a/database/migrations/0081-user_join_community.ts b/database/migration/migrations/0081-user_join_community.ts similarity index 100% rename from database/migrations/0081-user_join_community.ts rename to database/migration/migrations/0081-user_join_community.ts diff --git a/database/migrations/0082-introduce_gms_registration.ts b/database/migration/migrations/0082-introduce_gms_registration.ts similarity index 100% rename from database/migrations/0082-introduce_gms_registration.ts rename to database/migration/migrations/0082-introduce_gms_registration.ts diff --git a/database/migrations/0083-join_community_federated_communities.ts b/database/migration/migrations/0083-join_community_federated_communities.ts similarity index 100% rename from database/migrations/0083-join_community_federated_communities.ts rename to database/migration/migrations/0083-join_community_federated_communities.ts diff --git a/database/migrations/0084-introduce_humhub_registration.ts b/database/migration/migrations/0084-introduce_humhub_registration.ts similarity index 100% rename from database/migrations/0084-introduce_humhub_registration.ts rename to database/migration/migrations/0084-introduce_humhub_registration.ts diff --git a/database/migrations/0085-add_index_transactions_user_id.ts b/database/migration/migrations/0085-add_index_transactions_user_id.ts similarity index 100% rename from database/migrations/0085-add_index_transactions_user_id.ts rename to database/migration/migrations/0085-add_index_transactions_user_id.ts diff --git a/database/migrations/0086-add_community_location.ts b/database/migration/migrations/0086-add_community_location.ts similarity index 100% rename from database/migrations/0086-add_community_location.ts rename to database/migration/migrations/0086-add_community_location.ts diff --git a/database/migrations/0087-add_index_on_user_roles.ts b/database/migration/migrations/0087-add_index_on_user_roles.ts similarity index 100% rename from database/migrations/0087-add_index_on_user_roles.ts rename to database/migration/migrations/0087-add_index_on_user_roles.ts diff --git a/database/migrations/0088-create_project_brandings.ts b/database/migration/migrations/0088-create_project_brandings.ts similarity index 100% rename from database/migrations/0088-create_project_brandings.ts rename to database/migration/migrations/0088-create_project_brandings.ts diff --git a/database/migrations/0089-add_openai_threads.ts b/database/migration/migrations/0089-add_openai_threads.ts similarity index 100% rename from database/migrations/0089-add_openai_threads.ts rename to database/migration/migrations/0089-add_openai_threads.ts diff --git a/database/migrations/0090-drop_unused_tables.ts b/database/migration/migrations/0090-drop_unused_tables.ts similarity index 100% rename from database/migrations/0090-drop_unused_tables.ts rename to database/migration/migrations/0090-drop_unused_tables.ts diff --git a/database/src/prepare.ts b/database/migration/prepare.ts similarity index 92% rename from database/src/prepare.ts rename to database/migration/prepare.ts index e29fc1422..d7fd31e2c 100644 --- a/database/src/prepare.ts +++ b/database/migration/prepare.ts @@ 
-1,7 +1,8 @@ import { Connection, ResultSetHeader, RowDataPacket, createConnection } from 'mysql2/promise' -import { CONFIG } from './config' -import { latestDbVersion } from './config/detectLastDBVersion' +import { CONFIG } from '../src/config' +import { latestDbVersion } from '../src/detectLastDBVersion' +import { MIGRATIONS_TABLE } from '../src/config/const' export enum DatabaseState { NOT_CONNECTED = 'NOT_CONNECTED', @@ -33,7 +34,7 @@ export async function connectToDatabaseServer( async function convertJsToTsInMigrations(connection: Connection): Promise { const [result] = await connection.query(` - UPDATE ${CONFIG.MIGRATIONS_TABLE} + UPDATE ${MIGRATIONS_TABLE} SET fileName = REPLACE(fileName, '.js', '.ts') WHERE fileName LIKE '%.js' `) @@ -85,7 +86,7 @@ export const getDatabaseState = async (): Promise => { SELECT SUM(fileName LIKE '%.js') AS jsCount, SUM(fileName LIKE '%.ts') AS tsCount - FROM ${CONFIG.MIGRATIONS_TABLE} + FROM ${MIGRATIONS_TABLE} `) if (counts[0].jsCount > 0 && counts[0].tsCount > 0) { @@ -100,7 +101,7 @@ export const getDatabaseState = async (): Promise => { // check if the database is up to date const [rows] = await connection.query( - `SELECT fileName FROM ${CONFIG.MIGRATIONS_TABLE} ORDER BY version DESC LIMIT 1`, + `SELECT fileName FROM ${MIGRATIONS_TABLE} ORDER BY version DESC LIMIT 1`, ) if (rows.length === 0) { return DatabaseState.LOWER_VERSION diff --git a/database/package.json b/database/package.json index 94018860f..456b84201 100644 --- a/database/package.json +++ b/database/package.json @@ -1,9 +1,9 @@ { "name": "database", - "version": "2.5.2", + "version": "2.6.0", "description": "Gradido Database Tool to execute database migrations", "main": "./build/index.js", - "types": "./entity/index.ts", + "types": "./src/index.ts", "exports": { ".": { "import": "./build/index.js", @@ -19,16 +19,16 @@ "typecheck": "tsc --noEmit", "lint": "biome check --error-on-warnings .", "lint:fix": "biome check --error-on-warnings . 
--write", - "clear": "cross-env TZ=UTC tsx src/index.ts clear", - "up": "cross-env TZ=UTC tsx src/index.ts up", - "down": "cross-env TZ=UTC tsx src/index.ts down", - "reset": "cross-env TZ=UTC tsx src/index.ts reset", - "up:backend_test": "cross-env TZ=UTC DB_DATABASE=gradido_test_backend tsx src/index.ts up", - "up:federation_test": "cross-env TZ=UTC DB_DATABASE=gradido_test_federation tsx src/index.ts up", - "up:dht_test": "cross-env TZ=UTC DB_DATABASE=gradido_test_dht tsx src/index.ts up" + "clear": "cross-env TZ=UTC tsx migration/index.ts clear", + "up": "cross-env TZ=UTC tsx migration/index.ts up", + "down": "cross-env TZ=UTC tsx migration/index.ts down", + "reset": "cross-env TZ=UTC tsx migration/index.ts reset", + "up:backend_test": "cross-env TZ=UTC DB_DATABASE=gradido_test_backend tsx migration/index.ts up", + "up:federation_test": "cross-env TZ=UTC DB_DATABASE=gradido_test_federation tsx migration/index.ts up", + "up:dht_test": "cross-env TZ=UTC DB_DATABASE=gradido_test_dht tsx migration/index.ts up" }, "devDependencies": { - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@types/faker": "^5.5.9", "@types/geojson": "^7946.0.13", "@types/node": "^17.0.21", @@ -38,14 +38,17 @@ "@types/uuid": "^8.3.4", "cross-env": "^7.0.3", "decimal.js-light": "^2.5.1", - "esbuild": "^0.25.2", "dotenv": "^10.0.0", + "esbuild": "^0.25.2", "geojson": "^0.5.0", + "joi-extract-type": "^15.0.8", + "log4js": "^6.9.1", "mysql2": "^2.3.0", "reflect-metadata": "^0.1.13", + "source-map-support": "^0.5.21", "ts-mysql-migrate": "^1.0.2", "tsx": "^4.19.4", - "typeorm": "^0.3.16", + "typeorm": "^0.3.22", "uuid": "^8.3.2", "wkx": "^0.5.0" }, diff --git a/database/src/AppDatabase.ts b/database/src/AppDatabase.ts new file mode 100644 index 000000000..2a6181a26 --- /dev/null +++ b/database/src/AppDatabase.ts @@ -0,0 +1,114 @@ +import { DataSource as DBDataSource, FileLogger } from 'typeorm' +import { Migration, entities } from './entity' + +import { getLogger } from 'log4js' +import { latestDbVersion } from '.' +import { CONFIG } from './config' +import { LOG4JS_BASE_CATEGORY_NAME } from './config/const' + +const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.AppDatabase`) + +export class AppDatabase { + private static instance: AppDatabase + private dataSource: DBDataSource | undefined + + /** + * The Singleton's constructor should always be private to prevent direct + * construction calls with the `new` operator. + */ + private constructor() {} + + /** + * The static method that controls the access to the singleton instance. + * + * This implementation let you subclass the Singleton class while keeping + * just one instance of each subclass around. + */ + public static getInstance(): AppDatabase { + if (!AppDatabase.instance) { + AppDatabase.instance = new AppDatabase() + } + return AppDatabase.instance + } + + public isConnected(): boolean { + return this.dataSource?.isInitialized ?? 
false
+  }
+
+  public getDataSource(): DBDataSource {
+    if (!this.dataSource) {
+      throw new Error('Connection not initialized')
+    }
+    return this.dataSource
+  }
+
+  // create database connection, initialize with automatic retry and check for correct database version
+  public async init(): Promise<void> {
+    if (this.dataSource?.isInitialized) {
+      return
+    }
+    if (!this.dataSource) {
+      this.dataSource = new DBDataSource({
+        type: 'mysql',
+        legacySpatialSupport: false,
+        host: CONFIG.DB_HOST,
+        port: CONFIG.DB_PORT,
+        username: CONFIG.DB_USER,
+        password: CONFIG.DB_PASSWORD,
+        database: CONFIG.DB_DATABASE,
+        entities,
+        synchronize: false,
+        logging: CONFIG.TYPEORM_LOGGING_ACTIVE,
+        logger: CONFIG.TYPEORM_LOGGING_ACTIVE
+          ? new FileLogger('all', {
+              // workaround to keep the previous path working, because with esbuild the script root path has changed
+              logPath: (CONFIG.PRODUCTION ? '../' : '') + CONFIG.TYPEORM_LOGGING_RELATIVE_PATH,
+            })
+          : undefined,
+        extra: {
+          charset: 'utf8mb4_unicode_ci',
+        },
+      })
+    }
+    // retry connection on failure a few times to allow database to catch up
+    for (let attempt = 1; attempt <= CONFIG.DB_CONNECT_RETRY_COUNT; attempt++) {
+      try {
+        await this.dataSource.initialize()
+        if (this.dataSource.isInitialized) {
+          logger.info(`Database connection established on attempt ${attempt}`)
+          break
+        }
+      } catch (error) {
+        logger.warn(`Attempt ${attempt} failed to connect to DB:`, error)
+        await new Promise((resolve) => setTimeout(resolve, CONFIG.DB_CONNECT_RETRY_DELAY_MS))
+      }
+    }
+    if (!this.dataSource?.isInitialized) {
+      throw new Error('Could not connect to database')
+    }
+    // check for correct database version
+    await this.checkDBVersion()
+  }
+
+  public async destroy(): Promise<void> {
+    await this.dataSource?.destroy()
+  }
+  // ######################################
+  // private methods
+  // ######################################
+  private async checkDBVersion(): Promise<void> {
+    const [dbVersion] = await Migration.find({ order: { version: 'DESC' }, take: 1 })
+    if (!dbVersion) {
+      throw new Error('Could not find database version')
+    }
+    if (!dbVersion.fileName.startsWith(latestDbVersion)) {
+      throw new Error(
+        `Wrong database version detected - the backend requires '${latestDbVersion}' but found '${
+          dbVersion.fileName
+        }'`,
+      )
+    }
+  }
+}
+
+export const getDataSource = () => AppDatabase.getInstance().getDataSource()
diff --git a/database/src/config/const.ts b/database/src/config/const.ts
new file mode 100644
index 000000000..a02bb7918
--- /dev/null
+++ b/database/src/config/const.ts
@@ -0,0 +1,2 @@
+export const LOG4JS_BASE_CATEGORY_NAME = 'database'
+export const MIGRATIONS_TABLE = 'migrations'
diff --git a/database/src/config/index.ts b/database/src/config/index.ts
index fdfb1b57e..3eb93ad66 100644
--- a/database/src/config/index.ts
+++ b/database/src/config/index.ts
@@ -2,14 +2,6 @@ import dotenv from 'dotenv'
 dotenv.config()
 
-const constants = {
-  CONFIG_VERSION: {
-    DEFAULT: 'DEFAULT',
-    EXPECTED: 'v1.2022-03-18',
-    CURRENT: '',
-  },
-}
-
 const database = {
   DB_CONNECT_RETRY_COUNT: process.env.DB_CONNECT_RETRY_COUNT
     ? Number.parseInt(process.env.DB_CONNECT_RETRY_COUNT)
@@ -22,24 +14,11 @@ const database = {
   DB_USER: process.env.DB_USER ?? 'root',
   DB_PASSWORD: process.env.DB_PASSWORD ?? '',
   DB_DATABASE: process.env.DB_DATABASE ?? 'gradido_community',
+  TYPEORM_LOGGING_RELATIVE_PATH:
+    process.env.TYPEORM_LOGGING_RELATIVE_PATH ?? 
'typeorm.database.log', + TYPEORM_LOGGING_ACTIVE: process.env.TYPEORM_LOGGING_ACTIVE === 'true' || false, } - -const migrations = { - MIGRATIONS_TABLE: process.env.MIGRATIONS_TABLE || 'migrations', -} - +const PRODUCTION = process.env.NODE_ENV === 'production' || false const nodeEnv = process.env.NODE_ENV || 'development' -// Check config version -constants.CONFIG_VERSION.CURRENT = process.env.CONFIG_VERSION || constants.CONFIG_VERSION.DEFAULT -if ( - ![constants.CONFIG_VERSION.EXPECTED, constants.CONFIG_VERSION.DEFAULT].includes( - constants.CONFIG_VERSION.CURRENT, - ) -) { - throw new Error( - `Fatal: Config Version incorrect - expected "${constants.CONFIG_VERSION.EXPECTED}" or "${constants.CONFIG_VERSION.DEFAULT}", but found "${constants.CONFIG_VERSION.CURRENT}"`, - ) -} - -export const CONFIG = { ...constants, ...database, ...migrations, NODE_ENV: nodeEnv } +export const CONFIG = { ...database, NODE_ENV: nodeEnv, PRODUCTION } diff --git a/database/src/config/detectLastDBVersion.ts b/database/src/detectLastDBVersion.ts similarity index 94% rename from database/src/config/detectLastDBVersion.ts rename to database/src/detectLastDBVersion.ts index b91ac2949..8dedb813f 100644 --- a/database/src/config/detectLastDBVersion.ts +++ b/database/src/detectLastDBVersion.ts @@ -5,7 +5,7 @@ import path from 'node:path' const DB_VERSION_PATTERN = /^(\d{4}-[a-z0-9-_]+)/ // Define the paths to check -const migrationsDir = path.join(__dirname, '..', '..', 'migrations') +const migrationsDir = path.join(__dirname, '..', 'migration', 'migrations') // Helper function to get the highest version number from the directory function getLatestDbVersion(dir: string): string { diff --git a/database/entity/Community.ts b/database/src/entity/Community.ts similarity index 97% rename from database/entity/Community.ts rename to database/src/entity/Community.ts index fde1f0df0..314e96f6a 100644 --- a/database/entity/Community.ts +++ b/database/src/entity/Community.ts @@ -9,9 +9,9 @@ import { PrimaryGeneratedColumn, UpdateDateColumn, } from 'typeorm' -import { GeometryTransformer } from '../src/typeorm/GeometryTransformer' import { FederatedCommunity } from './FederatedCommunity' import { User } from './User' +import { GeometryTransformer } from './transformer/GeometryTransformer' @Entity('communities') export class Community extends BaseEntity { diff --git a/database/entity/Contribution.ts b/database/src/entity/Contribution.ts similarity index 97% rename from database/entity/Contribution.ts rename to database/src/entity/Contribution.ts index c7173a79c..53ec9f36f 100644 --- a/database/entity/Contribution.ts +++ b/database/src/entity/Contribution.ts @@ -10,10 +10,10 @@ import { OneToOne, PrimaryGeneratedColumn, } from 'typeorm' -import { DecimalTransformer } from '../src/typeorm/DecimalTransformer' import { ContributionMessage } from './ContributionMessage' import { Transaction } from './Transaction' import { User } from './User' +import { DecimalTransformer } from './transformer/DecimalTransformer' @Entity('contributions') export class Contribution extends BaseEntity { diff --git a/database/entity/ContributionLink.ts b/database/src/entity/ContributionLink.ts similarity index 96% rename from database/entity/ContributionLink.ts rename to database/src/entity/ContributionLink.ts index 8f79f6b85..fd66c8946 100644 --- a/database/entity/ContributionLink.ts +++ b/database/src/entity/ContributionLink.ts @@ -1,6 +1,6 @@ import { Decimal } from 'decimal.js-light' import { BaseEntity, Column, DeleteDateColumn, Entity, 
PrimaryGeneratedColumn } from 'typeorm' -import { DecimalTransformer } from '../src/typeorm/DecimalTransformer' +import { DecimalTransformer } from './transformer/DecimalTransformer' @Entity('contribution_links') export class ContributionLink extends BaseEntity { diff --git a/database/entity/ContributionMessage.ts b/database/src/entity/ContributionMessage.ts similarity index 100% rename from database/entity/ContributionMessage.ts rename to database/src/entity/ContributionMessage.ts diff --git a/database/entity/DltTransaction.ts b/database/src/entity/DltTransaction.ts similarity index 100% rename from database/entity/DltTransaction.ts rename to database/src/entity/DltTransaction.ts diff --git a/database/entity/Event.ts b/database/src/entity/Event.ts similarity index 97% rename from database/entity/Event.ts rename to database/src/entity/Event.ts index 868ddaf60..9d17ffdeb 100644 --- a/database/entity/Event.ts +++ b/database/src/entity/Event.ts @@ -8,13 +8,13 @@ import { ManyToOne, PrimaryGeneratedColumn, } from 'typeorm' -import { DecimalTransformer } from '../src/typeorm/DecimalTransformer' import { Contribution } from './Contribution' import { ContributionLink } from './ContributionLink' import { ContributionMessage } from './ContributionMessage' import { Transaction } from './Transaction' import { TransactionLink } from './TransactionLink' import { User } from './User' +import { DecimalTransformer } from './transformer/DecimalTransformer' @Entity('events') export class Event extends BaseEntity { diff --git a/database/entity/FederatedCommunity.ts b/database/src/entity/FederatedCommunity.ts similarity index 100% rename from database/entity/FederatedCommunity.ts rename to database/src/entity/FederatedCommunity.ts diff --git a/database/entity/LoginElopageBuys.ts b/database/src/entity/LoginElopageBuys.ts similarity index 100% rename from database/entity/LoginElopageBuys.ts rename to database/src/entity/LoginElopageBuys.ts diff --git a/database/entity/Migration.ts b/database/src/entity/Migration.ts similarity index 100% rename from database/entity/Migration.ts rename to database/src/entity/Migration.ts diff --git a/database/entity/OpenaiThreads.ts b/database/src/entity/OpenaiThreads.ts similarity index 100% rename from database/entity/OpenaiThreads.ts rename to database/src/entity/OpenaiThreads.ts diff --git a/database/entity/PendingTransaction.ts b/database/src/entity/PendingTransaction.ts similarity index 97% rename from database/entity/PendingTransaction.ts rename to database/src/entity/PendingTransaction.ts index 5281e38ff..71d3b5f30 100644 --- a/database/entity/PendingTransaction.ts +++ b/database/src/entity/PendingTransaction.ts @@ -1,7 +1,7 @@ /* eslint-disable no-use-before-define */ import { Decimal } from 'decimal.js-light' import { BaseEntity, Column, Entity, PrimaryGeneratedColumn } from 'typeorm' -import { DecimalTransformer } from '../src/typeorm/DecimalTransformer' +import { DecimalTransformer } from './transformer/DecimalTransformer' @Entity('pending_transactions') export class PendingTransaction extends BaseEntity { diff --git a/database/entity/ProjectBranding.ts b/database/src/entity/ProjectBranding.ts similarity index 100% rename from database/entity/ProjectBranding.ts rename to database/src/entity/ProjectBranding.ts diff --git a/database/entity/Transaction.ts b/database/src/entity/Transaction.ts similarity index 98% rename from database/entity/Transaction.ts rename to database/src/entity/Transaction.ts index 196005ef6..b7d83bdf1 100644 --- a/database/entity/Transaction.ts 
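Regarding the new database/src/AppDatabase.ts above: together with the reworked database/src/index.ts later in this diff it becomes the public entry point of the database package, so services obtain their TypeORM DataSource from the singleton instead of wiring up their own connection. A minimal, hypothetical consumer sketch (the bootstrap function and the package import name are illustrative; AppDatabase, getDataSource and the User entity are real exports):

// hypothetical consumer, not part of this change set
import { AppDatabase, User, getDataSource } from 'database'

async function bootstrap(): Promise<void> {
  const db = AppDatabase.getInstance()
  // init() retries up to CONFIG.DB_CONNECT_RETRY_COUNT times and then
  // verifies that the migrations table matches latestDbVersion
  await db.init()
  const userCount = await getDataSource().getRepository(User).count()
  process.stdout.write(`connected, ${userCount} users\n`)
  await db.destroy()
}

bootstrap().catch((err) => {
  process.stderr.write(`${err}\n`)
  process.exit(1)
})
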
+++ b/database/src/entity/Transaction.ts @@ -1,9 +1,9 @@ /* eslint-disable no-use-before-define */ import { Decimal } from 'decimal.js-light' import { BaseEntity, Column, Entity, JoinColumn, OneToOne, PrimaryGeneratedColumn } from 'typeorm' -import { DecimalTransformer } from '../src/typeorm/DecimalTransformer' import { Contribution } from './Contribution' import { DltTransaction } from './DltTransaction' +import { DecimalTransformer } from './transformer/DecimalTransformer' @Entity('transactions') export class Transaction extends BaseEntity { diff --git a/database/entity/TransactionLink.ts b/database/src/entity/TransactionLink.ts similarity index 94% rename from database/entity/TransactionLink.ts rename to database/src/entity/TransactionLink.ts index 68e8c4b68..e1ccfa675 100644 --- a/database/entity/TransactionLink.ts +++ b/database/src/entity/TransactionLink.ts @@ -1,6 +1,6 @@ import { Decimal } from 'decimal.js-light' import { BaseEntity, Column, DeleteDateColumn, Entity, PrimaryGeneratedColumn } from 'typeorm' -import { DecimalTransformer } from '../src/typeorm/DecimalTransformer' +import { DecimalTransformer } from './transformer/DecimalTransformer' @Entity('transaction_links') export class TransactionLink extends BaseEntity { diff --git a/database/entity/User.ts b/database/src/entity/User.ts similarity index 98% rename from database/entity/User.ts rename to database/src/entity/User.ts index 9ff7d50f8..9ff6f384b 100644 --- a/database/entity/User.ts +++ b/database/src/entity/User.ts @@ -10,12 +10,12 @@ import { OneToOne, PrimaryGeneratedColumn, } from 'typeorm' -import { GeometryTransformer } from '../src/typeorm/GeometryTransformer' import { Community } from './Community' import { Contribution } from './Contribution' import { ContributionMessage } from './ContributionMessage' import { UserContact } from './UserContact' import { UserRole } from './UserRole' +import { GeometryTransformer } from './transformer/GeometryTransformer' @Entity('users', { engine: 'InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci' }) export class User extends BaseEntity { diff --git a/database/entity/UserContact.ts b/database/src/entity/UserContact.ts similarity index 100% rename from database/entity/UserContact.ts rename to database/src/entity/UserContact.ts diff --git a/database/entity/UserRole.ts b/database/src/entity/UserRole.ts similarity index 100% rename from database/entity/UserRole.ts rename to database/src/entity/UserRole.ts diff --git a/database/entity/index.ts b/database/src/entity/index.ts similarity index 92% rename from database/entity/index.ts rename to database/src/entity/index.ts index b6ecd8f6c..01195c37e 100644 --- a/database/entity/index.ts +++ b/database/src/entity/index.ts @@ -1,4 +1,3 @@ -import { latestDbVersion } from '../src/config/detectLastDBVersion' import { Community } from './Community' import { Contribution } from './Contribution' import { ContributionLink } from './ContributionLink' @@ -35,9 +34,7 @@ export { User, UserContact, UserRole, - latestDbVersion, } -export * from '../logging' export const entities = [ Community, diff --git a/database/src/typeorm/DecimalTransformer.ts b/database/src/entity/transformer/DecimalTransformer.ts similarity index 100% rename from database/src/typeorm/DecimalTransformer.ts rename to database/src/entity/transformer/DecimalTransformer.ts diff --git a/database/src/typeorm/GeometryTransformer.ts b/database/src/entity/transformer/GeometryTransformer.ts similarity index 100% rename from database/src/typeorm/GeometryTransformer.ts rename to 
database/src/entity/transformer/GeometryTransformer.ts diff --git a/database/src/index.ts b/database/src/index.ts index ca5878f1a..1dd0a84d6 100644 --- a/database/src/index.ts +++ b/database/src/index.ts @@ -1,102 +1,62 @@ -import { CONFIG } from './config' -import { DatabaseState, getDatabaseState } from './prepare' +import { latestDbVersion } from './detectLastDBVersion' +import { Community } from './entity/Community' +import { Contribution } from './entity/Contribution' +import { ContributionLink } from './entity/ContributionLink' +import { ContributionMessage } from './entity/ContributionMessage' +import { DltTransaction } from './entity/DltTransaction' +import { Event } from './entity/Event' +import { FederatedCommunity } from './entity/FederatedCommunity' +import { LoginElopageBuys } from './entity/LoginElopageBuys' +import { Migration } from './entity/Migration' +import { OpenaiThreads } from './entity/OpenaiThreads' +import { PendingTransaction } from './entity/PendingTransaction' +import { ProjectBranding } from './entity/ProjectBranding' +import { Transaction } from './entity/Transaction' +import { TransactionLink } from './entity/TransactionLink' +import { User } from './entity/User' +import { UserContact } from './entity/UserContact' +import { UserRole } from './entity/UserRole' -import path from 'node:path' -import { createPool } from 'mysql' -import { Migration } from 'ts-mysql-migrate' -import { clearDatabase } from './clear' -import { latestDbVersion } from './config/detectLastDBVersion' - -const run = async (command: string) => { - if (command === 'clear') { - if (CONFIG.NODE_ENV === 'production') { - throw new Error('Clearing database in production is not allowed') - } - await clearDatabase() - return - } - // Database actions not supported by our migration library - // await createDatabase() - const state = await getDatabaseState() - if (state === DatabaseState.NOT_CONNECTED) { - throw new Error( - `Database not connected, is database server running? 
- host: ${CONFIG.DB_HOST} - port: ${CONFIG.DB_PORT} - user: ${CONFIG.DB_USER} - password: ${CONFIG.DB_PASSWORD.slice(-2)} - database: ${CONFIG.DB_DATABASE}`, - ) - } - if (state === DatabaseState.HIGHER_VERSION) { - throw new Error('Database version is higher than required, please switch to the correct branch') - } - if (state === DatabaseState.SAME_VERSION) { - if (command === 'up') { - // biome-ignore lint/suspicious/noConsole: no logger present - console.log('Database is up to date') - return - } - } - // Initialize Migrations - const pool = createPool({ - host: CONFIG.DB_HOST, - port: CONFIG.DB_PORT, - user: CONFIG.DB_USER, - password: CONFIG.DB_PASSWORD, - database: CONFIG.DB_DATABASE, - }) - const migration = new Migration({ - conn: pool, - tableName: CONFIG.MIGRATIONS_TABLE, - silent: true, - dir: path.join(__dirname, '..', 'migrations'), - }) - await migration.initialize() - - // Execute command - switch (command) { - case 'up': - await migration.up() // use for upgrade script - break - case 'down': - await migration.down() // use for downgrade script - break - case 'reset': - if (CONFIG.NODE_ENV === 'production') { - throw new Error('Resetting database in production is not allowed') - } - await migration.reset() - break - default: - throw new Error(`Unsupported command ${command}`) - } - if (command === 'reset') { - // biome-ignore lint/suspicious/noConsole: no logger present - console.log('Database was reset') - } else { - const currentDbVersion = await migration.getLastVersion() - // biome-ignore lint/suspicious/noConsole: no logger present - console.log(`Database was ${command} migrated to version: ${currentDbVersion.fileName}`) - if (latestDbVersion === currentDbVersion.fileName.split('.')[0]) { - // biome-ignore lint/suspicious/noConsole: no logger present - console.log('Database is now up to date') - } else { - // biome-ignore lint/suspicious/noConsole: no logger present - console.log('The latest database version is: ', latestDbVersion) - } - } - - // Terminate connections gracefully - pool.end() +export { + Community, + Contribution, + ContributionLink, + ContributionMessage, + DltTransaction, + Event, + FederatedCommunity, + LoginElopageBuys, + Migration, + ProjectBranding, + OpenaiThreads, + PendingTransaction, + Transaction, + TransactionLink, + User, + UserContact, + UserRole, } -run(process.argv[2]) - .catch((err) => { - // biome-ignore lint/suspicious/noConsole: no logger present - console.log(err) - process.exit(1) - }) - .then(() => { - process.exit() - }) +export const entities = [ + Community, + Contribution, + ContributionLink, + ContributionMessage, + DltTransaction, + Event, + FederatedCommunity, + LoginElopageBuys, + Migration, + ProjectBranding, + OpenaiThreads, + PendingTransaction, + Transaction, + TransactionLink, + User, + UserContact, + UserRole, +] + +export { latestDbVersion } +export * from './logging' +export { AppDatabase } from './AppDatabase' diff --git a/database/logging/AbstractLogging.view.ts b/database/src/logging/AbstractLogging.view.ts similarity index 86% rename from database/logging/AbstractLogging.view.ts rename to database/src/logging/AbstractLogging.view.ts index 4d8824cc3..00fdd4703 100644 --- a/database/logging/AbstractLogging.view.ts +++ b/database/src/logging/AbstractLogging.view.ts @@ -23,7 +23,11 @@ export abstract class AbstractLoggingView { public dateToString(date: Date | undefined | null): string | undefined { if (date) { - return date.toISOString() + if (date instanceof Date) { + return date.toISOString() + } else { + return 
new Date(date).toISOString() + } } return undefined } diff --git a/database/logging/CommunityLogging.view.ts b/database/src/logging/CommunityLogging.view.ts similarity index 94% rename from database/logging/CommunityLogging.view.ts rename to database/src/logging/CommunityLogging.view.ts index 0108aefd8..9bd847416 100644 --- a/database/logging/CommunityLogging.view.ts +++ b/database/src/logging/CommunityLogging.view.ts @@ -1,4 +1,4 @@ -import { Community } from '../entity/Community' +import { Community } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' diff --git a/database/logging/ContributionLogging.view.ts b/database/src/logging/ContributionLogging.view.ts similarity index 97% rename from database/logging/ContributionLogging.view.ts rename to database/src/logging/ContributionLogging.view.ts index 9896fac4e..a9dd5a36f 100644 --- a/database/logging/ContributionLogging.view.ts +++ b/database/src/logging/ContributionLogging.view.ts @@ -1,4 +1,4 @@ -import { Contribution } from '../entity/Contribution' +import { Contribution } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' import { ContributionMessageLoggingView } from './ContributionMessageLogging.view' import { TransactionLoggingView } from './TransactionLogging.view' diff --git a/database/logging/ContributionMessageLogging.view.ts b/database/src/logging/ContributionMessageLogging.view.ts similarity index 93% rename from database/logging/ContributionMessageLogging.view.ts rename to database/src/logging/ContributionMessageLogging.view.ts index 741820a3c..7c59e6213 100644 --- a/database/logging/ContributionMessageLogging.view.ts +++ b/database/src/logging/ContributionMessageLogging.view.ts @@ -1,4 +1,4 @@ -import { ContributionMessage } from '../entity/ContributionMessage' +import { ContributionMessage } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' import { ContributionLoggingView } from './ContributionLogging.view' import { UserLoggingView } from './UserLogging.view' diff --git a/database/logging/DltTransactionLogging.view.ts b/database/src/logging/DltTransactionLogging.view.ts similarity index 92% rename from database/logging/DltTransactionLogging.view.ts rename to database/src/logging/DltTransactionLogging.view.ts index 1a51121c8..e990e5ace 100644 --- a/database/logging/DltTransactionLogging.view.ts +++ b/database/src/logging/DltTransactionLogging.view.ts @@ -1,4 +1,4 @@ -import { DltTransaction } from '../entity/DltTransaction' +import { DltTransaction } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' import { TransactionLoggingView } from './TransactionLogging.view' diff --git a/database/logging/FederatedCommunityLogging.view.ts b/database/src/logging/FederatedCommunityLogging.view.ts similarity index 91% rename from database/logging/FederatedCommunityLogging.view.ts rename to database/src/logging/FederatedCommunityLogging.view.ts index ccfc9f005..a5bcb3e5a 100644 --- a/database/logging/FederatedCommunityLogging.view.ts +++ b/database/src/logging/FederatedCommunityLogging.view.ts @@ -1,4 +1,4 @@ -import { FederatedCommunity } from '../entity/FederatedCommunity' +import { FederatedCommunity } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' export class FederatedCommunityLoggingView extends AbstractLoggingView { diff --git a/database/logging/PendingTransactionLogging.view.ts b/database/src/logging/PendingTransactionLogging.view.ts similarity index 83% rename from 
database/logging/PendingTransactionLogging.view.ts rename to database/src/logging/PendingTransactionLogging.view.ts index 78035004b..9cf27be88 100644 --- a/database/logging/PendingTransactionLogging.view.ts +++ b/database/src/logging/PendingTransactionLogging.view.ts @@ -1,5 +1,4 @@ -import { PendingTransaction } from '../entity/PendingTransaction' -import { Transaction } from '../entity/Transaction' +import { PendingTransaction, Transaction } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' import { TransactionLoggingView } from './TransactionLogging.view' diff --git a/database/logging/TransactionLogging.view.ts b/database/src/logging/TransactionLogging.view.ts similarity index 97% rename from database/logging/TransactionLogging.view.ts rename to database/src/logging/TransactionLogging.view.ts index e0a039a4b..bc0eea761 100644 --- a/database/logging/TransactionLogging.view.ts +++ b/database/src/logging/TransactionLogging.view.ts @@ -1,4 +1,4 @@ -import { Transaction } from '../entity/Transaction' +import { Transaction } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' import { ContributionLoggingView } from './ContributionLogging.view' import { DltTransactionLoggingView } from './DltTransactionLogging.view' diff --git a/database/logging/UserContactLogging.view.ts b/database/src/logging/UserContactLogging.view.ts similarity index 95% rename from database/logging/UserContactLogging.view.ts rename to database/src/logging/UserContactLogging.view.ts index e843ca738..d80b17c67 100644 --- a/database/logging/UserContactLogging.view.ts +++ b/database/src/logging/UserContactLogging.view.ts @@ -1,4 +1,4 @@ -import { UserContact } from '../entity/UserContact' +import { UserContact } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' import { UserLoggingView } from './UserLogging.view' diff --git a/database/logging/UserLogging.view.ts b/database/src/logging/UserLogging.view.ts similarity index 98% rename from database/logging/UserLogging.view.ts rename to database/src/logging/UserLogging.view.ts index 375068ff7..1aa5e4407 100644 --- a/database/logging/UserLogging.view.ts +++ b/database/src/logging/UserLogging.view.ts @@ -1,4 +1,4 @@ -import { User } from '../entity/User' +import { User } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' import { ContributionLoggingView } from './ContributionLogging.view' import { ContributionMessageLoggingView } from './ContributionMessageLogging.view' diff --git a/database/logging/UserRoleLogging.view.ts b/database/src/logging/UserRoleLogging.view.ts similarity index 92% rename from database/logging/UserRoleLogging.view.ts rename to database/src/logging/UserRoleLogging.view.ts index 9b4914f15..52684d242 100644 --- a/database/logging/UserRoleLogging.view.ts +++ b/database/src/logging/UserRoleLogging.view.ts @@ -1,4 +1,4 @@ -import { UserRole } from '../entity/UserRole' +import { UserRole } from '../entity' import { AbstractLoggingView } from './AbstractLogging.view' import { UserLoggingView } from './UserLogging.view' diff --git a/database/logging/index.ts b/database/src/logging/index.ts similarity index 87% rename from database/logging/index.ts rename to database/src/logging/index.ts index 9a436ca47..c19bd9a57 100644 --- a/database/logging/index.ts +++ b/database/src/logging/index.ts @@ -1,3 +1,5 @@ +import { getLogger } from 'log4js' +import { LOG4JS_BASE_CATEGORY_NAME } from '../config/const' import { AbstractLoggingView } from './AbstractLogging.view' 
import { CommunityLoggingView } from './CommunityLogging.view' import { ContributionLoggingView } from './ContributionLogging.view' @@ -23,3 +25,5 @@ export { UserLoggingView, UserRoleLoggingView, } + +export const logger = getLogger(LOG4JS_BASE_CATEGORY_NAME) diff --git a/database/src/typeorm.ts b/database/src/typeorm.ts deleted file mode 100644 index 4b4f494f1..000000000 --- a/database/src/typeorm.ts +++ /dev/null @@ -1 +0,0 @@ -export * from 'typeorm' diff --git a/database/tsconfig.json b/database/tsconfig.json index 37347b875..fc32bbbab 100644 --- a/database/tsconfig.json +++ b/database/tsconfig.json @@ -45,11 +45,13 @@ /* Module Resolution Options */ // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ - // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ - // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ + //"baseUrl": ".", /* Base directory to resolve non-absolute module names. */ + //"paths": { + //"@/*": ["src/*"], /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ + //}, // "rootDirs": [".", "../database"], /* List of root folders whose combined content represents the structure of the project at runtime. */ // "typeRoots": [], /* List of folders to include type definitions from. */ - "types": ["node"], /* Type declaration files to be included in compilation. */ + // "types": ["node"], /* Type declaration files to be included in compilation. */ // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ diff --git a/deployment/bare_metal/Readme.md b/deployment/bare_metal/Readme.md new file mode 100644 index 000000000..34d791e0f --- /dev/null +++ b/deployment/bare_metal/Readme.md @@ -0,0 +1,46 @@ +# Deployment for bare metal servers +This setup is designed for **bare metal servers**, offering maximum performance and reliability for Gradido deployments. However, it can also work on **virtual servers (VPS)** – if properly configured. + +## 🧠 Memory Considerations on VServers + +We have observed that some VServer providers apply **aggressive virtual memory constraints** or overcommit strategies that may cause **random crashes** of Node.js processes – even when total RAM appears sufficient. + +### Important Notes: + +- A single Node.js process may **allocate 10–12 GB of virtual memory** (VIRT), even if **real memory usage (RES)** stays below 200 MB. +- Some VPS environments **panic or kill processes** when virtual memory allocation exceeds certain invisible thresholds. + +## 🛡️ Rate Limiting (API Protection) + +This deployment includes built-in **rate limiting** for public-facing endpoints to prevent abuse and denial-of-service attacks. 
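For the memory behaviour described above, a quick illustrative check (not part of the repository scripts; the process name `node` and the sort option are assumptions) is to compare virtual (VSZ, roughly VIRT) and resident (RSS) memory of the running Node.js processes:

```bash
# Node.js processes sorted by virtual memory size; VSZ and RSS are in KB
ps -C node -o pid,user,vsz,rss,etime,args --sort=-vsz
```

A large VSZ combined with a small RSS is normal for Node.js and only becomes a problem on hosts that enforce hard virtual-memory limits.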
+ +### 🔒 NGINX Rate & Connection Limits Overview + +| Path | Zone | Rate Limit | Burst | Max Connections | Notes | +|----------------------------|----------|----------------|-------|------------------|--------------------------------| +| `/` | frontend | 15 requests/s | 150 | 60 | Public frontend | +| `/admin` | frontend | 15 requests/s | 30 | 20 | Admin frontend | +| `/graphql` | backend | 20 requests/s | 40 | 20 | Main backend GraphQL API | +| `/hook` | backend | 20 requests/s | 20 | 20 | Internal backend webhooks | +| `/hooks/` | backend | 20 requests/s | 20 | 20 | Reverse proxy for webhooks | +| `/api/` | api | 30 requests/s | 60 | 30 | Federation GraphQL API | + +- ``: placeholder for federation api version +- All zones use `$binary_remote_addr` for client identification. +- `nodelay` ensures burst requests are not delayed (they are either accepted or rejected). +- Global connection zone: `limit_conn_zone $binary_remote_addr zone=addr:10m;` + +This setup helps protect public and internal interfaces from abuse, while ensuring smooth parallel access during high load periods (e.g., UI builds or cluster sync). + +These limits work like a traffic cop at each route: +- **Rate limits** (`limit_req`) define how many requests per second a single client can send. +- **Burst values** allow short spikes without blocking – like a temporary buffer. +- **Connection limits** (`limit_conn`) cap how many concurrent connections a single IP can keep open. + +Each route (frontend, backend, API, etc.) has its own configuration depending on its expected traffic pattern and sensitivity. For example: +- The public frontend allows higher bursts (many assets load at once). +- The GraphQL backend and admin interfaces are more tightly controlled. + +This ensures fairness, avoids accidental DoS scenarios, and keeps overall latency low, even under high usage. 
+ + diff --git a/deployment/bare_metal/nginx/common/limit_requests.conf b/deployment/bare_metal/nginx/common/limit_requests.conf index c9501fd64..022a6d3a3 100644 --- a/deployment/bare_metal/nginx/common/limit_requests.conf +++ b/deployment/bare_metal/nginx/common/limit_requests.conf @@ -1,4 +1,4 @@ -limit_req_zone $binary_remote_addr zone=frontend:20m rate=5r/s; -limit_req_zone $binary_remote_addr zone=backend:25m rate=15r/s; +limit_req_zone $binary_remote_addr zone=frontend:20m rate=15r/s; +limit_req_zone $binary_remote_addr zone=backend:25m rate=20r/s; limit_req_zone $binary_remote_addr zone=api:5m rate=30r/s; limit_conn_zone $binary_remote_addr zone=addr:10m; \ No newline at end of file diff --git a/deployment/bare_metal/nginx/conf.d/logging.conf b/deployment/bare_metal/nginx/conf.d/logging.conf index a76e8fae7..b8ed225a8 100644 --- a/deployment/bare_metal/nginx/conf.d/logging.conf +++ b/deployment/bare_metal/nginx/conf.d/logging.conf @@ -1,4 +1,6 @@ log_format gradido_log '$remote_addr - $remote_user [$time_local] ' '"$request_method $status $request_uri"' ' "$http_referer" "$http_user_agent"' - ' $server_protocol $body_bytes_sent $request_time'; \ No newline at end of file + ' $server_protocol $body_bytes_sent $request_time' + ' limit status: $limit_req_status' + ; \ No newline at end of file diff --git a/deployment/bare_metal/nginx/sites-available/gradido-federation.conf.template b/deployment/bare_metal/nginx/sites-available/gradido-federation.conf.template index cf5f53b25..5123deb5e 100644 --- a/deployment/bare_metal/nginx/sites-available/gradido-federation.conf.template +++ b/deployment/bare_metal/nginx/sites-available/gradido-federation.conf.template @@ -1,7 +1,7 @@ location /api/$FEDERATION_APIVERSION { - #limit_req zone=api burst=60 nodelay; - #limit_conn addr 30; + limit_req zone=api burst=60 nodelay; + limit_conn addr 30; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; diff --git a/deployment/bare_metal/nginx/sites-available/gradido.conf.ssl.template b/deployment/bare_metal/nginx/sites-available/gradido.conf.ssl.template index 294e9f8a0..1eb01f09e 100644 --- a/deployment/bare_metal/nginx/sites-available/gradido.conf.ssl.template +++ b/deployment/bare_metal/nginx/sites-available/gradido.conf.ssl.template @@ -25,8 +25,9 @@ server { include /etc/nginx/common/protect_add_header.conf; # protect from slow loris - #client_body_timeout 10s; - #client_header_timeout 10s; + client_body_timeout 10s; + client_header_timeout 10s; + send_timeout 10s; # protect from range attack (in http header) if ($http_range ~ "d{9,}") { @@ -53,12 +54,30 @@ server { # Frontend (default) location / { - - #limit_req zone=frontend burst=40 nodelay; - #limit_conn addr 40; + limit_req zone=frontend burst=150 nodelay; + limit_conn addr 60; root $PROJECT_ROOT/frontend/build/; index index.html; - try_files $uri $uri/ /index.html = 404; + + # caching rules for assets + # static assets + location ~* \.(?:woff2?|ttf|otf|eot|jpg|jpeg|png|gif|svg|webp|ico)$ { + # keep assets for a week + add_header Cache-Control "public, max-age=604800"; + try_files $uri =404; + } + # hashed assets + location ~* \.(?:js|css|json)$ { + add_header Cache-Control "public, max-age=31536000, immutable"; + try_files $uri =404; + } + + try_files $uri $uri/ /index.html = 404; + + # don't cache index.html + add_header Cache-Control "no-cache, no-store, must-revalidate"; + add_header Pragma "no-cache"; + add_header Expires 0; access_log $GRADIDO_LOG_PATH/nginx-access.frontend.log gradido_log; error_log 
$GRADIDO_LOG_PATH/nginx-error.frontend.log warn; @@ -66,8 +85,8 @@ server { # Backend location /graphql { - #limit_req zone=backend burst=10 nodelay; - #limit_conn addr 10; + limit_req zone=backend burst=40 nodelay; + limit_conn addr 20; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection 'upgrade'; @@ -84,8 +103,8 @@ server { # Backend webhooks location /hook { - #limit_req zone=backend burst=10; - #limit_conn addr 10; + limit_req zone=backend burst=20 nodelay; + limit_conn addr 20; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection 'upgrade'; @@ -102,8 +121,8 @@ server { # Webhook reverse proxy location /hooks/ { - #limit_req zone=backend burst=10; - #limit_conn addr 10; + limit_req zone=backend burst=20 nodelay; + limit_conn addr 20; proxy_pass http://127.0.0.1:9000/hooks/; access_log $GRADIDO_LOG_PATH/nginx-access.hooks.log gradido_log; @@ -112,13 +131,32 @@ server { # Admin Frontend location /admin { - #limit_req zone=frontend burst=30 nodelay; - #limit_conn addr 40; - rewrite ^/admin/(.*)$ /$1 break; - root $PROJECT_ROOT/admin/build/; + limit_req zone=frontend burst=30 nodelay; + limit_conn addr 20; + #rewrite ^/admin/(.*)$ /$1 break; + alias $PROJECT_ROOT/admin/build/; index index.html; + + # caching rules for assets + # static assets + location ~* \.(?:woff2?|ttf|otf|eot|jpg|jpeg|png|gif|svg|webp|ico)$ { + # keep assets for a week + add_header Cache-Control "public, max-age=604800"; + try_files $uri =404; + } + # hashed assets + location ~* \.(?:js|css|json)$ { + add_header Cache-Control "public, max-age=31536000, immutable"; + try_files $uri =404; + } + try_files $uri $uri/ /index.html = 404; + # don't cache index.html + add_header Cache-Control "no-cache, no-store, must-revalidate"; + add_header Pragma "no-cache"; + add_header Expires 0; + access_log $GRADIDO_LOG_PATH/nginx-access.admin.log gradido_log; error_log $GRADIDO_LOG_PATH/nginx-error.admin.log warn; } diff --git a/deployment/bare_metal/nginx/sites-available/gradido.conf.template b/deployment/bare_metal/nginx/sites-available/gradido.conf.template index 7bd28b228..1f5ca2304 100644 --- a/deployment/bare_metal/nginx/sites-available/gradido.conf.template +++ b/deployment/bare_metal/nginx/sites-available/gradido.conf.template @@ -10,8 +10,9 @@ server { include /etc/nginx/common/protect_add_header.conf; # protect from slow loris - #client_body_timeout 10s; - #client_header_timeout 10s; + client_body_timeout 10s; + client_header_timeout 10s; + send_timeout 10s; # protect from range attack (in http header) if ($http_range ~ "d{9,}") { @@ -38,20 +39,39 @@ server { # Frontend (default) location / { - #limit_req zone=frontend burst=40 nodelay; - #limit_conn addr 40; + limit_req zone=frontend burst=150 nodelay; + limit_conn addr 60; root $PROJECT_ROOT/frontend/build/; index index.html; + + # caching rules for assets + # static assets + location ~* \.(?:woff2?|ttf|otf|eot|jpg|jpeg|png|gif|svg|webp|ico)$ { + # keep assets for a week + add_header Cache-Control "public, max-age=604800"; + try_files $uri =404; + } + # hashed assets + location ~* \.(?:js|css|json)$ { + add_header Cache-Control "public, max-age=31536000, immutable"; + try_files $uri =404; + } + try_files $uri $uri/ /index.html = 404; + # don't cache index.html + add_header Cache-Control "no-cache, no-store, must-revalidate"; + add_header Pragma "no-cache"; + add_header Expires 0; + access_log $GRADIDO_LOG_PATH/nginx-access.frontend.log gradido_log; error_log 
$GRADIDO_LOG_PATH/nginx-error.frontend.log warn; } # Backend location /graphql { - #limit_req zone=backend burst=10 nodelay; - #limit_conn addr 10; + limit_req zone=backend burst=40 nodelay; + limit_conn addr 20; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection 'upgrade'; @@ -68,8 +88,8 @@ server { # Backend webhooks location /hook { - #limit_req zone=backend burst=10; - #limit_conn addr 10; + limit_req zone=backend burst=20 nodelay; + limit_conn addr 20; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection 'upgrade'; @@ -86,8 +106,8 @@ server { # Webhook reverse proxy location /hooks/ { - #limit_req zone=backend burst=10; - #limit_conn addr 10; + limit_req zone=backend burst=20 nodelay; + limit_conn addr 20; proxy_pass http://127.0.0.1:9000/hooks/; access_log $GRADIDO_LOG_PATH/nginx-access.hooks.log gradido_log; @@ -96,13 +116,32 @@ server { # Admin Frontend location /admin { - #limit_req zone=frontend burst=30 nodelay; - #limit_conn addr 40; + limit_req zone=frontend burst=30 nodelay; + limit_conn addr 20; rewrite ^/admin/(.*)$ /$1 break; root $PROJECT_ROOT/admin/build/; index index.html; + + # caching rules for assets + # static assets + location ~* \.(?:woff2?|ttf|otf|eot|jpg|jpeg|png|gif|svg|webp|ico)$ { + # keep assets for a week + add_header Cache-Control "public, max-age=604800"; + # try_files $uri =404; + } + # hashed assets + location ~* \.(?:js|css|json)$ { + add_header Cache-Control "public, max-age=31536000, immutable"; + # try_files $uri =404; + } + try_files $uri $uri/ /index.html = 404; + # don't cache index.html + add_header Cache-Control "no-cache, no-store, must-revalidate"; + add_header Pragma "no-cache"; + add_header Expires 0; + access_log $GRADIDO_LOG_PATH/nginx-access.admin.log gradido_log; error_log $GRADIDO_LOG_PATH/nginx-error.admin.log warn; } diff --git a/deployment/bare_metal/start.sh b/deployment/bare_metal/start.sh index f38621943..0eda8958a 100755 --- a/deployment/bare_metal/start.sh +++ b/deployment/bare_metal/start.sh @@ -101,7 +101,15 @@ TODAY=$(date +"%Y-%m-%d") # Create a new updating.html from the template \cp $SCRIPT_DIR/nginx/update-page/updating.html.template $UPDATE_HTML -# redirect all output of the script to the UPDATE_HTML and also have things on console +# store real console stream in fd 3 +if test -t 1; then + # stdout is a TTY - normal console + exec 3> /dev/tty +else + # stdout is not a TTY - probably Docker or CI + exec 3> /proc/$$/fd/1 +fi +# redirect all output of the script to the UPDATE_HTML # TODO: this might pose a security risk exec > >(tee -a $UPDATE_HTML) 2>&1 @@ -109,34 +117,36 @@ exec > >(tee -a $UPDATE_HTML) 2>&1 echo 'Configuring nginx to serve the update-page' nginx_restart() { sudo /etc/init.d/nginx restart || { - echo -e "\e[33mwarn: nginx restart failed, will try to fix with 'sudo systemctl reset-failed nginx' and 'sudo systemctl start nginx'\e[0m" > /dev/tty + echo -e "\e[33mwarn: nginx restart failed\e[0m" >&3 + # run nginx -t to show problem but ignore exit code to prevent trap + { sudo nginx -t || true; } >&3 + echo -e "\e[33mwarn: will try to fix with 'sudo systemctl reset-failed nginx' and 'sudo systemctl start nginx'\e[0m" >&3 sudo systemctl reset-failed nginx sudo systemctl start nginx } } -nginx_restart ln -sf $SCRIPT_DIR/nginx/sites-available/update-page.conf $SCRIPT_DIR/nginx/sites-enabled/default - +nginx_restart # helper functions log_step() { local message="$1" - echo -e "\e[34m$message\e[0m" # > /dev/tty # blue in console + 
echo -e "\e[34m$message\e[0m" >&3 # blue in console echo "

$message

" >> "$UPDATE_HTML" # blue in html } log_error() { local message="$1" - echo -e "\e[31m$message\e[0m" # > /dev/tty # red in console + echo -e "\e[31m$message\e[0m" >&3 # red in console echo "$message" >> "$UPDATE_HTML" # red in html } log_warn() { local message="$1" - echo -e "\e[33m$message\e[0m" # > /dev/tty # orange in console + echo -e "\e[33m$message\e[0m" >&3 # orange in console echo "$message" >> "$UPDATE_HTML" # orange in html } log_success() { local message="$1" - echo -e "\e[32m$message\e[0m" # > /dev/tty # green in console + echo -e "\e[32m$message\e[0m" >&3 # green in console echo "

$message

" >> "$UPDATE_HTML" # green in html } @@ -287,12 +297,19 @@ else fi # start after building all to use up less ressources -pm2 start --name gradido-backend "turbo backend#start --env-mode=loose" -l $GRADIDO_LOG_PATH/pm2.backend.$TODAY.log --log-date-format 'YYYY-MM-DD HH:mm:ss.SSS' -#pm2 start --name gradido-frontend "yarn --cwd $PROJECT_ROOT/frontend start" -l $GRADIDO_LOG_PATH/pm2.frontend.$TODAY.log --log-date-format 'YYYY-MM-DD HH:mm:ss.SSS' -#pm2 start --name gradido-admin "yarn --cwd $PROJECT_ROOT/admin start" -l $GRADIDO_LOG_PATH/pm2.admin.$TODAY.log --log-date-format 'YYYY-MM-DD HH:mm:ss.SSS' +pm2 start --name gradido-backend \ + "env TZ=UTC NODE_ENV=production node ./build/index.js" \ + --cwd $PROJECT_ROOT/backend \ + -l $GRADIDO_LOG_PATH/pm2.backend.$TODAY.log \ + --log-date-format 'YYYY-MM-DD HH:mm:ss.SSS' + pm2 save if [ ! -z $FEDERATION_DHT_TOPIC ]; then - pm2 start --name gradido-dht-node "turbo dht-node#start --env-mode=loose" -l $GRADIDO_LOG_PATH/pm2.dht-node.$TODAY.log --log-date-format 'YYYY-MM-DD HH:mm:ss.SSS' + pm2 start --name gradido-dht-node \ + "env TZ=UTC NODE_ENV=production node ./build/index.js" \ + --cwd $PROJECT_ROOT/dht-node \ + -l $GRADIDO_LOG_PATH/pm2.dht-node.$TODAY.log \ + --log-date-format 'YYYY-MM-DD HH:mm:ss.SSS' pm2 save else log_step "=====================================================================" @@ -316,7 +333,11 @@ do log_step "====================================================" log_step " start $MODULENAME listening on port=$FEDERATION_PORT" log_step "====================================================" - pm2 start --name $MODULENAME "turbo federation#start --env-mode=loose" -l $GRADIDO_LOG_PATH/pm2.$MODULENAME.$TODAY.log --log-date-format 'YYYY-MM-DD HH:mm:ss.SSS' + pm2 start --name $MODULENAME \ + "env TZ=UTC NODE_ENV=production node ./build/index.js" \ + --cwd $PROJECT_ROOT/federation \ + -l $GRADIDO_LOG_PATH/pm2.$MODULENAME.$TODAY.log \ + --log-date-format 'YYYY-MM-DD HH:mm:ss.SSS' pm2 save done diff --git a/dht-node/Dockerfile b/dht-node/Dockerfile index bd397fea0..c19888a5d 100644 --- a/dht-node/Dockerfile +++ b/dht-node/Dockerfile @@ -116,8 +116,5 @@ COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/dht-node/build/index.js ./in # add node_modules from production_node_modules COPY --chown=app:app --from=production-node-modules ${DOCKER_WORKDIR}/node_modules ./node_modules -# Copy log4js-config.json to provide log configuration -COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/dht-node/log4js-config.json ./log4js-config.json - # Run command CMD ["node", "index.js"] diff --git a/dht-node/esbuild.config.ts b/dht-node/esbuild.config.ts index f38039c43..42d9c329d 100644 --- a/dht-node/esbuild.config.ts +++ b/dht-node/esbuild.config.ts @@ -10,5 +10,5 @@ build({ // legalComments: 'inline', external: ['dht-rpc', 'sodium-universal'], minify: true, - sourcemap: false, + sourcemap: true, }) diff --git a/dht-node/jest.config.js b/dht-node/jest.config.js index 18170ac48..bca83b5ce 100644 --- a/dht-node/jest.config.js +++ b/dht-node/jest.config.js @@ -1,19 +1,22 @@ /** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ module.exports = { - verbose: true, + verbose: false, preset: 'ts-jest', - collectCoverage: true, + collectCoverage: false, collectCoverageFrom: ['src/**/*.ts', '!**/node_modules/**', '!src/seeds/**', '!build/**'], coverageThreshold: { global: { lines: 82, }, }, - setupFiles: ['/test/testSetup.ts'], + setupFiles: ['config-schema/test/testSetup.ts'], setupFilesAfterEnv: [], modulePathIgnorePatterns: ['/build/'], 
moduleNameMapper: { '@/(.*)': '/src/$1', '@test/(.*)': '/test/$1', }, + transform: { + '^.+\\.(t|j)sx?$': '@swc/jest', + }, } diff --git a/dht-node/log4js-config.json b/dht-node/log4js-config.json deleted file mode 100644 index ee5207550..000000000 --- a/dht-node/log4js-config.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "appenders": - { - "dht": - { - "type": "dateFile", - "filename": "../logs/dht-node/apiversion-%v.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "errorFile": - { - "type": "dateFile", - "filename": "../logs/dht-node/errors.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "errors": - { - "type": "logLevelFilter", - "level": "error", - "appender": "errorFile" - }, - "out": - { - "type": "stdout", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - } - } - }, - "categories": - { - "default": - { - "appenders": - [ - "out", - "errors" - ], - "level": "debug", - "enableCallStack": true - }, - "dht": - { - "appenders": - [ - "dht", - "out", - "errors" - ], - "level": "debug", - "enableCallStack": true - } - } -} diff --git a/dht-node/package.json b/dht-node/package.json index ac51035eb..a973f7087 100644 --- a/dht-node/package.json +++ b/dht-node/package.json @@ -1,6 +1,6 @@ { "name": "dht-node", - "version": "2.5.2", + "version": "2.6.0", "description": "Gradido dht-node module", "main": "src/index.ts", "repository": "https://github.com/gradido/gradido/", @@ -10,20 +10,27 @@ "scripts": { "build": "tsx esbuild.config.ts", "start": "cross-env TZ=UTC NODE_ENV=production node build/index.js", - "dev": "cross-env TZ=UTC tsx watch src/index.ts", + "dev": "cross-env TZ=UTC nodemon -w src --ext ts,json -r tsconfig-paths/register src/index.ts", + "devFast": "cross-env TZ=UTC tsx src/index.ts", "typecheck": "tsc --noEmit", "lint": "biome check --error-on-warnings .", "lint:fix": "biome check --error-on-warnings . 
--write", - "test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_dht jest --runInBand --forceExit --detectOpenHandles" + "test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_dht jest --verbose --runInBand --forceExit --detectOpenHandles", + "test:debug": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_dht node --inspect-brk node_modules/.bin/jest --bail --runInBand --forceExit --detectOpenHandles", + "test:coverage": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_dht jest --coverage --runInBand --forceExit --detectOpenHandles" }, "dependencies": { + "cross-env": "^7.0.3", "dht-rpc": "6.18.1", - "sodium-universal": "4.0.1", - "cross-env": "^7.0.3" + "sodium-universal": "4.0.1" }, "devDependencies": { - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@hyperswarm/dht": "6.5.1", + "@swc/cli": "^0.7.3", + "@swc/core": "^1.11.24", + "@swc/helpers": "^0.5.17", + "@swc/jest": "^0.2.38", "@types/dotenv": "^8.2.3", "@types/jest": "27.5.1", "@types/joi": "^17.2.3", @@ -36,7 +43,9 @@ "jest": "27.5.1", "joi": "^17.13.3", "log4js": "^6.9.1", + "nodemon": "^2.0.7", "prettier": "^2.8.8", + "source-map-support": "^0.5.21", "ts-jest": "27.1.4", "tsx": "^4.19.4", "typeorm": "^0.3.22", diff --git a/dht-node/src/config/const.ts b/dht-node/src/config/const.ts new file mode 100644 index 000000000..47350c075 --- /dev/null +++ b/dht-node/src/config/const.ts @@ -0,0 +1 @@ +export const LOG4JS_BASE_CATEGORY_NAME = 'dht' diff --git a/dht-node/src/config/index.ts b/dht-node/src/config/index.ts index 14b4a789b..7f403ef26 100644 --- a/dht-node/src/config/index.ts +++ b/dht-node/src/config/index.ts @@ -1,38 +1,22 @@ import { validate } from 'config-schema' -import { latestDbVersion } from 'database' +import type { LogLevel } from 'config-schema' import dotenv from 'dotenv' - import { schema } from './schema' dotenv.config() -const constants = { - DB_VERSION: latestDbVersion, - LOG4JS_CONFIG: 'log4js-config.json', +const logging = { + LOG4JS_CONFIG: process.env.LOG4JS_CONFIG ?? 'log4js-config.json', // default log level on production should be info - LOG_LEVEL: process.env.LOG_LEVEL ?? 'info', + // log level for default log4js-config.json, don't change existing log4js-config.json + LOG_LEVEL: (process.env.LOG_LEVEL ?? 'info') as LogLevel, + LOG_FILES_BASE_PATH: process.env.LOG_FILES_BASE_PATH ?? '../logs/dht-node', } const server = { PRODUCTION: process.env.NODE_ENV === 'production', } -const database = { - DB_CONNECT_RETRY_COUNT: process.env.DB_CONNECT_RETRY_COUNT - ? Number.parseInt(process.env.DB_CONNECT_RETRY_COUNT) - : 15, - DB_CONNECT_RETRY_DELAY_MS: process.env.DB_CONNECT_RETRY_DELAY_MS - ? Number.parseInt(process.env.DB_CONNECT_RETRY_DELAY_MS) - : 500, - DB_HOST: process.env.DB_HOST ?? 'localhost', - DB_PORT: process.env.DB_PORT ? Number.parseInt(process.env.DB_PORT) : 3306, - DB_USER: process.env.DB_USER ?? 'root', - DB_PASSWORD: process.env.DB_PASSWORD ?? '', - DB_DATABASE: process.env.DB_DATABASE ?? 'gradido_community', - TYPEORM_LOGGING_RELATIVE_PATH: - process.env.TYPEORM_LOGGING_RELATIVE_PATH ?? 'typeorm.dht-node.log', -} - const community = { COMMUNITY_NAME: process.env.COMMUNITY_NAME ?? 
'Gradido Entwicklung', COMMUNITY_DESCRIPTION: @@ -51,9 +35,8 @@ const federation = { } export const CONFIG = { - ...constants, + ...logging, ...server, - ...database, ...community, ...federation, } diff --git a/dht-node/src/config/schema.ts b/dht-node/src/config/schema.ts index 67dac73a4..a205a51f3 100644 --- a/dht-node/src/config/schema.ts +++ b/dht-node/src/config/schema.ts @@ -1,38 +1,22 @@ import { COMMUNITY_DESCRIPTION, COMMUNITY_NAME, - DB_CONNECT_RETRY_COUNT, - DB_CONNECT_RETRY_DELAY_MS, - DB_DATABASE, - DB_HOST, - DB_PASSWORD, - DB_PORT, - DB_USER, - DB_VERSION, LOG4JS_CONFIG, + LOG_FILES_BASE_PATH, LOG_LEVEL, NODE_ENV, PRODUCTION, - TYPEORM_LOGGING_RELATIVE_PATH, } from 'config-schema' import Joi from 'joi' export const schema = Joi.object({ COMMUNITY_NAME, COMMUNITY_DESCRIPTION, - DB_DATABASE, - DB_CONNECT_RETRY_COUNT, - DB_CONNECT_RETRY_DELAY_MS, - DB_HOST, - DB_PASSWORD, - DB_PORT, - DB_USER, - DB_VERSION, LOG4JS_CONFIG, + LOG_FILES_BASE_PATH, LOG_LEVEL, NODE_ENV, PRODUCTION, - TYPEORM_LOGGING_RELATIVE_PATH, FEDERATION_DHT_TOPIC: Joi.string() .default('GRADIDO_HUB') diff --git a/dht-node/src/dht_node/index.test.ts b/dht-node/src/dht_node/index.test.ts index 5ffc3cb5f..20cc52057 100644 --- a/dht-node/src/dht_node/index.test.ts +++ b/dht-node/src/dht_node/index.test.ts @@ -3,11 +3,11 @@ import { Community as DbCommunity, FederatedCommunity as DbFederatedCommunity } import { validate as validateUUID, version as versionUUID } from 'uuid' import { cleanDB, testEnvironment } from '@test/helpers' -import { logger } from '@test/testSetup' +import { clearLogs, getLogger, printLogs } from 'config-schema/test/testSetup' import { CONFIG } from '@/config' -import { startDHT } from './index' +import { LOG_CATEGORY_DHT_NODE, startDHT } from './index' CONFIG.FEDERATION_DHT_SEED = '64ebcb0e3ad547848fef4197c6e2332f' CONFIG.FEDERATION_COMMUNITY_APIS = '1_0,1_1,2_0' @@ -21,6 +21,8 @@ const keyPairMock = { secretKey: Buffer.from('secretKey'), } +const logger = getLogger(LOG_CATEGORY_DHT_NODE) + const serverListenSpy = jest.fn() const serverEventMocks: { [key: string]: any } = {} @@ -112,6 +114,13 @@ describe('federation', () => { jest.useFakeTimers() }) + afterEach(() => { + // print logs which where captured during test + // printLogs() + // clean logs after, else they will be printed in next test again + // cleanLogs() + }) + describe('call startDHT', () => { const hashSpy = jest.spyOn(DHT, 'hash') const keyPairSpy = jest.spyOn(DHT, 'keyPair') @@ -349,6 +358,7 @@ describe('federation', () => { let jsonArray: any[] let result: DbFederatedCommunity[] = [] beforeAll(async () => { + clearLogs() jest.clearAllMocks() jsonArray = [ { @@ -364,6 +374,7 @@ describe('federation', () => { ] await socketEventMocks.data(Buffer.from(JSON.stringify(jsonArray))) result = await DbFederatedCommunity.find({ where: { foreign: true } }) + printLogs() }) afterAll(async () => { diff --git a/dht-node/src/dht_node/index.ts b/dht-node/src/dht_node/index.ts index 69cf86681..dc45b15ed 100644 --- a/dht-node/src/dht_node/index.ts +++ b/dht-node/src/dht_node/index.ts @@ -7,8 +7,9 @@ import { import { v4 as uuidv4 } from 'uuid' import { CONFIG } from '@/config' -import { logger } from '@/server/logger' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' +import { getLogger } from 'log4js' import { ApiVersionType } from './ApiVersionType' const KEY_SECRET_SEEDBYTES = 32 @@ -17,6 +18,8 @@ const POLLTIME = 20000 const SUCCESSTIME = 120000 const ERRORTIME = 240000 const ANNOUNCETIME = 30000 +export const 
LOG_CATEGORY_DHT_NODE = `${LOG4JS_BASE_CATEGORY_NAME}.dht_node` +const logger = getLogger(LOG_CATEGORY_DHT_NODE) type CommunityApi = { api: string @@ -57,7 +60,6 @@ export const startDHT = async (topic: string): Promise => { server.on('connection', function (socket: any) { logger.info(`server on... with Remote public key: ${socket.remotePublicKey.toString('hex')}`) - socket.on('data', async (data: Buffer) => { try { if (data.length > 1141) { @@ -82,7 +84,6 @@ export const startDHT = async (topic: string): Promise => { ) return } - for (const recApiVersion of recApiVersions) { if ( !recApiVersion.api || @@ -111,7 +112,6 @@ export const startDHT = async (topic: string): Promise => { publicKey: socket.remotePublicKey, lastAnnouncedAt: new Date(), } - logger.debug(`upsert with variables=${JSON.stringify(variables)}`) // this will NOT update the updatedAt column, to distingue between a normal update and the last announcement await DbFederatedCommunity.createQueryBuilder() .insert() diff --git a/dht-node/src/index.ts b/dht-node/src/index.ts index e7058f152..36c866eaa 100644 --- a/dht-node/src/index.ts +++ b/dht-node/src/index.ts @@ -1,12 +1,22 @@ +import 'source-map-support/register' import { startDHT } from '@/dht_node/index' -import { CONFIG } from './config' -import { logger } from './server/logger' -import { checkDBVersionUntil } from './typeorm/DBVersion' +import { CONFIG } from '@/config' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' +import { defaultCategory, initLogger } from 'config-schema' +import { AppDatabase } from 'database' +import { getLogger } from 'log4js' async function main() { + // init logger + initLogger( + [defaultCategory(LOG4JS_BASE_CATEGORY_NAME, CONFIG.LOG_LEVEL)], + CONFIG.LOG_FILES_BASE_PATH, + CONFIG.LOG4JS_CONFIG, + ) + const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}`) // open mysql connection - await checkDBVersionUntil(CONFIG.DB_CONNECT_RETRY_COUNT, CONFIG.DB_CONNECT_RETRY_DELAY_MS) + await AppDatabase.getInstance().init() logger.debug(`dhtseed set by CONFIG.FEDERATION_DHT_SEED=${CONFIG.FEDERATION_DHT_SEED}`) logger.info( `starting Federation on ${CONFIG.FEDERATION_DHT_TOPIC} ${ diff --git a/dht-node/src/server/logger.ts b/dht-node/src/server/logger.ts deleted file mode 100644 index 99ea8518c..000000000 --- a/dht-node/src/server/logger.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { readFileSync } from 'fs' - -import { configure, getLogger } from 'log4js' - -import { CONFIG } from '@/config' - -const options = JSON.parse(readFileSync(CONFIG.LOG4JS_CONFIG, 'utf-8')) - -options.categories.dht.level = CONFIG.LOG_LEVEL -let filename: string = options.appenders.dht.filename -options.appenders.dht.filename = filename.replace( - 'apiversion-%v', - 'dht-' + CONFIG.FEDERATION_DHT_TOPIC, -) -filename = options.appenders.errorFile.filename - -configure(options) - -const logger = getLogger('dht') - -export { logger } diff --git a/dht-node/src/typeorm/DBVersion.ts b/dht-node/src/typeorm/DBVersion.ts deleted file mode 100644 index be9f0c612..000000000 --- a/dht-node/src/typeorm/DBVersion.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { Migration } from 'database' - -import { logger } from '@/server/logger' - -import { CONFIG } from '@/config' -import { Connection as DbConnection } from 'typeorm' -import { connection as connectionFunc } from './connection' - -async function checkDBVersionUntil(maxRetries: number, delayMs: number): Promise { - for (let attempt = 1; attempt <= maxRetries; attempt++) { - try { - const connection = await connectionFunc() - if 
(connection?.isInitialized) { - const dbVersion = await checkDBVersion(CONFIG.DB_VERSION) - if (dbVersion) { - logger.info('Database connection and version check succeeded.') - return connection - } - } - } catch (err) { - logger.warn(`Attempt ${attempt}: Waiting for DB...`, err) - } - await new Promise((resolve) => setTimeout(resolve, delayMs)) - } - - logger.fatal( - `Fatal: Could not connect to database or version check failed after ${maxRetries} attempts.`, - ) - throw new Error('Fatal: Database not ready.') -} - -const getDBVersion = async (): Promise => { - try { - const [dbVersion] = await Migration.find({ order: { version: 'DESC' }, take: 1 }) - return dbVersion ? dbVersion.fileName : null - } catch (error) { - logger.error(error) - return null - } -} - -const checkDBVersion = async (DB_VERSION: string): Promise => { - const dbVersion = await getDBVersion() - if (!dbVersion || dbVersion.indexOf(DB_VERSION) === -1) { - logger.error( - `Wrong database version detected - the dht-node requires '${DB_VERSION}' but found '${ - dbVersion || 'None' - }`, - ) - return false - } - return true -} - -export { checkDBVersion, getDBVersion, checkDBVersionUntil } diff --git a/dht-node/src/typeorm/connection.ts b/dht-node/src/typeorm/connection.ts deleted file mode 100644 index 14c6195f9..000000000 --- a/dht-node/src/typeorm/connection.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { CONFIG } from '@/config' -// TODO This is super weird - since the entities are defined in another project they have their own globals. -// We cannot use our connection here, but must use the external typeorm installation -import { entities } from 'database' -import { Connection, FileLogger, createConnection } from 'typeorm' - -export const connection = async (): Promise => { - try { - return createConnection({ - name: 'default', - type: 'mysql', - host: CONFIG.DB_HOST, - port: CONFIG.DB_PORT, - username: CONFIG.DB_USER, - password: CONFIG.DB_PASSWORD, - database: CONFIG.DB_DATABASE, - entities, - synchronize: false, - logging: true, - logger: new FileLogger('all', { - // workaround to let previous path working, because with esbuild the script root path has changed - logPath: '../' + CONFIG.TYPEORM_LOGGING_RELATIVE_PATH, - }), - extra: { - charset: 'utf8mb4_unicode_ci', - }, - }) - } catch (error) { - // biome-ignore lint/suspicious/noConsole: no logger present - console.log(error) - return null - } -} diff --git a/dht-node/test/helpers.ts b/dht-node/test/helpers.ts index 1fd42066a..52b847b4c 100644 --- a/dht-node/test/helpers.ts +++ b/dht-node/test/helpers.ts @@ -1,7 +1,4 @@ -import { entities } from 'database' - -import { checkDBVersionUntil } from '@/typeorm/DBVersion' -import { CONFIG } from '@/config' +import { AppDatabase, entities } from 'database' export const headerPushMock = jest.fn((t) => { context.token = t.value @@ -24,7 +21,9 @@ export const cleanDB = async () => { } export const testEnvironment = async () => { - return { con: await checkDBVersionUntil(CONFIG.DB_CONNECT_RETRY_COUNT, CONFIG.DB_CONNECT_RETRY_DELAY_MS) } + const appDB = AppDatabase.getInstance() + await appDB.init() + return { con: appDB.getDataSource() } } export const resetEntity = async (entity: any) => { diff --git a/dht-node/test/testSetup.ts b/dht-node/test/testSetup.ts deleted file mode 100644 index ff619e95d..000000000 --- a/dht-node/test/testSetup.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { logger } from '@/server/logger' - -jest.setTimeout(1000000) - -jest.mock('@/server/logger', () => { - const originalModule = 
jest.requireActual('@/server/logger') - return { - __esModule: true, - ...originalModule, - logger: { - addContext: jest.fn(), - trace: jest.fn(), - debug: jest.fn(), - warn: jest.fn(), - info: jest.fn(), - error: jest.fn(), - fatal: jest.fn(), - }, - } -}) - -export { logger } diff --git a/dht-node/tsconfig.json b/dht-node/tsconfig.json index 757d61e02..9628b7c41 100644 --- a/dht-node/tsconfig.json +++ b/dht-node/tsconfig.json @@ -12,7 +12,7 @@ // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */ // "declaration": true, /* Generates corresponding '.d.ts' file. */ // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ - // "sourceMap": true, /* Generates corresponding '.map' file. */ + "sourceMap": true, /* Generates corresponding '.map' file. */ // "outFile": "./", /* Concatenate and emit output to single file. */ "outDir": "./build", /* Redirect output structure to the directory. */ // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ @@ -74,5 +74,8 @@ /* Advanced Options */ "skipLibCheck": true, /* Skip type checking of declaration files. */ "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */ + }, + "ts-node": { + "swc": true } } diff --git a/dht-node/turbo.json b/dht-node/turbo.json index f6211afa1..3044e8840 100644 --- a/dht-node/turbo.json +++ b/dht-node/turbo.json @@ -2,10 +2,10 @@ "extends": ["//"], "tasks": { "test": { - "dependsOn": ["database#build", "config-schema#build", "database#up:dht_test"] + "dependsOn": ["config-schema#build", "database#build", "database#up:dht_test"] }, "dev": { - "dependsOn": ["database#up"] + "dependsOn": ["config-schema#build", "database#build", "database#up"] }, "start": { "dependsOn": ["database#up", "build"] diff --git a/dlt-connector/package.json b/dlt-connector/package.json index cbabb26c8..a61158c58 100644 --- a/dlt-connector/package.json +++ b/dlt-connector/package.json @@ -1,6 +1,6 @@ { "name": "gradido-dlt-connector", - "version": "2.5.2", + "version": "2.6.0", "description": "Gradido DLT-Connector", "main": "src/index.ts", "repository": "https://github.com/gradido/gradido/", diff --git a/federation/Dockerfile b/federation/Dockerfile index 070fe3e2a..a8fcfd769 100644 --- a/federation/Dockerfile +++ b/federation/Dockerfile @@ -115,8 +115,5 @@ COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/federation/build/index.js ./ # add node_modules from production_node_modules COPY --chown=app:app --from=production-node-modules ${DOCKER_WORKDIR}/node_modules ./node_modules -# Copy log4js-config.json to provide log configuration -COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/federation/log4js-config.json ./log4js-config.json - # Run command CMD ["node", "index.js"] \ No newline at end of file diff --git a/federation/esbuild.config.ts b/federation/esbuild.config.ts index b4154f008..2ef5f88e5 100644 --- a/federation/esbuild.config.ts +++ b/federation/esbuild.config.ts @@ -12,4 +12,5 @@ build({ external: ['sodium-native'], plugins: [esbuildDecorators()], minify: true, + sourcemap: true, }) \ No newline at end of file diff --git a/federation/jest.config.js b/federation/jest.config.js index 44ddf9bf5..9766c8848 100644 --- a/federation/jest.config.js +++ b/federation/jest.config.js @@ -2,14 +2,14 @@ module.exports = { verbose: true, preset: 'ts-jest', - collectCoverage: true, + collectCoverage: false, 
collectCoverageFrom: ['src/**/*.ts', '!**/node_modules/**', '!src/seeds/**', '!build/**'], coverageThreshold: { global: { lines: 68, }, }, - setupFiles: ['/test/testSetup.ts'], + setupFiles: ['config-schema/test/testSetup.ts'], setupFilesAfterEnv: [], modulePathIgnorePatterns: ['/build/'], moduleNameMapper: { diff --git a/federation/log4js-config.json b/federation/log4js-config.json deleted file mode 100644 index e9336f83c..000000000 --- a/federation/log4js-config.json +++ /dev/null @@ -1,151 +0,0 @@ -{ - "appenders": - { - "access": - { - "type": "dateFile", - "filename": "../logs/federation/access-%p.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "apollo": - { - "type": "dateFile", - "filename": "../logs/federation/apollo-%p.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "backend": - { - "type": "dateFile", - "filename": "../logs/federation/backend-%p.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "federation": - { - "type": "dateFile", - "filename": "../logs/federation/apiversion-%v-%p.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "errorFile": - { - "type": "dateFile", - "filename": "../logs/federation/errors-%p.log", - "pattern": "yyyy-MM-dd", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - }, - "compress": true, - "keepFileExt" : true, - "fileNameSep" : "_", - "numBackups" : 30 - }, - "errors": - { - "type": "logLevelFilter", - "level": "error", - "appender": "errorFile" - }, - "out": - { - "type": "stdout", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - } - }, - "apolloOut": - { - "type": "stdout", - "layout": - { - "type": "pattern", "pattern": "%d{ISO8601} %p %c [%X{user}] [%f : %l] - %m" - } - } - }, - "categories": - { - "default": - { - "appenders": - [ - "out", - "errors" - ], - "level": "debug", - "enableCallStack": true - }, - "apollo": - { - "appenders": - [ - "apollo", - "apolloOut", - "errors" - ], - "level": "debug", - "enableCallStack": true - }, - "backend": - { - "appenders": - [ - "backend", - "out", - "errors" - ], - "level": "debug", - "enableCallStack": true - }, - "federation": - { - "appenders": - [ - "federation", - "out", - "errors" - ], - "level": "debug", - "enableCallStack": true - }, - "http": - { - "appenders": - [ - "access" - ], - "level": "info" - } - } -} diff --git a/federation/package.json b/federation/package.json index ca1105e94..3e15b8e14 100644 --- a/federation/package.json +++ b/federation/package.json @@ -1,12 +1,11 @@ { "name": "federation", - "version": "2.5.2", + "version": "2.6.0", "description": "Gradido federation module providing Gradido-Hub-Federation and versioned API for inter community communication", "main": "src/index.ts", "repository": "https://github.com/gradido/gradido/federation", "author": "Gradido Academy - 
https://www.gradido.net", "license": "Apache-2.0", - "private": false, "scripts": { "build": "ts-node ./esbuild.config.ts", @@ -16,6 +15,8 @@ "dev:bun": "cross-env TZ=UTC bun --hot src/index.ts", "typecheck": "tsc --noEmit", "test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_federation jest --runInBand --forceExit --detectOpenHandles", + "test:debug": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_federation node --inspect-brk node_modules/.bin/jest --bail --runInBand --forceExit --detectOpenHandles", + "test:coverage": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_federation jest --coverage --runInBand --forceExit --detectOpenHandles", "lint": "biome check --error-on-warnings .", "lint:fix": "biome check --error-on-warnings . --write" }, @@ -25,7 +26,7 @@ }, "devDependencies": { "@anatine/esbuild-decorators": "^0.2.19", - "@biomejs/biome": "1.9.4", + "@biomejs/biome": "2.0.0", "@swc/cli": "^0.7.3", "@swc/core": "^1.11.24", "@swc/helpers": "^0.5.17", @@ -58,15 +59,18 @@ "nodemon": "^2.0.7", "prettier": "^3.5.3", "reflect-metadata": "^0.1.13", + "source-map-support": "^0.5.21", "ts-jest": "27.0.5", "tsconfig-paths": "^4.1.1", "type-graphql": "^1.1.1", - "typeorm": "^0.3.16", + "typeorm": "^0.3.22", "typescript": "^4.9.5", "uuid": "8.3.2" }, "nodemonConfig": { - "ignore": ["**/*.test.ts"] + "ignore": [ + "**/*.test.ts" + ] }, "engines": { "node": ">=18" diff --git a/federation/src/client/1_0/AuthenticationClient.ts b/federation/src/client/1_0/AuthenticationClient.ts index 2aa7b55d3..8535e06ab 100644 --- a/federation/src/client/1_0/AuthenticationClient.ts +++ b/federation/src/client/1_0/AuthenticationClient.ts @@ -1,12 +1,15 @@ -import { federationLogger as logger } from '@/server/logger' import { FederatedCommunity as DbFederatedCommunity } from 'database' import { GraphQLClient } from 'graphql-request' +import { getLogger } from 'log4js' import { AuthenticationArgs } from '@/graphql/api/1_0/model/AuthenticationArgs' import { OpenConnectionCallbackArgs } from '@/graphql/api/1_0/model/OpenConnectionCallbackArgs' +import { LOG4JS_CLIENT_1_0_CATEGORY_NAME } from '.' 
import { authenticate } from './query/authenticate' import { openConnectionCallback } from './query/openConnectionCallback' +const logger = getLogger(`${LOG4JS_CLIENT_1_0_CATEGORY_NAME}.AuthenticationClient`) + export class AuthenticationClient { dbCom: DbFederatedCommunity endpoint: string @@ -27,41 +30,35 @@ export class AuthenticationClient { } async openConnectionCallback(args: OpenConnectionCallbackArgs): Promise { - logger.debug('Authentication: openConnectionCallback with endpoint', this.endpoint, args) + logger.debug('openConnectionCallback with endpoint', this.endpoint, args) try { const { data } = await this.client.rawRequest(openConnectionCallback, { args }) if (data && data.openConnectionCallback) { - logger.warn( - 'Authentication: openConnectionCallback without response data from endpoint', - this.endpoint, - ) + logger.warn('openConnectionCallback without response data from endpoint', this.endpoint) return false } - logger.debug( - 'Authentication: openConnectionCallback successfully started with endpoint', - this.endpoint, - ) + logger.debug('openConnectionCallback successfully started with endpoint', this.endpoint) return true } catch (err) { - logger.error('Authentication: error on openConnectionCallback', err) + logger.error('error on openConnectionCallback', err) } return false } async authenticate(args: AuthenticationArgs): Promise { - logger.debug('Authentication: authenticate with endpoint=', this.endpoint) + logger.debug('authenticate with endpoint=', this.endpoint) try { const { data } = await this.client.rawRequest(authenticate, { args }) - logger.debug('Authentication: after authenticate: data:', data) + logger.debug('after authenticate: data:', data) const authUuid: string = data?.authenticate if (authUuid) { - logger.debug('Authentication: received authenticated uuid', authUuid) + logger.debug('received authenticated uuid', authUuid) return authUuid } } catch (err) { - logger.error('Authentication: authenticate failed', { + logger.error('authenticate failed', { endpoint: this.endpoint, err, }) diff --git a/federation/src/client/1_0/index.ts b/federation/src/client/1_0/index.ts new file mode 100644 index 000000000..18e8aaeed --- /dev/null +++ b/federation/src/client/1_0/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' + +export const LOG4JS_CLIENT_1_0_CATEGORY_NAME = `${LOG4JS_BASE_CATEGORY_NAME}.client.1_0` diff --git a/federation/src/config/const.ts b/federation/src/config/const.ts new file mode 100644 index 000000000..4d0e8c1db --- /dev/null +++ b/federation/src/config/const.ts @@ -0,0 +1 @@ +export const LOG4JS_BASE_CATEGORY_NAME = 'federation' diff --git a/federation/src/config/index.ts b/federation/src/config/index.ts index 3c759d702..6e9d5437e 100644 --- a/federation/src/config/index.ts +++ b/federation/src/config/index.ts @@ -1,9 +1,9 @@ -import { latestDbVersion } from 'database' // ATTENTION: DO NOT PUT ANY SECRETS IN HERE (or the .env) import { Decimal } from 'decimal.js-light' import dotenv from 'dotenv' import { validate } from 'config-schema' +import type { LogLevel } from 'config-schema' import { schema } from './schema' @@ -14,12 +14,12 @@ Decimal.set({ rounding: Decimal.ROUND_HALF_UP, }) -const constants = { - DB_VERSION: latestDbVersion, - DECAY_START_TIME: new Date('2021-05-13 17:46:31-0000'), // GMT+0 - LOG4JS_CONFIG: 'log4js-config.json', +const logging = { + LOG4JS_CONFIG_PLACEHOLDER: process.env.LOG4JS_CONFIG_PLACEHOLDER ?? 
'log4js-config-%v.json', // default log level on production should be info - LOG_LEVEL: process.env.LOG_LEVEL ?? 'info', + // log level for default log4js-config.json, don't change existing log4js-config.json + LOG_LEVEL: (process.env.LOG_LEVEL ?? 'info') as LogLevel, + LOG_FILES_BASE_PATH: process.env.LOG_FILES_BASE_PATH ?? '../logs/federation', } const server = { @@ -29,20 +29,6 @@ const server = { // GDT_API_URL: process.env.GDT_API_URL || 'https://gdt.gradido.net', PRODUCTION: process.env.NODE_ENV === 'production', } -const database = { - DB_CONNECT_RETRY_COUNT: process.env.DB_CONNECT_RETRY_COUNT - ? Number.parseInt(process.env.DB_CONNECT_RETRY_COUNT) - : 15, - DB_CONNECT_RETRY_DELAY_MS: process.env.DB_CONNECT_RETRY_DELAY_MS - ? Number.parseInt(process.env.DB_CONNECT_RETRY_DELAY_MS) - : 500, - DB_HOST: process.env.DB_HOST ?? 'localhost', - DB_PORT: process.env.DB_PORT ? Number.parseInt(process.env.DB_PORT) : 3306, - DB_USER: process.env.DB_USER ?? 'root', - DB_PASSWORD: process.env.DB_PASSWORD ?? '', - DB_DATABASE: process.env.DB_DATABASE ?? 'gradido_community', - TYPEORM_LOGGING_RELATIVE_PATH: process.env.TYPEORM_LOGGING_RELATIVE_PATH ?? 'typeorm.backend.log', -} const COMMUNITY_HOST = process.env.COMMUNITY_HOST ?? 'localhost' const URL_PROTOCOL = process.env.URL_PROTOCOL ?? 'http' @@ -60,11 +46,8 @@ const federation = { } export const CONFIG = { - ...constants, + ...logging, ...server, - ...database, - // ...community, - // ...eventProtocol, ...federation, } diff --git a/federation/src/config/schema.ts b/federation/src/config/schema.ts index 812728cff..82e05d3bc 100644 --- a/federation/src/config/schema.ts +++ b/federation/src/config/schema.ts @@ -1,38 +1,22 @@ import { - DB_CONNECT_RETRY_COUNT, - DB_CONNECT_RETRY_DELAY_MS, - DB_DATABASE, - DB_HOST, - DB_PASSWORD, - DB_PORT, - DB_USER, - DB_VERSION, DECAY_START_TIME, GRAPHIQL, - LOG4JS_CONFIG, + LOG4JS_CONFIG_PLACEHOLDER, + LOG_FILES_BASE_PATH, LOG_LEVEL, NODE_ENV, PRODUCTION, - TYPEORM_LOGGING_RELATIVE_PATH, } from 'config-schema' import Joi from 'joi' export const schema = Joi.object({ - DB_DATABASE, - DB_CONNECT_RETRY_COUNT, - DB_CONNECT_RETRY_DELAY_MS, - DB_HOST, - DB_PASSWORD, - DB_PORT, - DB_USER, - DB_VERSION, DECAY_START_TIME, GRAPHIQL, - LOG4JS_CONFIG, + LOG4JS_CONFIG_PLACEHOLDER, + LOG_FILES_BASE_PATH, LOG_LEVEL, NODE_ENV, PRODUCTION, - TYPEORM_LOGGING_RELATIVE_PATH, FEDERATION_API: Joi.string() .valid('1_0', '1_1') diff --git a/federation/src/graphql/api/1_0/index.ts b/federation/src/graphql/api/1_0/index.ts new file mode 100644 index 000000000..3a995fd28 --- /dev/null +++ b/federation/src/graphql/api/1_0/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_API_CATEGORY_NAME } from '@/graphql/api' + +export const LOG4JS_API_1_0_CATEGORY_NAME = `${LOG4JS_API_CATEGORY_NAME}.1_0` diff --git a/federation/src/graphql/api/1_0/resolver/AuthenticationResolver.ts b/federation/src/graphql/api/1_0/resolver/AuthenticationResolver.ts index 2479bb8be..89e891b34 100644 --- a/federation/src/graphql/api/1_0/resolver/AuthenticationResolver.ts +++ b/federation/src/graphql/api/1_0/resolver/AuthenticationResolver.ts @@ -1,18 +1,21 @@ import { CONFIG } from '@/config' import { LogError } from '@/server/LogError' -import { federationLogger as logger } from '@/server/logger' import { CommunityLoggingView, Community as DbCommunity, FederatedCommunity as DbFedCommunity, FederatedCommunityLoggingView, } from 'database' +import { getLogger } from 'log4js' import { Arg, Mutation, Resolver } from 'type-graphql' +import { LOG4JS_RESOLVER_1_0_CATEGORY_NAME } from 
'.' import { AuthenticationArgs } from '../model/AuthenticationArgs' import { OpenConnectionArgs } from '../model/OpenConnectionArgs' import { OpenConnectionCallbackArgs } from '../model/OpenConnectionCallbackArgs' import { startAuthentication, startOpenConnectionCallback } from '../util/authenticateCommunity' +const logger = getLogger(`${LOG4JS_RESOLVER_1_0_CATEGORY_NAME}.AuthenticationResolver`) + @Resolver() export class AuthenticationResolver { @Mutation(() => Boolean) @@ -21,7 +24,7 @@ export class AuthenticationResolver { args: OpenConnectionArgs, ): Promise { const pubKeyBuf = Buffer.from(args.publicKey, 'hex') - logger.debug(`Authentication: openConnection() via apiVersion=1_0:`, args) + logger.debug(`openConnection() via apiVersion=1_0:`, args) // first find with args.publicKey the community 'comA', which starts openConnection request const comA = await DbCommunity.findOneBy({ @@ -30,7 +33,7 @@ export class AuthenticationResolver { if (!comA) { throw new LogError(`unknown requesting community with publicKey`, pubKeyBuf.toString('hex')) } - logger.debug(`Authentication: found requestedCom:`, new CommunityLoggingView(comA)) + logger.debug(`found requestedCom:`, new CommunityLoggingView(comA)) // biome-ignore lint/complexity/noVoid: no await to respond immediately and invoke callback-request asynchronously void startOpenConnectionCallback(args, comA, CONFIG.FEDERATION_API) return true @@ -41,17 +44,17 @@ export class AuthenticationResolver { @Arg('data') args: OpenConnectionCallbackArgs, ): Promise { - logger.debug(`Authentication: openConnectionCallback() via apiVersion=1_0 ...`, args) + logger.debug(`openConnectionCallback() via apiVersion=1_0 ...`, args) // TODO decrypt args.url with homeCom.privateKey and verify signing with callbackFedCom.publicKey const endPoint = args.url.slice(0, args.url.lastIndexOf('/') + 1) const apiVersion = args.url.slice(args.url.lastIndexOf('/') + 1, args.url.length) - logger.debug(`Authentication: search fedComB per:`, endPoint, apiVersion) + logger.debug(`search fedComB per:`, endPoint, apiVersion) const fedComB = await DbFedCommunity.findOneBy({ endPoint, apiVersion }) if (!fedComB) { throw new LogError(`unknown callback community with url`, args.url) } logger.debug( - `Authentication: found fedComB and start authentication:`, + `found fedComB and start authentication:`, new FederatedCommunityLoggingView(fedComB), ) // biome-ignore lint/complexity/noVoid: no await to respond immediately and invoke authenticate-request asynchronously @@ -64,18 +67,15 @@ export class AuthenticationResolver { @Arg('data') args: AuthenticationArgs, ): Promise { - logger.debug(`Authentication: authenticate() via apiVersion=1_0 ...`, args) + logger.debug(`authenticate() via apiVersion=1_0 ...`, args) const authCom = await DbCommunity.findOneByOrFail({ communityUuid: args.oneTimeCode }) - logger.debug('Authentication: found authCom:', new CommunityLoggingView(authCom)) + logger.debug('found authCom:', new CommunityLoggingView(authCom)) if (authCom) { // TODO decrypt args.uuid with authCom.publicKey authCom.communityUuid = args.uuid authCom.authenticatedAt = new Date() await DbCommunity.save(authCom) - logger.debug( - 'Authentication: store authCom.uuid successfully:', - new CommunityLoggingView(authCom), - ) + logger.debug('store authCom.uuid successfully:', new CommunityLoggingView(authCom)) const homeCom = await DbCommunity.findOneByOrFail({ foreign: false }) // TODO encrypt homeCom.uuid with homeCom.privateKey if (homeCom.communityUuid) { diff --git 
a/federation/src/graphql/api/1_0/resolver/PublicCommunityInfoResolver.test.ts b/federation/src/graphql/api/1_0/resolver/PublicCommunityInfoResolver.test.ts index af3b022e5..e7bf40bed 100644 --- a/federation/src/graphql/api/1_0/resolver/PublicCommunityInfoResolver.test.ts +++ b/federation/src/graphql/api/1_0/resolver/PublicCommunityInfoResolver.test.ts @@ -1,18 +1,20 @@ import { CONFIG } from '@/config' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { createServer } from '@/server/createServer' import { createTestClient } from 'apollo-server-testing' import { Community as DbCommunity } from 'database' -import { Connection } from 'typeorm' +import { getLogger } from 'log4js' +import { DataSource } from 'typeorm' let query: any // to do: We need a setup for the tests that closes the connection -let con: Connection +let con: DataSource CONFIG.FEDERATION_API = '1_0' beforeAll(async () => { - const server = await createServer() + const server = await createServer(getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.apollo`)) con = server.con query = createTestClient(server.apollo).query DbCommunity.clear() diff --git a/federation/src/graphql/api/1_0/resolver/PublicCommunityInfoResolver.ts b/federation/src/graphql/api/1_0/resolver/PublicCommunityInfoResolver.ts index e019f27f6..9bd2898f3 100644 --- a/federation/src/graphql/api/1_0/resolver/PublicCommunityInfoResolver.ts +++ b/federation/src/graphql/api/1_0/resolver/PublicCommunityInfoResolver.ts @@ -1,9 +1,12 @@ -import { federationLogger as logger } from '@/server/logger' import { Community as DbCommunity } from 'database' +import { getLogger } from 'log4js' import { Query, Resolver } from 'type-graphql' +import { LOG4JS_RESOLVER_1_0_CATEGORY_NAME } from '.' import { GetPublicCommunityInfoResultLoggingView } from '../logger/GetPublicCommunityInfoResultLogging.view' import { GetPublicCommunityInfoResult } from '../model/GetPublicCommunityInfoResult' +const logger = getLogger(`${LOG4JS_RESOLVER_1_0_CATEGORY_NAME}.PublicCommunityInfoResolver`) + @Resolver() export class PublicCommunityInfoResolver { @Query(() => GetPublicCommunityInfoResult) diff --git a/federation/src/graphql/api/1_0/resolver/PublicKeyResolver.test.ts b/federation/src/graphql/api/1_0/resolver/PublicKeyResolver.test.ts index 894570972..13ec75dfc 100644 --- a/federation/src/graphql/api/1_0/resolver/PublicKeyResolver.test.ts +++ b/federation/src/graphql/api/1_0/resolver/PublicKeyResolver.test.ts @@ -1,7 +1,9 @@ import { CONFIG } from '@/config' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { createServer } from '@/server/createServer' import { createTestClient } from 'apollo-server-testing' import { FederatedCommunity as DbFederatedCommunity } from 'database' +import { getLogger } from 'log4js' let query: any @@ -11,7 +13,7 @@ let con: any CONFIG.FEDERATION_API = '1_0' beforeAll(async () => { - const server = await createServer() + const server = await createServer(getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.apollo`)) con = server.con query = createTestClient(server.apollo).query DbFederatedCommunity.clear() diff --git a/federation/src/graphql/api/1_0/resolver/PublicKeyResolver.ts b/federation/src/graphql/api/1_0/resolver/PublicKeyResolver.ts index 7c4d734c1..f849941e0 100644 --- a/federation/src/graphql/api/1_0/resolver/PublicKeyResolver.ts +++ b/federation/src/graphql/api/1_0/resolver/PublicKeyResolver.ts @@ -1,8 +1,11 @@ -import { federationLogger as logger } from '@/server/logger' import { FederatedCommunity as DbFederatedCommunity } from 'database' 
+import { getLogger } from 'log4js' import { Query, Resolver } from 'type-graphql' +import { LOG4JS_RESOLVER_1_0_CATEGORY_NAME } from '.' import { GetPublicKeyResult } from '../model/GetPublicKeyResult' +const logger = getLogger(`${LOG4JS_RESOLVER_1_0_CATEGORY_NAME}.PublicKeyResolver`) + @Resolver() export class PublicKeyResolver { @Query(() => GetPublicKeyResult) diff --git a/federation/src/graphql/api/1_0/resolver/SendCoinsResolver.test.ts b/federation/src/graphql/api/1_0/resolver/SendCoinsResolver.test.ts index 8129ee702..d41ea1936 100644 --- a/federation/src/graphql/api/1_0/resolver/SendCoinsResolver.test.ts +++ b/federation/src/graphql/api/1_0/resolver/SendCoinsResolver.test.ts @@ -1,12 +1,13 @@ import { CONFIG } from '@/config' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { fullName } from '@/graphql/util/fullName' import { cleanDB, testEnvironment } from '@test/helpers' -import { logger } from '@test/testSetup' import { ApolloServerTestClient } from 'apollo-server-testing' import { Community as DbCommunity, User as DbUser, UserContact as DbUserContact } from 'database' import Decimal from 'decimal.js-light' import { GraphQLError } from 'graphql' -import { Connection } from 'typeorm' +import { getLogger } from 'log4js' +import { DataSource } from 'typeorm' import { SendCoinsArgs } from '../model/SendCoinsArgs' let mutate: ApolloServerTestClient['mutate'] // , con: Connection @@ -15,7 +16,7 @@ let mutate: ApolloServerTestClient['mutate'] // , con: Connection let testEnv: { mutate: ApolloServerTestClient['mutate'] query: ApolloServerTestClient['query'] - con: Connection + con: DataSource } CONFIG.FEDERATION_API = '1_0' @@ -28,7 +29,7 @@ let recipUser: DbUser let recipContact: DbUserContact beforeAll(async () => { - testEnv = await testEnvironment(logger) + testEnv = await testEnvironment(getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.apollo`)) mutate = testEnv.mutate // query = testEnv.query // con = testEnv.con diff --git a/federation/src/graphql/api/1_0/resolver/SendCoinsResolver.ts b/federation/src/graphql/api/1_0/resolver/SendCoinsResolver.ts index 5ade497a4..a6eeef7f8 100644 --- a/federation/src/graphql/api/1_0/resolver/SendCoinsResolver.ts +++ b/federation/src/graphql/api/1_0/resolver/SendCoinsResolver.ts @@ -1,14 +1,15 @@ import { findUserByIdentifier } from '@/graphql/util/findUserByIdentifier' import { fullName } from '@/graphql/util/fullName' import { LogError } from '@/server/LogError' -import { federationLogger as logger } from '@/server/logger' import { Community as DbCommunity, PendingTransaction as DbPendingTransaction, PendingTransactionLoggingView, } from 'database' import Decimal from 'decimal.js-light' +import { getLogger } from 'log4js' import { Arg, Mutation, Resolver } from 'type-graphql' +import { LOG4JS_RESOLVER_1_0_CATEGORY_NAME } from '.' 
import { PendingTransactionState } from '../enum/PendingTransactionState' import { TransactionTypeId } from '../enum/TransactionTypeId' import { SendCoinsArgsLoggingView } from '../logger/SendCoinsArgsLogging.view' @@ -20,6 +21,8 @@ import { revertSettledReceiveTransaction } from '../util/revertSettledReceiveTra import { settlePendingReceiveTransaction } from '../util/settlePendingReceiveTransaction' import { storeForeignUser } from '../util/storeForeignUser' +const logger = getLogger(`${LOG4JS_RESOLVER_1_0_CATEGORY_NAME}.SendCoinsResolver`) + @Resolver() export class SendCoinsResolver { @Mutation(() => SendCoinsResult) @@ -232,7 +235,7 @@ export class SendCoinsResolver { ) } - logger.debug(`XCom: settlePendingReceiveTransaction()-1_0... successfull`) + logger.debug(`XCom: settlePendingReceiveTransaction()-1_0... successful`) return true } else { logger.debug('XCom: settlePendingReceiveTransaction NOT matching pendingTX for settlement...') diff --git a/federation/src/graphql/api/1_0/resolver/index.ts b/federation/src/graphql/api/1_0/resolver/index.ts new file mode 100644 index 000000000..adec2adb9 --- /dev/null +++ b/federation/src/graphql/api/1_0/resolver/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_API_1_0_CATEGORY_NAME } from '@/graphql/api/1_0' + +export const LOG4JS_RESOLVER_1_0_CATEGORY_NAME = `${LOG4JS_API_1_0_CATEGORY_NAME}.resolver` diff --git a/federation/src/graphql/api/1_0/util/authenticateCommunity.ts b/federation/src/graphql/api/1_0/util/authenticateCommunity.ts index 0004fb5b6..e4d27e47c 100644 --- a/federation/src/graphql/api/1_0/util/authenticateCommunity.ts +++ b/federation/src/graphql/api/1_0/util/authenticateCommunity.ts @@ -1,10 +1,10 @@ -import { federationLogger as logger } from '@/server/logger' import { CommunityLoggingView, Community as DbCommunity, FederatedCommunity as DbFedCommunity, FederatedCommunityLoggingView, } from 'database' +import { getLogger } from 'log4js' import { OpenConnectionArgs } from '../model/OpenConnectionArgs' import { OpenConnectionCallbackArgs } from '../model/OpenConnectionCallbackArgs' @@ -12,14 +12,17 @@ import { AuthenticationClientFactory } from '@/client/AuthenticationClientFactor import { randombytes_random } from 'sodium-native' import { AuthenticationClient as V1_0_AuthenticationClient } from '@/client/1_0/AuthenticationClient' +import { LOG4JS_1_0_UTIL_CATEGORY_NAME } from '.' 
import { AuthenticationArgs } from '../model/AuthenticationArgs' +const logger = getLogger(`${LOG4JS_1_0_UTIL_CATEGORY_NAME}.authenticateCommunity`) + export async function startOpenConnectionCallback( args: OpenConnectionArgs, comA: DbCommunity, api: string, ): Promise { - logger.debug(`Authentication: startOpenConnectionCallback() with:`, { + logger.debug(`startOpenConnectionCallback() with:`, { args, comA: new CommunityLoggingView(comA), }) @@ -53,13 +56,13 @@ export async function startOpenConnectionCallback( : homeFedCom.endPoint + '/' + homeFedCom.apiVersion logger.debug(`Authentication: start openConnectionCallback with args:`, callbackArgs) if (await client.openConnectionCallback(callbackArgs)) { - logger.debug('Authentication: startOpenConnectionCallback() successful:', callbackArgs) + logger.debug('startOpenConnectionCallback() successful:', callbackArgs) } else { - logger.error('Authentication: startOpenConnectionCallback() failed:', callbackArgs) + logger.error('startOpenConnectionCallback() failed:', callbackArgs) } } } catch (err) { - logger.error('Authentication: error in startOpenConnectionCallback:', err) + logger.error('error in startOpenConnectionCallback:', err) } } @@ -67,7 +70,7 @@ export async function startAuthentication( oneTimeCode: string, fedComB: DbFedCommunity, ): Promise { - logger.debug(`Authentication: startAuthentication()...`, { + logger.debug(`startAuthentication()...`, { oneTimeCode, fedComB: new FederatedCommunityLoggingView(fedComB), }) @@ -84,12 +87,12 @@ export async function startAuthentication( if (homeCom.communityUuid) { authenticationArgs.uuid = homeCom.communityUuid } - logger.debug(`Authentication: invoke authenticate() with:`, authenticationArgs) + logger.debug(`invoke authenticate() with:`, authenticationArgs) const fedComUuid = await client.authenticate(authenticationArgs) - logger.debug(`Authentication: response of authenticate():`, fedComUuid) + logger.debug(`response of authenticate():`, fedComUuid) if (fedComUuid !== null) { logger.debug( - `Authentication: received communityUUid for callbackFedCom:`, + `received communityUUid for callbackFedCom:`, fedComUuid, new FederatedCommunityLoggingView(fedComB), ) @@ -101,15 +104,12 @@ export async function startAuthentication( callbackCom.communityUuid = fedComUuid callbackCom.authenticatedAt = new Date() await DbCommunity.save(callbackCom) - logger.debug( - 'Authentication: Community Authentication successful:', - new CommunityLoggingView(callbackCom), - ) + logger.debug('Community Authentication successful:', new CommunityLoggingView(callbackCom)) } else { - logger.error('Authentication: Community Authentication failed:', authenticationArgs) + logger.error('Community Authentication failed:', authenticationArgs) } } } catch (err) { - logger.error('Authentication: error in startOpenConnectionCallback:', err) + logger.error('error in startAuthentication:', err) } } diff --git a/federation/src/graphql/api/1_0/util/index.ts b/federation/src/graphql/api/1_0/util/index.ts new file mode 100644 index 000000000..109466d5c --- /dev/null +++ b/federation/src/graphql/api/1_0/util/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_API_1_0_CATEGORY_NAME } from '@/graphql/api/1_0' + +export const LOG4JS_1_0_UTIL_CATEGORY_NAME = `${LOG4JS_API_1_0_CATEGORY_NAME}.util` diff --git a/federation/src/graphql/api/1_0/util/revertSettledReceiveTransaction.ts b/federation/src/graphql/api/1_0/util/revertSettledReceiveTransaction.ts index 4d69210ec..cdb8a4563 100644 --- 
a/federation/src/graphql/api/1_0/util/revertSettledReceiveTransaction.ts +++ b/federation/src/graphql/api/1_0/util/revertSettledReceiveTransaction.ts @@ -1,4 +1,5 @@ import { + AppDatabase, CommunityLoggingView, Community as DbCommunity, PendingTransaction as DbPendingTransaction, @@ -8,16 +9,19 @@ import { UserLoggingView, Transaction as dbTransaction, } from 'database' -import { getConnection } from 'typeorm' import { PendingTransactionState } from '../enum/PendingTransactionState' import { LogError } from '@/server/LogError' -import { federationLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' +import { LOG4JS_1_0_UTIL_CATEGORY_NAME } from '.' import { TRANSACTIONS_LOCK } from '@/graphql/util/TRANSACTIONS_LOCK' import { getLastTransaction } from '@/graphql/util/getLastTransaction' +const db = AppDatabase.getInstance() +const logger = getLogger(`${LOG4JS_1_0_UTIL_CATEGORY_NAME}.revertSettledReceiveTransaction`) + export async function revertSettledReceiveTransaction( homeCom: DbCommunity, receiverUser: DbUser, @@ -26,7 +30,7 @@ export async function revertSettledReceiveTransaction( // TODO: synchronisation with TRANSACTION_LOCK of backend-modul necessary!!! // acquire lock const releaseLock = await TRANSACTIONS_LOCK.acquire() - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() await queryRunner.startTransaction('REPEATABLE READ') logger.debug(`start Transaction for write-access...`) diff --git a/federation/src/graphql/api/1_0/util/settlePendingReceiveTransaction.ts b/federation/src/graphql/api/1_0/util/settlePendingReceiveTransaction.ts index 6a3e6b159..b10d40a03 100644 --- a/federation/src/graphql/api/1_0/util/settlePendingReceiveTransaction.ts +++ b/federation/src/graphql/api/1_0/util/settlePendingReceiveTransaction.ts @@ -1,4 +1,5 @@ import { + AppDatabase, CommunityLoggingView, Community as DbCommunity, PendingTransaction as DbPendingTransaction, @@ -8,17 +9,20 @@ import { UserLoggingView, Transaction as dbTransaction, } from 'database' -import { getConnection } from 'typeorm' import { PendingTransactionState } from '../enum/PendingTransactionState' import { LogError } from '@/server/LogError' -import { federationLogger as logger } from '@/server/logger' import { TRANSACTIONS_LOCK } from '@/graphql/util/TRANSACTIONS_LOCK' import { getLastTransaction } from '@/graphql/util/getLastTransaction' import Decimal from 'decimal.js-light' +import { getLogger } from 'log4js' +import { LOG4JS_1_0_UTIL_CATEGORY_NAME } from '.' import { calculateRecipientBalance } from './calculateRecipientBalance' +const db = AppDatabase.getInstance() +const logger = getLogger(`${LOG4JS_1_0_UTIL_CATEGORY_NAME}.settlePendingReceiveTransaction`) + export async function settlePendingReceiveTransaction( homeCom: DbCommunity, receiverUser: DbUser, @@ -27,7 +31,7 @@ export async function settlePendingReceiveTransaction( // TODO: synchronisation with TRANSACTION_LOCK of backend-modul necessary!!! 
// acquire lock const releaseLock = await TRANSACTIONS_LOCK.acquire() - const queryRunner = getConnection().createQueryRunner() + const queryRunner = db.getDataSource().createQueryRunner() await queryRunner.connect() await queryRunner.startTransaction('REPEATABLE READ') logger.debug(`start Transaction for write-access...`) diff --git a/federation/src/graphql/api/1_0/util/storeForeignUser.ts b/federation/src/graphql/api/1_0/util/storeForeignUser.ts index d0f05ae1e..c7991f482 100644 --- a/federation/src/graphql/api/1_0/util/storeForeignUser.ts +++ b/federation/src/graphql/api/1_0/util/storeForeignUser.ts @@ -1,9 +1,12 @@ import { User as DbUser, UserLoggingView } from 'database' -import { federationLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' +import { LOG4JS_1_0_UTIL_CATEGORY_NAME } from '.' import { SendCoinsArgsLoggingView } from '../logger/SendCoinsArgsLogging.view' import { SendCoinsArgs } from '../model/SendCoinsArgs' +const logger = getLogger(`${LOG4JS_1_0_UTIL_CATEGORY_NAME}.storeForeignUser`) + export async function storeForeignUser(args: SendCoinsArgs): Promise { if (args.senderCommunityUuid !== null && args.senderUserUuid !== null) { try { diff --git a/federation/src/graphql/api/1_1/resolver/PublicKeyResolver.test.ts b/federation/src/graphql/api/1_1/resolver/PublicKeyResolver.test.ts index 2c2b3bb48..40db6db76 100644 --- a/federation/src/graphql/api/1_1/resolver/PublicKeyResolver.test.ts +++ b/federation/src/graphql/api/1_1/resolver/PublicKeyResolver.test.ts @@ -1,7 +1,9 @@ import { CONFIG } from '@/config' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' import { createServer } from '@/server/createServer' import { createTestClient } from 'apollo-server-testing' import { FederatedCommunity as DbFederatedCommunity } from 'database' +import { getLogger } from 'log4js' let query: any @@ -11,7 +13,7 @@ let con: any CONFIG.FEDERATION_API = '1_1' beforeAll(async () => { - const server = await createServer() + const server = await createServer(getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.apollo`)) con = server.con query = createTestClient(server.apollo).query DbFederatedCommunity.clear() diff --git a/federation/src/graphql/api/1_1/resolver/PublicKeyResolver.ts b/federation/src/graphql/api/1_1/resolver/PublicKeyResolver.ts index 6e621eb59..181ccb78c 100644 --- a/federation/src/graphql/api/1_1/resolver/PublicKeyResolver.ts +++ b/federation/src/graphql/api/1_1/resolver/PublicKeyResolver.ts @@ -1,13 +1,16 @@ -import { federationLogger as logger } from '@/server/logger' import { FederatedCommunity as DbFederatedCommunity } from 'database' +import { getLogger } from 'log4js' import { Query, Resolver } from 'type-graphql' +import { LOG4JS_RESOLVER_1_1_CATEGORY_NAME } from '.' import { GetPublicKeyResult } from '../../1_0/model/GetPublicKeyResult' +const logger = getLogger(`${LOG4JS_RESOLVER_1_1_CATEGORY_NAME}.PublicKeyResolver`) + @Resolver() export class PublicKeyResolver { @Query(() => GetPublicKeyResult) async getPublicKey(): Promise { - logger.debug(`getPublicKey() via apiVersion=1_0 ...`) + logger.debug(`getPublicKey()...`) const homeCom = await DbFederatedCommunity.findOneOrFail({ where: { foreign: false, @@ -15,7 +18,7 @@ export class PublicKeyResolver { }, }) const publicKeyHex = homeCom.publicKey.toString('hex') - logger.debug(`getPublicKey()-1_1... return publicKey=${publicKeyHex}`) + logger.debug(`getPublicKey()... 
return publicKey=${publicKeyHex}`) return new GetPublicKeyResult(publicKeyHex) } } diff --git a/federation/src/graphql/api/1_1/resolver/index.ts b/federation/src/graphql/api/1_1/resolver/index.ts new file mode 100644 index 000000000..f2b9283eb --- /dev/null +++ b/federation/src/graphql/api/1_1/resolver/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_API_CATEGORY_NAME } from '@/graphql/api' + +export const LOG4JS_RESOLVER_1_1_CATEGORY_NAME = `${LOG4JS_API_CATEGORY_NAME}.1_1.resolver` diff --git a/federation/src/graphql/api/index.ts b/federation/src/graphql/api/index.ts new file mode 100644 index 000000000..b6e810c61 --- /dev/null +++ b/federation/src/graphql/api/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_GRAPHQL_CATEGORY_NAME } from '@/graphql' + +export const LOG4JS_API_CATEGORY_NAME = `${LOG4JS_GRAPHQL_CATEGORY_NAME}.api` diff --git a/federation/src/graphql/api/schema.ts b/federation/src/graphql/api/schema.ts index 661e203d2..00ae50b21 100644 --- a/federation/src/graphql/api/schema.ts +++ b/federation/src/graphql/api/schema.ts @@ -1,12 +1,13 @@ -import { federationLogger as logger } from '@/server/logger' +import { getLogger } from 'log4js' import { NonEmptyArray } from 'type-graphql' +import { LOG4JS_API_CATEGORY_NAME } from '.' // config import { CONFIG } from '../../config' import { getApiResolvers as getApiResolvers_1_0 } from './1_0/schema' import { getApiResolvers as getApiResolvers_1_1 } from './1_1/schema' export const getApiResolvers = (): NonEmptyArray => { - logger.info(`getApiResolvers...${CONFIG.FEDERATION_API}`) + getLogger(LOG4JS_API_CATEGORY_NAME).info(`getApiResolvers...${CONFIG.FEDERATION_API}`) if (CONFIG.FEDERATION_API === '1_0') { return getApiResolvers_1_0() diff --git a/federation/src/graphql/index.ts b/federation/src/graphql/index.ts new file mode 100644 index 000000000..c3b774479 --- /dev/null +++ b/federation/src/graphql/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' + +export const LOG4JS_GRAPHQL_CATEGORY_NAME = `${LOG4JS_BASE_CATEGORY_NAME}.graphql` diff --git a/federation/src/graphql/util/checkTradingLevel.ts b/federation/src/graphql/util/checkTradingLevel.ts index 773e76c03..458970c9f 100644 --- a/federation/src/graphql/util/checkTradingLevel.ts +++ b/federation/src/graphql/util/checkTradingLevel.ts @@ -1,9 +1,12 @@ import { CONFIG } from '@/config' -import { federationLogger as logger } from '@/server/logger' +import { LOG4JS_GRAPHQL_UTIL_CATEGORY_NAME } from '@/graphql/util' import { Community as DbCommunity } from 'database' import { Decimal } from 'decimal.js-light' +import { getLogger } from 'log4js' export async function checkTradingLevel(homeCom: DbCommunity, amount: Decimal): Promise { + const logger = getLogger(`${LOG4JS_GRAPHQL_UTIL_CATEGORY_NAME}.checkTradingLevel`) + const tradingLevel = CONFIG.FEDERATION_TRADING_LEVEL if (homeCom.url !== tradingLevel.RECEIVER_COMMUNITY_URL) { logger.warn( diff --git a/federation/src/graphql/util/decay.ts b/federation/src/graphql/util/decay.ts index 331d3b5b4..9f2908b71 100644 --- a/federation/src/graphql/util/decay.ts +++ b/federation/src/graphql/util/decay.ts @@ -1,13 +1,17 @@ import { Decimal } from 'decimal.js-light' -import { CONFIG } from '@/config' import { LogError } from '@/server/LogError' +import { DECAY_START_TIME } from 'config-schema' import { Decay } from '../api/1_0/model/Decay' -// TODO: externalize all those definitions and functions into an external decay library +Decimal.set({ + precision: 25, + rounding: Decimal.ROUND_HALF_UP, +}) +// TODO: externalize all those 
definitions and functions into an external decay library function decayFormula(value: Decimal, seconds: number): Decimal { - // TODO why do we need to convert this here to a stting to work properly? + // TODO why do we need to convert this here to a string to work properly? return value.mul( new Decimal('0.99999997803504048973201202316767079413460520837376').pow(seconds).toString(), ) @@ -17,7 +21,7 @@ function calculateDecay( amount: Decimal, from: Date, to: Date, - startBlock: Date = CONFIG.DECAY_START_TIME, + startBlock: Date = DECAY_START_TIME, ): Decay { const fromMs = from.getTime() const toMs = to.getTime() diff --git a/federation/src/graphql/util/index.ts b/federation/src/graphql/util/index.ts new file mode 100644 index 000000000..d7f06f692 --- /dev/null +++ b/federation/src/graphql/util/index.ts @@ -0,0 +1,3 @@ +import { LOG4JS_GRAPHQL_CATEGORY_NAME } from '@/graphql' + +export const LOG4JS_GRAPHQL_UTIL_CATEGORY_NAME = `${LOG4JS_GRAPHQL_CATEGORY_NAME}.util` diff --git a/federation/src/index.ts b/federation/src/index.ts index 5c3cc6f89..4492f24fb 100644 --- a/federation/src/index.ts +++ b/federation/src/index.ts @@ -1,21 +1,29 @@ +import 'source-map-support/register' import { createServer } from './server/createServer' +import { defaultCategory, initLogger } from 'config-schema' +import { getLogger } from 'log4js' // config import { CONFIG } from './config' +import { LOG4JS_BASE_CATEGORY_NAME } from './config/const' async function main() { - // biome-ignore lint/suspicious/noConsole: no logger needed fot startup infos - console.log(`FEDERATION_PORT=${CONFIG.FEDERATION_PORT}`) - // biome-ignore lint/suspicious/noConsole: no logger needed fot startup infos - console.log(`FEDERATION_API=${CONFIG.FEDERATION_API}`) - const { app } = await createServer() + // init logger + const log4jsConfigFileName = CONFIG.LOG4JS_CONFIG_PLACEHOLDER.replace('%v', CONFIG.FEDERATION_API) + initLogger( + [defaultCategory('federation', CONFIG.LOG_LEVEL), defaultCategory('apollo', CONFIG.LOG_LEVEL)], + `${CONFIG.LOG_FILES_BASE_PATH}_${CONFIG.FEDERATION_API}`, + log4jsConfigFileName, + ) + + // init server + const { app } = await createServer(getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.apollo`)) app.listen(CONFIG.FEDERATION_PORT, () => { - // biome-ignore lint/suspicious/noConsole: no logger needed fot startup infos - console.log(`Server is running at http://localhost:${CONFIG.FEDERATION_PORT}`) + const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}`) + logger.info(`Server is running at http://localhost:${CONFIG.FEDERATION_PORT}`) if (CONFIG.GRAPHIQL) { - // biome-ignore lint/suspicious/noConsole: no logger needed fot startup infos - console.log( + logger.info( `GraphIQL available at ${CONFIG.FEDERATION_COMMUNITY_URL}/api/${CONFIG.FEDERATION_API}`, ) } diff --git a/federation/src/server/LogError.ts b/federation/src/server/LogError.ts index fbd6e6d18..0a91b17c6 100644 --- a/federation/src/server/LogError.ts +++ b/federation/src/server/LogError.ts @@ -1,8 +1,23 @@ -import { federationLogger as logger } from './logger' +import { LOG4JS_BASE_CATEGORY_NAME } from '@/config/const' +import { getLogger } from 'log4js' +/** + * A custom Error that logs itself immediately upon instantiation. + * + * TODO: Anti-pattern warning: + * Logging inside the constructor introduces side effects during object creation, + * which breaks separation of concerns and can lead to duplicate or unwanted logs. + * It is generally better to log errors where they are caught, not where they are thrown. 
+ * + * @class LogError + * @extends {Error} + * @param {string} msg - The error message. + * @param {...any} details - Additional details passed to the logger. + */ export class LogError extends Error { constructor(msg: string, ...details: any[]) { super(msg) + const logger = getLogger(`${LOG4JS_BASE_CATEGORY_NAME}.logError`) logger.error(msg, ...details) } } diff --git a/federation/src/server/createServer.ts b/federation/src/server/createServer.ts index e737d2a61..015fbba48 100644 --- a/federation/src/server/createServer.ts +++ b/federation/src/server/createServer.ts @@ -1,9 +1,7 @@ import 'reflect-metadata' import { ApolloServer } from 'apollo-server-express' -import express, { Express, RequestHandler } from 'express' - -import { checkDBVersionUntil } from '@/typeorm/DBVersion' +import express, { Express } from 'express' // server import cors from './cors' @@ -13,15 +11,11 @@ import { plugins } from './plugins' // graphql import { schema } from '@/graphql/schema' -// webhooks -// import { elopageWebhook } from '@/webhook/elopage' -import { Connection } from 'typeorm' - -import { CONFIG } from '@/config' +import { AppDatabase } from 'database' import { slowDown } from 'express-slow-down' import helmet from 'helmet' import { Logger } from 'log4js' -import { apolloLogger } from './logger' +import { DataSource } from 'typeorm' // i18n // import { i18n } from './localization' @@ -29,21 +23,16 @@ import { apolloLogger } from './logger' // TODO implement // import queryComplexity, { simpleEstimator, fieldConfigEstimator } from "graphql-query-complexity"; -type ServerDef = { apollo: ApolloServer; app: Express; con: Connection } +type ServerDef = { apollo: ApolloServer; app: Express; con: DataSource } export const createServer = async ( // context: any = serverContext, - logger: Logger = apolloLogger, + apolloLogger: Logger, // localization: i18n.I18n = i18n, ): Promise => { - logger.addContext('user', 'unknown') - logger.debug('createServer...') - // open mysql connection - const con = await checkDBVersionUntil( - CONFIG.DB_CONNECT_RETRY_COUNT, - CONFIG.DB_CONNECT_RETRY_DELAY_MS, - ) + const db = AppDatabase.getInstance() + await db.init() // Express Server const app = express() @@ -82,9 +71,6 @@ export const createServer = async ( // i18n // app.use(localization.init) - // Elopage Webhook - // app.post('/hook/elopage/' + CONFIG.WEBHOOK_ELOPAGE_SECRET, elopageWebhook) - // Apollo Server const apollo = new ApolloServer({ schema: await schema(), @@ -92,10 +78,8 @@ export const createServer = async ( // introspection: CONFIG.GRAPHIQL, // context, plugins, - logger, + logger: apolloLogger, }) apollo.applyMiddleware({ app, path: '/' }) - logger.debug('createServer...successful') - - return { apollo, app, con } + return { apollo, app, con: db.getDataSource() } } diff --git a/federation/src/server/logger.ts b/federation/src/server/logger.ts deleted file mode 100644 index 505c7e4d8..000000000 --- a/federation/src/server/logger.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { CONFIG } from '@/config' -import log4js from 'log4js' - -import { readFileSync } from 'fs' - -const options = JSON.parse(readFileSync(CONFIG.LOG4JS_CONFIG, 'utf-8')) - -options.categories.backend.level = CONFIG.LOG_LEVEL -options.categories.apollo.level = CONFIG.LOG_LEVEL -let filename: string = options.appenders.federation.filename -options.appenders.federation.filename = filename - .replace('%v', CONFIG.FEDERATION_API) - .replace('%p', CONFIG.FEDERATION_PORT.toString()) -filename = options.appenders.access.filename 
-options.appenders.access.filename = filename.replace('%p', CONFIG.FEDERATION_PORT.toString()) -filename = options.appenders.apollo.filename -options.appenders.apollo.filename = filename.replace('%p', CONFIG.FEDERATION_PORT.toString()) -filename = options.appenders.backend.filename -options.appenders.backend.filename = filename.replace('%p', CONFIG.FEDERATION_PORT.toString()) -filename = options.appenders.errorFile.filename -options.appenders.errorFile.filename = filename.replace('%p', CONFIG.FEDERATION_PORT.toString()) - -log4js.configure(options) - -const apolloLogger = log4js.getLogger('apollo') -// const backendLogger = log4js.getLogger('backend') -const federationLogger = log4js.getLogger('federation') - -// backendLogger.addContext('user', 'unknown') - -export { apolloLogger, federationLogger } diff --git a/federation/src/typeorm/DBVersion.ts b/federation/src/typeorm/DBVersion.ts deleted file mode 100644 index 712c4ee4f..000000000 --- a/federation/src/typeorm/DBVersion.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { CONFIG } from '@/config' -import { federationLogger as logger } from '@/server/logger' -import { Migration } from 'database' -import { Connection as DbConnection } from 'typeorm' -import { connection as connectionFunc } from './connection' - -async function checkDBVersionUntil(maxRetries: number, delayMs: number): Promise { - for (let attempt = 1; attempt <= maxRetries; attempt++) { - try { - const connection = await connectionFunc() - if (connection?.isInitialized) { - const dbVersion = await checkDBVersion(CONFIG.DB_VERSION) - if (dbVersion) { - logger.info('Database connection and version check succeeded.') - return connection - } - } - } catch (err) { - logger.warn(`Attempt ${attempt}: Waiting for DB...`, err) - } - await new Promise((resolve) => setTimeout(resolve, delayMs)) - } - - logger.fatal( - `Fatal: Could not connect to database or version check failed after ${maxRetries} attempts.`, - ) - throw new Error('Fatal: Database not ready.') -} - -const getDBVersion = async (): Promise => { - try { - const [dbVersion] = await Migration.find({ order: { version: 'DESC' }, take: 1 }) - return dbVersion ? dbVersion.fileName : null - } catch (error) { - logger.error(error) - return null - } -} - -const checkDBVersion = async (DB_VERSION: string): Promise => { - const dbVersion = await getDBVersion() - if (!dbVersion || dbVersion.indexOf(DB_VERSION) === -1) { - logger.error( - `Wrong database version detected - the backend requires '${DB_VERSION}' but found '${ - dbVersion || 'None' - }`, - ) - return false - } - return true -} - -export { checkDBVersion, getDBVersion, checkDBVersionUntil } diff --git a/federation/src/typeorm/connection.ts b/federation/src/typeorm/connection.ts deleted file mode 100644 index 827e8d430..000000000 --- a/federation/src/typeorm/connection.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { CONFIG } from '@/config' -// TODO This is super weird - since the entities are defined in another project they have their own globals. 
-// We cannot use our connection here, but must use the external typeorm installation -import { entities } from 'database' -import { Connection, FileLogger, createConnection } from 'typeorm' - -const connection = async (): Promise => { - try { - return createConnection({ - name: 'default', - type: 'mysql', - host: CONFIG.DB_HOST, - port: CONFIG.DB_PORT, - username: CONFIG.DB_USER, - password: CONFIG.DB_PASSWORD, - database: CONFIG.DB_DATABASE, - entities, - synchronize: false, - logging: true, - logger: new FileLogger('all', { - // workaround to let previous path working, because with esbuild the script root path has changed - logPath: '../' + CONFIG.TYPEORM_LOGGING_RELATIVE_PATH, - }), - extra: { - charset: 'utf8mb4_unicode_ci', - }, - }) - } catch (error) { - // biome-ignore lint/suspicious/noConsole: no logger present - console.log(error) - return null - } -} - -export { connection } diff --git a/federation/test/helpers.ts b/federation/test/helpers.ts index 9dad4f49c..c8523fe7e 100644 --- a/federation/test/helpers.ts +++ b/federation/test/helpers.ts @@ -1,16 +1,9 @@ - - - - - - - import { entities } from 'database' import { createTestClient } from 'apollo-server-testing' import { createServer } from '@/server/createServer' -import { logger } from './testSetup' +import { getLogger } from 'config-schema/test/testSetup' export const headerPushMock = jest.fn((t) => { context.token = t.value @@ -32,7 +25,7 @@ export const cleanDB = async () => { } } -export const testEnvironment = async (testLogger = logger /*, testI18n = i18n */) => { +export const testEnvironment = async (testLogger = getLogger('apollo') /*, testI18n = i18n */) => { const server = await createServer(/* context, */ testLogger /* , testI18n */) const con = server.con const testClient = createTestClient(server.apollo) diff --git a/federation/test/testSetup.ts b/federation/test/testSetup.ts deleted file mode 100644 index 85008799f..000000000 --- a/federation/test/testSetup.ts +++ /dev/null @@ -1,43 +0,0 @@ -// import { CONFIG } from '@/config' -// import { i18n } from '@/server/localization' -import { federationLogger as logger } from '@/server/logger' - -// CONFIG.EMAIL = true -// CONFIG.EMAIL_TEST_MODUS = false - -jest.setTimeout(1000000) - -jest.mock('@/server/logger', () => { - const originalModule = jest.requireActual('@/server/logger') - return { - __esModule: true, - ...originalModule, - backendLogger: { - addContext: jest.fn(), - trace: jest.fn(), - debug: jest.fn(), - warn: jest.fn(), - info: jest.fn(), - error: jest.fn(), - fatal: jest.fn(), - }, - } -}) - -/* -jest.mock('@/server/localization', () => { - const originalModule = jest.requireActual('@/server/localization') - return { - __esModule: true, - ...originalModule, - i18n: { - init: jest.fn(), - // configure: jest.fn(), - // __: jest.fn(), - // setLocale: jest.fn(), - }, - } -}) -*/ - -export { logger } diff --git a/frontend/package.json b/frontend/package.json index 258e00b33..7e122a237 100755 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "frontend", - "version": "2.5.2", + "version": "2.6.0", "private": true, "scripts": { "dev": "concurrently \"yarn watch-scss\" \"vite\"", diff --git a/frontend/src/components/Inputs/InputPassword.spec.js b/frontend/src/components/Inputs/InputPassword.spec.js index 9d6f5481d..94e50e026 100644 --- a/frontend/src/components/Inputs/InputPassword.spec.js +++ b/frontend/src/components/Inputs/InputPassword.spec.js @@ -80,7 +80,7 @@ describe('InputPassword', () => { }) it('has the placeholder 
"input-field-placeholder"', () => { - expect(wrapper.find('input').attributes('placeholder')).toEqual('form.password') + expect(wrapper.find('input').attributes('placeholder')).toEqual('input-field-placeholder') }) it('has the value ""', () => { @@ -88,7 +88,7 @@ describe('InputPassword', () => { }) it('has the label "input-field-label"', () => { - expect(wrapper.find('label').text()).toEqual('form.password') + expect(wrapper.find('label').text()).toEqual('input-field-label') }) it('has the label for "input-field-name-input-field"', () => { diff --git a/frontend/src/components/Inputs/InputPassword.vue b/frontend/src/components/Inputs/InputPassword.vue index b496d09a3..b7619045a 100644 --- a/frontend/src/components/Inputs/InputPassword.vue +++ b/frontend/src/components/Inputs/InputPassword.vue @@ -62,6 +62,14 @@ const props = defineProps({ type: Boolean, default: false, }, + label: { + type: String, + default: null, + }, + placeholder: { + type: String, + default: null, + }, }) const name = toRef(props, 'name') @@ -73,8 +81,8 @@ const { value, errorMessage, meta, errors, validate } = useField(name, props.rul const { t } = useI18n() const defaultTranslations = computed(() => ({ - label: t('form.password'), - placeholder: t('form.password'), + label: props.label || t('form.password'), + placeholder: props.placeholder || t('form.password'), })) const showPassword = ref(false) diff --git a/frontend/src/components/LinkInformations/RedeemInformation.vue b/frontend/src/components/LinkInformations/RedeemInformation.vue index c89c7c3e9..c1fc21683 100644 --- a/frontend/src/components/LinkInformations/RedeemInformation.vue +++ b/frontend/src/components/LinkInformations/RedeemInformation.vue @@ -10,7 +10,7 @@ {{ '"' + linkData.senderCommunity.name + '.' + linkData.senderUser.firstName + '"' }} {{ $t('transaction-link.send_you') }} {{ $filters.GDD(linkData.amount) }} -
      [element markup lost in extraction]
+      [element markup lost in extraction]
{{ '"' + linkData.senderUser.firstName + '"' }} {{ $t('transaction-link.send_you') }} {{ $filters.GDD(linkData.amount) }}

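For context on the InputPassword.vue hunk above: the two new optional props, label and placeholder, override the component's default translations and fall back to t('form.password') when omitted. A minimal caller sketch follows; the component tag, field name, and translation key are illustrative assumptions, not taken from this diff:

<!-- hypothetical usage of the new optional props; 'form.password-repeat' is an assumed i18n key -->
<input-password
  name="password-repeat"
  :rules="rules"
  :label="$t('form.password-repeat')"
  :placeholder="$t('form.password-repeat')"
/>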
diff --git a/frontend/src/layouts/templates/ContributionsTemplate.spec.js b/frontend/src/layouts/templates/ContributionsTemplate.spec.js index 75448c0ae..30552edfc 100644 --- a/frontend/src/layouts/templates/ContributionsTemplate.spec.js +++ b/frontend/src/layouts/templates/ContributionsTemplate.spec.js @@ -40,9 +40,9 @@ describe('ContributionsTemplate', () => { expect(wrapper.findComponent({ name: 'ContributionInfo' }).exists()).toBe(true) }) - describe('mounted with parameter contributions', () => { + describe('mounted with parameter own-contributions', () => { beforeEach(() => { - wrapper = createWrapper('contributions') + wrapper = createWrapper('own-contributions') }) it('has a header related to "my contributions"', () => { @@ -73,9 +73,9 @@ describe('ContributionsTemplate', () => { }) }) - describe('mounted with parameter community', () => { + describe('mounted with parameter all-contributions', () => { beforeEach(() => { - wrapper = createWrapper('community') + wrapper = createWrapper('all-contributions') }) it('has a header related to "the community"', () => { diff --git a/frontend/src/layouts/templates/ContributionsTemplate.vue b/frontend/src/layouts/templates/ContributionsTemplate.vue index 85213c972..0e1692e69 100644 --- a/frontend/src/layouts/templates/ContributionsTemplate.vue +++ b/frontend/src/layouts/templates/ContributionsTemplate.vue @@ -12,7 +12,7 @@
-