Merge branch 'master' into refactor_swap_eslint_biome

This commit is contained in:
einhornimmond 2025-04-30 10:11:39 +02:00
commit abeec96a6e
40 changed files with 862 additions and 4426 deletions

View File

@ -43,18 +43,20 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Backend | docker-compose mariadb
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Backend | install and build
run: cd database && yarn && yarn build && cd ../config && yarn && cd ../backend && yarn && yarn build
- name: Sleep for 30 seconds
run: sleep 30s
shell: bash
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Backend | docker-compose database
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps database
- name: Backend | prepare database
run: cd database && yarn up:backend_test
- name: Backend | Unit tests
run: cd database && yarn && yarn build && cd ../config && yarn install && cd ../backend && yarn && yarn test
- name: Backend | Unit tests
run: cd backend && yarn test
lint:
if: needs.files-changed.outputs.backend == 'true'
@ -66,7 +68,7 @@ jobs:
uses: actions/checkout@v3
- name: Backend | Lint
run: cd database && yarn && cd ../config && yarn install && cd ../backend && yarn && yarn run lint
run: cd database && yarn && cd ../backend && yarn && yarn run lint
locales:
if: needs.files-changed.outputs.backend == 'true'

View File

@ -42,7 +42,7 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Database | docker-compose
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach mariadb
- name: Database | up

View File

@ -31,15 +31,7 @@ jobs:
uses: actions/checkout@v3
- name: Build 'test' image
run: |
docker build --target test -t "gradido/dht-node:test" -f dht-node/Dockerfile .
docker save "gradido/dht-node:test" > /tmp/dht-node.tar
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: docker-dht-node-test
path: /tmp/dht-node.tar
run: docker build --target test -t "gradido/dht-node:test" -f dht-node/Dockerfile .
lint:
name: Lint - DHT Node
@ -50,8 +42,8 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Lint
run: cd database && yarn && cd ../config && yarn install && cd ../dht-node && yarn && yarn run lint
- name: DHT-Node | Lint
run: cd database && yarn && cd ../dht-node && yarn && yarn run lint
unit_test:
name: Unit Tests - DHT Node
@ -62,30 +54,18 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Download Docker Image
uses: actions/download-artifact@v4
with:
name: docker-dht-node-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/dht-node.tar
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Sleep for 30 seconds
run: sleep 30s
shell: bash
- name: DHT-Node | install and build
run: cd database && yarn && yarn build && cd ../config && yarn && cd ../dht-node && yarn && yarn build
- name: docker-compose database
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps database
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Sleep for 30 seconds
run: sleep 30s
shell: bash
- name: DHT-Node | prepare database
run: cd database && yarn up:dht_test
- name: DHT-Node | Unit tests
run: cd dht-node && yarn test
- name: Unit tests
run: cd database && yarn && yarn build && cd ../config && yarn install && cd ../dht-node && yarn && yarn test
#- name: Unit tests
# run: docker run --env NODE_ENV=test --env DB_HOST=mariadb --network gradido_internal-net --rm gradido/dht-node:test yarn run test

View File

@ -13,25 +13,12 @@ jobs:
- name: Boot up test system | docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach mariadb
- name: Sleep for 10 seconds
run: sleep 10s
- name: Boot up test system | seed backend
- name: Prepare test system
run: |
sudo chown runner:docker -R *
cd database
yarn && yarn dev_reset
cd ../config
yarn install
cd ../backend
yarn && yarn seed
- name: Boot up test system | docker-compose backend, frontend, admin, nginx, mailserver
run: |
cd backend
cp .env.test_e2e .env
cd ..
docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps backend frontend admin nginx mailserver
cd database && yarn && yarn build
cd ../config && yarn
cd ../backend && yarn
- name: End-to-end tests | prepare
run: |
@ -41,6 +28,21 @@ jobs:
cd e2e-tests/
yarn
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Boot up test system | seed backend
run: |
cd database && yarn dev_reset
cd ../backend && yarn seed
- name: Boot up test system | docker-compose backend, frontend, admin, nginx, mailserver
run: |
cd backend
cp .env.test_e2e .env
cd ..
docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps backend frontend admin nginx mailserver
- name: End-to-end tests | run tests
id: e2e-tests
run: |

View File

@ -72,20 +72,15 @@ jobs:
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Sleep for 30 seconds
run: sleep 30s
shell: bash
- name: docker-compose database
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps database
- name: Federation | install and build
run: cd database && yarn && yarn build && cd ../config && yarn && cd ../federation && yarn && yarn build
- name: Sleep for 30 seconds
run: sleep 30s
shell: bash
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
#- name: Unit tests
# run: cd database && yarn && yarn build && cd ../dht-node && yarn && yarn test
- name: Unit tests
run: |
docker run --env NODE_ENV=test --env DB_HOST=mariadb --network gradido_internal-net --rm gradido/federation:test yarn run test
- name: Federation | prepare database
run: cd database && yarn up:federation_test
- name: Federation | Unit tests
run: docker run --env NODE_ENV=test --env DB_HOST=mariadb --network gradido_internal-net --rm gradido/federation:test yarn run test

View File

@ -14,7 +14,7 @@
"dev": "cross-env TZ=UTC nodemon -w src --ext ts,pug,json,css --exec ts-node -r tsconfig-paths/register src/index.ts",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write",
"test": "cross-env TZ=UTC NODE_ENV=development jest --runInBand --forceExit --detectOpenHandles",
"test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_backend jest --runInBand --forceExit --detectOpenHandles",
"seed": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/seeds/index.ts",
"klicktipp": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/util/executeKlicktipp.ts",
"gmsusers": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/apis/gms/ExportUsers.ts",

View File

@ -26,7 +26,7 @@ function getUsersPage(page: number, limit: number): Promise<[User[], number]> {
/**
* @param client
* @returns user map indices with email
* @returns user map indices with username
*/
async function loadUsersFromHumHub(client: HumHubClient): Promise<Map<string, GetUser>> {
const start = new Date().getTime()
@ -42,7 +42,7 @@ async function loadUsersFromHumHub(client: HumHubClient): Promise<Map<string, Ge
for (const user of usersPage.results) {
// deleted users have empty emails
if (user.account.email) {
humhubUsers.set(user.account.email.trim(), user)
humhubUsers.set(user.account.username, user)
} else {
skippedUsersCount++
}
@ -52,6 +52,7 @@ async function loadUsersFromHumHub(client: HumHubClient): Promise<Map<string, Ge
`load users from humhub: ${humhubUsers.size}/${usersPage.total}, skipped: ${skippedUsersCount}\r`,
)
} while (usersPage && usersPage.results.length === HUMHUB_BULK_SIZE)
process.stdout.write('\n')
const elapsed = new Date().getTime() - start
logger.info('load users from humhub', {
@ -87,25 +88,31 @@ async function main() {
const humhubUsers = await loadUsersFromHumHub(humHubClient)
let dbUserCount = 0
const executedHumhubActionsCount = [0, 0, 0, 0]
const executedHumhubActionsCount = [0, 0, 0, 0, 0]
do {
const [users, totalUsers] = await getUsersPage(page, USER_BULK_SIZE)
dbUserCount += users.length
userCount = users.length
page++
const promises: Promise<ExecutedHumhubAction>[] = []
for (const user of users) {
promises.push(syncUser(user, humhubUsers))
try {
const [users, totalUsers] = await getUsersPage(page, USER_BULK_SIZE)
dbUserCount += users.length
userCount = users.length
page++
const promises: Promise<ExecutedHumhubAction>[] = []
for (const user of users) {
promises.push(syncUser(user, humhubUsers))
}
const executedActions = await Promise.all(promises)
for (const executedAction of executedActions) {
executedHumhubActionsCount[executedAction as number]++
}
// using process.stdout.write here so that carriage-return is working analog to c
// printf("\rchecked user: %d/%d", dbUserCount, totalUsers);
process.stdout.write(`checked user: ${dbUserCount}/${totalUsers}\r`)
} catch (e) {
process.stdout.write('\n')
throw e
}
const executedActions = await Promise.all(promises)
for (const executedAction of executedActions) {
executedHumhubActionsCount[executedAction as number]++
}
// using process.stdout.write here so that carriage-return is working analog to c
// printf("\rchecked user: %d/%d", dbUserCount, totalUsers);
process.stdout.write(`checked user: ${dbUserCount}/${totalUsers}\r`)
} while (userCount === USER_BULK_SIZE)
process.stdout.write('\n')
await con.destroy()
const elapsed = new Date().getTime() - start
@ -116,6 +123,7 @@ async function main() {
updatedCount: executedHumhubActionsCount[ExecutedHumhubAction.UPDATE],
skippedCount: executedHumhubActionsCount[ExecutedHumhubAction.SKIP],
deletedCount: executedHumhubActionsCount[ExecutedHumhubAction.DELETE],
validationErrorCount: executedHumhubActionsCount[ExecutedHumhubAction.VALIDATION_ERROR],
})
}

View File

@ -2,6 +2,7 @@ import { User } from '@entity/User'
import { isHumhubUserIdenticalToDbUser } from '@/apis/humhub/compareHumhubUserDbUser'
import { GetUser } from '@/apis/humhub/model/GetUser'
import { PostUser } from '@/apis/humhub/model/PostUser'
export enum ExecutedHumhubAction {
UPDATE,
@ -26,7 +27,8 @@ export async function syncUser(
user: User,
humhubUsers: Map<string, GetUser>,
): Promise<ExecutedHumhubAction> {
const humhubUser = humhubUsers.get(user.emailContact.email.trim())
const postUser = new PostUser(user)
const humhubUser = humhubUsers.get(postUser.account.username)
if (humhubUser) {
if (!user.humhubAllowed) {
return Promise.resolve(ExecutedHumhubAction.DELETE)

View File

@ -1,3 +1,5 @@
import 'reflect-metadata'
import { PublishNameType } from '@/graphql/enum/PublishNameType'
import { communityDbUser } from '@/util/communityUser'
import { isHumhubUserIdenticalToDbUser } from './compareHumhubUserDbUser'
@ -12,6 +14,7 @@ describe('isHumhubUserIdenticalToDbUser', () => {
defaultUser.alias = 'alias'
defaultUser.emailContact.email = 'email@gmail.com'
defaultUser.language = 'en'
defaultUser.gradidoID = 'gradidoID'
})
it('Should return true because humhubUser was created from entity user', () => {
@ -20,6 +23,20 @@ describe('isHumhubUserIdenticalToDbUser', () => {
expect(result).toBe(true)
})
it('Should return false, because last name differ because of publish name type', () => {
const humhubUser = new GetUser(defaultUser, 1)
defaultUser.humhubPublishName = PublishNameType.PUBLISH_NAME_FIRST
const result = isHumhubUserIdenticalToDbUser(humhubUser, defaultUser)
expect(result).toBe(false)
})
it('Should return true, even if alias is empty', () => {
defaultUser.alias = ''
const humhubUser = new GetUser(defaultUser, 1)
const result = isHumhubUserIdenticalToDbUser(humhubUser, defaultUser)
expect(result).toBe(true)
})
it('Should return false because first name differ', () => {
const humhubUser = new GetUser(defaultUser, 1)
humhubUser.profile.firstname = 'changed first name'

View File

@ -27,7 +27,8 @@ describe('syncUser function', () => {
it('When humhubUser exists and user.humhubAllowed is false, should return DELETE action', async () => {
const humhubUsers = new Map<string, GetUser>()
humhubUsers.set(defaultUser.emailContact.email, new GetUser(defaultUser, 1))
const humhubUser = new GetUser(defaultUser, 1)
humhubUsers.set(humhubUser.account.username, humhubUser)
defaultUser.humhubAllowed = false
const result = await syncUser(defaultUser, humhubUsers)
@ -38,8 +39,8 @@ describe('syncUser function', () => {
it('When humhubUser exists and user.humhubAllowed is true and there are changes in user data, should return UPDATE action', async () => {
const humhubUsers = new Map<string, GetUser>()
const humhubUser = new GetUser(defaultUser, 1)
humhubUsers.set(humhubUser.account.username, humhubUser)
humhubUser.account.username = 'test username'
humhubUsers.set(defaultUser.emailContact.email, humhubUser)
defaultUser.humhubAllowed = true
const result = await syncUser(defaultUser, humhubUsers)
@ -50,7 +51,7 @@ describe('syncUser function', () => {
it('When humhubUser exists and user.humhubAllowed is true and there are no changes in user data, should return SKIP action', async () => {
const humhubUsers = new Map<string, GetUser>()
const humhubUser = new GetUser(defaultUser, 1)
humhubUsers.set(defaultUser.emailContact.email, humhubUser)
humhubUsers.set(humhubUser.account.username, humhubUser)
defaultUser.humhubAllowed = true
const result = await syncUser(defaultUser, humhubUsers)

View File

@ -1,6 +1,7 @@
import { User } from '@entity/User'
import { LogError } from '@/server/LogError'
import { backendLogger as logger } from '@/server/logger'
import { HumHubClient } from './HumHubClient'
import { isHumhubUserIdenticalToDbUser } from './compareHumhubUserDbUser'
@ -12,7 +13,22 @@ export enum ExecutedHumhubAction {
CREATE,
SKIP,
DELETE,
VALIDATION_ERROR,
}
// todo: replace with full validation (schema)
function isValid(postUser: PostUser, userId: number): boolean {
if (postUser.profile.firstname.length > 20) {
logger.error('firstname too long for humhub, for user with id:', userId)
return false
}
if (postUser.profile.lastname.length > 20) {
logger.error('lastname too long for humhub, for user with id:', userId)
return false
}
return true
}
/**
* Trigger action according to conditions
* | User exist on humhub | export to humhub allowed | changes in user data | ACTION
@ -21,9 +37,8 @@ export enum ExecutedHumhubAction {
* | true | true | false | SKIP
* | false | false | ignored | SKIP
* | false | true | ignored | CREATE
* @param user
* @param humHubClient
* @param humhubUsers
* @param user user entity
* @param humhubUsers user map indices with username
* @returns
*/
export async function syncUser(
@ -31,7 +46,10 @@ export async function syncUser(
humhubUsers: Map<string, GetUser>,
): Promise<ExecutedHumhubAction> {
const postUser = new PostUser(user)
const humhubUser = humhubUsers.get(user.emailContact.email.trim())
if (!isValid(postUser, user.id)) {
return ExecutedHumhubAction.VALIDATION_ERROR
}
const humhubUser = humhubUsers.get(postUser.account.username)
const humHubClient = HumHubClient.getInstance()
if (!humHubClient) {
throw new LogError('Error creating humhub client')

View File

@ -34,12 +34,16 @@ export class PublishNameLogic {
* @returns user.firstName for PUBLISH_NAME_FIRST, PUBLISH_NAME_FIRST_INITIAL or PUBLISH_NAME_FULL
*/
public getFirstName(publishNameType: PublishNameType): string {
let firstName = ''
if (this.user && typeof this.user.firstName === 'string') {
firstName = this.user.firstName
}
return [
PublishNameType.PUBLISH_NAME_FIRST,
PublishNameType.PUBLISH_NAME_FIRST_INITIAL,
PublishNameType.PUBLISH_NAME_FULL,
].includes(publishNameType)
? this.user.firstName
? firstName.slice(0, 20)
: ''
}
@ -50,10 +54,14 @@ export class PublishNameLogic {
* first initial from user.lastName for PUBLISH_NAME_FIRST_INITIAL
*/
public getLastName(publishNameType: PublishNameType): string {
let lastName = ''
if (this.user && typeof this.user.lastName === 'string') {
lastName = this.user.lastName
}
return publishNameType === PublishNameType.PUBLISH_NAME_FULL
? this.user.lastName
: publishNameType === PublishNameType.PUBLISH_NAME_FIRST_INITIAL
? this.user.lastName.charAt(0)
? lastName.slice(0, 20)
: publishNameType === PublishNameType.PUBLISH_NAME_FIRST_INITIAL && lastName.length > 0
? lastName.charAt(0)
: ''
}

View File

@ -33,6 +33,7 @@ import { UpdateUserInfosArgs } from '@arg/UpdateUserInfosArgs'
import { OptInType } from '@enum/OptInType'
import { Order } from '@enum/Order'
import { PasswordEncryptionType } from '@enum/PasswordEncryptionType'
import { PublishNameType } from '@enum/PublishNameType'
import { UserContactType } from '@enum/UserContactType'
import { SearchAdminUsersResult } from '@model/AdminUser'
// import { Location } from '@model/Location'
@ -50,6 +51,7 @@ import { PostUser } from '@/apis/humhub/model/PostUser'
import { encode } from '@/auth/JWT'
import { RIGHTS } from '@/auth/RIGHTS'
import { CONFIG } from '@/config'
import { PublishNameLogic } from '@/data/PublishName.logic'
import {
sendAccountActivationEmail,
sendAccountMultiRegistrationEmail,
@ -241,12 +243,12 @@ export class UserResolver {
try {
const result = await humhubUserPromise
user.humhubAllowed = result?.result?.account.status === 1
if (user.humhubAllowed) {
if (user.humhubAllowed && result?.result?.account?.username) {
let spaceId = null
if (projectBranding) {
spaceId = projectBranding.spaceId
}
await syncHumhub(null, dbUser, spaceId)
await syncHumhub(null, dbUser, result.result.account.username, spaceId)
}
} catch (e) {
logger.error("couldn't reach out to humhub, disable for now", e)
@ -443,7 +445,11 @@ export class UserResolver {
if (projectBranding) {
spaceId = projectBranding.spaceId
}
await syncHumhub(null, dbUser, spaceId)
try {
await syncHumhub(null, dbUser, dbUser.gradidoID, spaceId)
} catch (e) {
logger.error("createUser: couldn't reach out to humhub, disable for now", e)
}
}
if (redeemCode) {
@ -654,6 +660,10 @@ export class UserResolver {
)
const user = getUser(context)
const updateUserInGMS = compareGmsRelevantUserSettings(user, updateUserInfosArgs)
const publishNameLogic = new PublishNameLogic(user)
const oldHumhubUsername = publishNameLogic.getUserIdentifier(
user.humhubPublishName as PublishNameType,
)
// try {
if (firstName) {
@ -759,7 +769,7 @@ export class UserResolver {
}
try {
if (CONFIG.HUMHUB_ACTIVE) {
await syncHumhub(updateUserInfosArgs, user)
await syncHumhub(updateUserInfosArgs, user, oldHumhubUsername)
}
} catch (e) {
logger.error('error sync user with humhub', e)
@ -835,7 +845,7 @@ export class UserResolver {
}
const humhubUserAccount = new HumhubAccount(dbUser)
const autoLoginUrlPromise = humhubClient.createAutoLoginUrl(humhubUserAccount.username, project)
const humhubUser = await syncHumhub(null, dbUser)
const humhubUser = await syncHumhub(null, dbUser, humhubUserAccount.username)
if (!humhubUser) {
throw new LogError("user don't exist (any longer) on humhub and couldn't be created")
}

View File

@ -32,7 +32,7 @@ describe('syncHumhub', () => {
})
it('Should not sync if no relevant changes', async () => {
await syncHumhub(mockUpdateUserInfosArg, new User())
await syncHumhub(mockUpdateUserInfosArg, new User(), 'username')
expect(HumHubClient.getInstance).not.toBeCalled()
// language logging from some other place
expect(logger.debug).toBeCalledTimes(5)
@ -42,7 +42,7 @@ describe('syncHumhub', () => {
it('Should retrieve user from humhub and sync if relevant changes', async () => {
mockUpdateUserInfosArg.firstName = 'New' // Relevant changes
mockUser.firstName = 'New'
await syncHumhub(mockUpdateUserInfosArg, mockUser)
await syncHumhub(mockUpdateUserInfosArg, mockUser, 'username')
expect(logger.debug).toHaveBeenCalledTimes(8) // Four language logging calls, two debug calls in function, one for not syncing
expect(logger.info).toHaveBeenLastCalledWith('finished sync user with humhub', {
localId: mockUser.id,

View File

@ -2,10 +2,9 @@ import { User } from '@entity/User'
import { HumHubClient } from '@/apis/humhub/HumHubClient'
import { GetUser } from '@/apis/humhub/model/GetUser'
import { PostUser } from '@/apis/humhub/model/PostUser'
import { ExecutedHumhubAction, syncUser } from '@/apis/humhub/syncUser'
import { PublishNameLogic } from '@/data/PublishName.logic'
import { UpdateUserInfosArgs } from '@/graphql/arg/UpdateUserInfosArgs'
import { PublishNameType } from '@/graphql/enum/PublishNameType'
import { backendLogger as logger } from '@/server/logger'
/**
@ -17,6 +16,7 @@ import { backendLogger as logger } from '@/server/logger'
export async function syncHumhub(
updateUserInfosArg: UpdateUserInfosArgs | null,
user: User,
oldHumhubUsername: string,
spaceId?: number | null,
): Promise<GetUser | null | undefined> {
// check for humhub relevant changes
@ -38,15 +38,13 @@ export async function syncHumhub(
return
}
logger.debug('retrieve user from humhub')
const userNameLogic = new PublishNameLogic(user)
const username = userNameLogic.getUserIdentifier(user.humhubPublishName as PublishNameType)
let humhubUser = await humhubClient.userByUsername(username)
let humhubUser = await humhubClient.userByUsername(oldHumhubUsername)
if (!humhubUser) {
humhubUser = await humhubClient.userByEmail(user.emailContact.email)
}
const humhubUsers = new Map<string, GetUser>()
if (humhubUser) {
humhubUsers.set(user.emailContact.email, humhubUser)
humhubUsers.set(humhubUser.account.username, humhubUser)
}
logger.debug('update user at humhub')
const result = await syncUser(user, humhubUsers)
@ -63,7 +61,8 @@ export async function syncHumhub(
logger.debug(`user added to space ${spaceId}`)
}
if (result !== ExecutedHumhubAction.SKIP) {
return await humhubClient.userByUsername(username)
const getUser = new PostUser(user)
return await humhubClient.userByUsername(getUser.account.username)
}
return humhubUser
}

View File

@ -6,6 +6,8 @@ import { backendLogger as logger } from '@/server/logger'
CONFIG.EMAIL = true
CONFIG.EMAIL_TEST_MODUS = false
CONFIG.HUMHUB_ACTIVE = false
CONFIG.GMS_ACTIVE = false
jest.setTimeout(1000000)

View File

@ -401,6 +401,131 @@
dependencies:
tslib "^2.4.0"
"@esbuild/aix-ppc64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.25.3.tgz#014180d9a149cffd95aaeead37179433f5ea5437"
integrity sha512-W8bFfPA8DowP8l//sxjJLSLkD8iEjMc7cBVyP+u4cEv9sM7mdUCkgsj+t0n/BWPFtv7WWCN5Yzj0N6FJNUUqBQ==
"@esbuild/android-arm64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.25.3.tgz#649e47e04ddb24a27dc05c395724bc5f4c55cbfe"
integrity sha512-XelR6MzjlZuBM4f5z2IQHK6LkK34Cvv6Rj2EntER3lwCBFdg6h2lKbtRjpTTsdEjD/WSe1q8UyPBXP1x3i/wYQ==
"@esbuild/android-arm@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.25.3.tgz#8a0f719c8dc28a4a6567ef7328c36ea85f568ff4"
integrity sha512-PuwVXbnP87Tcff5I9ngV0lmiSu40xw1At6i3GsU77U7cjDDB4s0X2cyFuBiDa1SBk9DnvWwnGvVaGBqoFWPb7A==
"@esbuild/android-x64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.25.3.tgz#e2ab182d1fd06da9bef0784a13c28a7602d78009"
integrity sha512-ogtTpYHT/g1GWS/zKM0cc/tIebFjm1F9Aw1boQ2Y0eUQ+J89d0jFY//s9ei9jVIlkYi8AfOjiixcLJSGNSOAdQ==
"@esbuild/darwin-arm64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.25.3.tgz#c7f3166fcece4d158a73dcfe71b2672ca0b1668b"
integrity sha512-eESK5yfPNTqpAmDfFWNsOhmIOaQA59tAcF/EfYvo5/QWQCzXn5iUSOnqt3ra3UdzBv073ykTtmeLJZGt3HhA+w==
"@esbuild/darwin-x64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.25.3.tgz#d8c5342ec1a4bf4b1915643dfe031ba4b173a87a"
integrity sha512-Kd8glo7sIZtwOLcPbW0yLpKmBNWMANZhrC1r6K++uDR2zyzb6AeOYtI6udbtabmQpFaxJ8uduXMAo1gs5ozz8A==
"@esbuild/freebsd-arm64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.3.tgz#9f7d789e2eb7747d4868817417cc968ffa84f35b"
integrity sha512-EJiyS70BYybOBpJth3M0KLOus0n+RRMKTYzhYhFeMwp7e/RaajXvP+BWlmEXNk6uk+KAu46j/kaQzr6au+JcIw==
"@esbuild/freebsd-x64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.25.3.tgz#8ad35c51d084184a8e9e76bb4356e95350a64709"
integrity sha512-Q+wSjaLpGxYf7zC0kL0nDlhsfuFkoN+EXrx2KSB33RhinWzejOd6AvgmP5JbkgXKmjhmpfgKZq24pneodYqE8Q==
"@esbuild/linux-arm64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.25.3.tgz#3af0da3d9186092a9edd4e28fa342f57d9e3cd30"
integrity sha512-xCUgnNYhRD5bb1C1nqrDV1PfkwgbswTTBRbAd8aH5PhYzikdf/ddtsYyMXFfGSsb/6t6QaPSzxtbfAZr9uox4A==
"@esbuild/linux-arm@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.25.3.tgz#e91cafa95e4474b3ae3d54da12e006b782e57225"
integrity sha512-dUOVmAUzuHy2ZOKIHIKHCm58HKzFqd+puLaS424h6I85GlSDRZIA5ycBixb3mFgM0Jdh+ZOSB6KptX30DD8YOQ==
"@esbuild/linux-ia32@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.25.3.tgz#81025732d85b68ee510161b94acdf7e3007ea177"
integrity sha512-yplPOpczHOO4jTYKmuYuANI3WhvIPSVANGcNUeMlxH4twz/TeXuzEP41tGKNGWJjuMhotpGabeFYGAOU2ummBw==
"@esbuild/linux-loong64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.25.3.tgz#3c744e4c8d5e1148cbe60a71a11b58ed8ee5deb8"
integrity sha512-P4BLP5/fjyihmXCELRGrLd793q/lBtKMQl8ARGpDxgzgIKJDRJ/u4r1A/HgpBpKpKZelGct2PGI4T+axcedf6g==
"@esbuild/linux-mips64el@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.25.3.tgz#1dfe2a5d63702db9034cc6b10b3087cc0424ec26"
integrity sha512-eRAOV2ODpu6P5divMEMa26RRqb2yUoYsuQQOuFUexUoQndm4MdpXXDBbUoKIc0iPa4aCO7gIhtnYomkn2x+bag==
"@esbuild/linux-ppc64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.25.3.tgz#2e85d9764c04a1ebb346dc0813ea05952c9a5c56"
integrity sha512-ZC4jV2p7VbzTlnl8nZKLcBkfzIf4Yad1SJM4ZMKYnJqZFD4rTI+pBG65u8ev4jk3/MPwY9DvGn50wi3uhdaghg==
"@esbuild/linux-riscv64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.25.3.tgz#a9ea3334556b09f85ccbfead58c803d305092415"
integrity sha512-LDDODcFzNtECTrUUbVCs6j9/bDVqy7DDRsuIXJg6so+mFksgwG7ZVnTruYi5V+z3eE5y+BJZw7VvUadkbfg7QA==
"@esbuild/linux-s390x@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.25.3.tgz#f6a7cb67969222b200974de58f105dfe8e99448d"
integrity sha512-s+w/NOY2k0yC2p9SLen+ymflgcpRkvwwa02fqmAwhBRI3SC12uiS10edHHXlVWwfAagYSY5UpmT/zISXPMW3tQ==
"@esbuild/linux-x64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.25.3.tgz#a237d3578ecdd184a3066b1f425e314ade0f8033"
integrity sha512-nQHDz4pXjSDC6UfOE1Fw9Q8d6GCAd9KdvMZpfVGWSJztYCarRgSDfOVBY5xwhQXseiyxapkiSJi/5/ja8mRFFA==
"@esbuild/netbsd-arm64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.3.tgz#4c15c68d8149614ddb6a56f9c85ae62ccca08259"
integrity sha512-1QaLtOWq0mzK6tzzp0jRN3eccmN3hezey7mhLnzC6oNlJoUJz4nym5ZD7mDnS/LZQgkrhEbEiTn515lPeLpgWA==
"@esbuild/netbsd-x64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.25.3.tgz#12f6856f8c54c2d7d0a8a64a9711c01a743878d5"
integrity sha512-i5Hm68HXHdgv8wkrt+10Bc50zM0/eonPb/a/OFVfB6Qvpiirco5gBA5bz7S2SHuU+Y4LWn/zehzNX14Sp4r27g==
"@esbuild/openbsd-arm64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.3.tgz#ca078dad4a34df192c60233b058db2ca3d94bc5c"
integrity sha512-zGAVApJEYTbOC6H/3QBr2mq3upG/LBEXr85/pTtKiv2IXcgKV0RT0QA/hSXZqSvLEpXeIxah7LczB4lkiYhTAQ==
"@esbuild/openbsd-x64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.25.3.tgz#c9178adb60e140e03a881d0791248489c79f95b2"
integrity sha512-fpqctI45NnCIDKBH5AXQBsD0NDPbEFczK98hk/aa6HJxbl+UtLkJV2+Bvy5hLSLk3LHmqt0NTkKNso1A9y1a4w==
"@esbuild/sunos-x64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.25.3.tgz#03765eb6d4214ff27e5230af779e80790d1ee09f"
integrity sha512-ROJhm7d8bk9dMCUZjkS8fgzsPAZEjtRJqCAmVgB0gMrvG7hfmPmz9k1rwO4jSiblFjYmNvbECL9uhaPzONMfgA==
"@esbuild/win32-arm64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.25.3.tgz#f1c867bd1730a9b8dfc461785ec6462e349411ea"
integrity sha512-YWcow8peiHpNBiIXHwaswPnAXLsLVygFwCB3A7Bh5jRkIBFWHGmNQ48AlX4xDvQNoMZlPYzjVOQDYEzWCqufMQ==
"@esbuild/win32-ia32@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.25.3.tgz#77491f59ef6c9ddf41df70670d5678beb3acc322"
integrity sha512-qspTZOIGoXVS4DpNqUYUs9UxVb04khS1Degaw/MnfMe7goQ3lTfQ13Vw4qY/Nj0979BGvMRpAYbs/BAxEvU8ew==
"@esbuild/win32-x64@0.25.3":
version "0.25.3"
resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.25.3.tgz#b17a2171f9074df9e91bfb07ef99a892ac06412a"
integrity sha512-ICgUR+kPimx0vvRzf+N/7L7tVSQeE3BYY+NhHRHXS1kBuPO7z2+7ea2HbhDyZdTephgvNvKrlDDKUexuCVBVvg==
"@eslint-community/eslint-plugin-eslint-comments@^3.2.1":
version "3.2.1"
resolved "https://registry.yarnpkg.com/@eslint-community/eslint-plugin-eslint-comments/-/eslint-plugin-eslint-comments-3.2.1.tgz#3c65061e27f155eae3744c3b30c5a8253a959040"
@ -3040,6 +3165,37 @@ es-to-primitive@^1.3.0:
is-date-object "^1.0.5"
is-symbol "^1.0.4"
esbuild@~0.25.0:
version "0.25.3"
resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.25.3.tgz#371f7cb41283e5b2191a96047a7a89562965a285"
integrity sha512-qKA6Pvai73+M2FtftpNKRxJ78GIjmFXFxd/1DVBqGo/qNhLSfv+G12n9pNoWdytJC8U00TrViOwpjT0zgqQS8Q==
optionalDependencies:
"@esbuild/aix-ppc64" "0.25.3"
"@esbuild/android-arm" "0.25.3"
"@esbuild/android-arm64" "0.25.3"
"@esbuild/android-x64" "0.25.3"
"@esbuild/darwin-arm64" "0.25.3"
"@esbuild/darwin-x64" "0.25.3"
"@esbuild/freebsd-arm64" "0.25.3"
"@esbuild/freebsd-x64" "0.25.3"
"@esbuild/linux-arm" "0.25.3"
"@esbuild/linux-arm64" "0.25.3"
"@esbuild/linux-ia32" "0.25.3"
"@esbuild/linux-loong64" "0.25.3"
"@esbuild/linux-mips64el" "0.25.3"
"@esbuild/linux-ppc64" "0.25.3"
"@esbuild/linux-riscv64" "0.25.3"
"@esbuild/linux-s390x" "0.25.3"
"@esbuild/linux-x64" "0.25.3"
"@esbuild/netbsd-arm64" "0.25.3"
"@esbuild/netbsd-x64" "0.25.3"
"@esbuild/openbsd-arm64" "0.25.3"
"@esbuild/openbsd-x64" "0.25.3"
"@esbuild/sunos-x64" "0.25.3"
"@esbuild/win32-arm64" "0.25.3"
"@esbuild/win32-ia32" "0.25.3"
"@esbuild/win32-x64" "0.25.3"
escalade@^3.1.1, escalade@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5"
@ -3675,7 +3831,7 @@ fs.realpath@^1.0.0:
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==
fsevents@^2.3.2, fsevents@~2.3.2:
fsevents@^2.3.2, fsevents@~2.3.2, fsevents@~2.3.3:
version "2.3.3"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6"
integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==
@ -3784,7 +3940,7 @@ get-symbol-description@^1.1.0:
es-errors "^1.3.0"
get-intrinsic "^1.2.6"
get-tsconfig@^4.10.0:
get-tsconfig@^4.10.0, get-tsconfig@^4.7.5:
version "4.10.0"
resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.10.0.tgz#403a682b373a823612475a4c2928c7326fc0f6bb"
integrity sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A==
@ -7091,6 +7247,16 @@ tsutils@^3.21.0:
dependencies:
tslib "^1.8.1"
tsx@^4.19.3:
version "4.19.3"
resolved "https://registry.yarnpkg.com/tsx/-/tsx-4.19.3.tgz#2bdbcb87089374d933596f8645615142ed727666"
integrity sha512-4H8vUNGNjQ4V2EOoGw005+c+dGuPSnhpPBPHBtsZdGZBk/iJb4kguGlPWaZTZ3q5nMtFOEsY0nRDlh9PJyd6SQ==
dependencies:
esbuild "~0.25.0"
get-tsconfig "^4.7.5"
optionalDependencies:
fsevents "~2.3.3"
tunnel@0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c"

View File

@ -2,7 +2,7 @@
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
await queryFn(
`INSERT INTO gradido_community.contributions
`INSERT INTO contributions
(user_id, created_at, contribution_date, memo, amount, moderator_id, confirmed_by, confirmed_at, transaction_id)
SELECT
user_id,
@ -15,12 +15,12 @@ SELECT
balance_date AS confirmed_at,
id
FROM
gradido_community.transactions
transactions
WHERE
type_id = 1
AND NOT EXISTS(
SELECT * FROM gradido_community.contributions
WHERE gradido_community.contributions.transaction_id = gradido_community.transactions.id);`,
SELECT * FROM contributions
WHERE contributions.transaction_id = transactions.id);`,
)
}

View File

@ -13,11 +13,15 @@
"up": "cross-env TZ=UTC node build/src/index.js up",
"down": "cross-env TZ=UTC node build/src/index.js down",
"reset": "cross-env TZ=UTC node build/src/index.js reset",
"dev_up": "cross-env TZ=UTC ts-node src/index.ts up",
"dev_down": "cross-env TZ=UTC ts-node src/index.ts down",
"dev_reset": "cross-env TZ=UTC ts-node src/index.ts reset",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write"
"lint:fix": "biome check --error-on-warnings . --write",
"clear": "cross-env TZ=UTC tsx src/index.ts clear",
"dev_up": "cross-env TZ=UTC tsx src/index.ts up",
"dev_down": "cross-env TZ=UTC tsx src/index.ts down",
"dev_reset": "cross-env TZ=UTC tsx src/index.ts reset",
"up:backend_test": "cross-env TZ=UTC DB_DATABASE=gradido_test_backend tsx src/index.ts up",
"up:federation_test": "cross-env TZ=UTC DB_DATABASE=gradido_test_federation tsx src/index.ts up",
"up:dht_test": "cross-env TZ=UTC DB_DATABASE=gradido_test_dht tsx src/index.ts up"
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
@ -28,6 +32,7 @@
"ncp": "^2.0.0",
"prettier": "^2.8.7",
"ts-node": "^10.9.2",
"tsx": "^4.19.3",
"typescript": "^4.9.5"
},
"dependencies": {

55
database/src/clear.ts Normal file
View File

@ -0,0 +1,55 @@
import { Connection, createConnection } from 'mysql2/promise'
import { CONFIG } from './config'
/**
 * Truncate every table in the connected database except the migrations table.
 *
 * Foreign key checks are temporarily disabled so that truncation order does
 * not matter, and are restored afterwards even if one of the truncates fails.
 *
 * @param connection - open mysql2/promise connection, already bound to the
 *                     target database
 */
export async function truncateTables(connection: Connection) {
  const [tables] = await connection.query('SHOW TABLES')
  // `SHOW TABLES` yields one row per table with a single, database-dependent
  // column name, so take the first value of each row.
  const tableNames = (tables as any[]).map((table) => Object.values(table)[0])
  if (tableNames.length === 0) {
    // No tables found in database - nothing to truncate.
    return
  }
  // Disable foreign key checks so tables can be truncated in any order.
  await connection.query('SET FOREIGN_KEY_CHECKS = 0')
  try {
    for (const tableName of tableNames) {
      // Keep the migrations bookkeeping table so the schema version survives.
      if (tableName === CONFIG.MIGRATIONS_TABLE) {
        continue
      }
      await connection.query(`TRUNCATE TABLE \`${tableName}\``)
    }
  } finally {
    // Always re-enable foreign key checks, even when a truncate failed;
    // otherwise the session (or pooled connection) is left unprotected.
    await connection.query('SET FOREIGN_KEY_CHECKS = 1')
  }
}
/**
 * Connect to the database configured via CONFIG and truncate all of its
 * tables (except the migrations table, see truncateTables).
 *
 * The connection is always closed, even when truncation throws, so repeated
 * runs cannot leak connections.
 */
export async function clearDatabase() {
  const connection = await createConnection({
    host: CONFIG.DB_HOST,
    port: CONFIG.DB_PORT,
    user: CONFIG.DB_USER,
    password: CONFIG.DB_PASSWORD,
    database: CONFIG.DB_DATABASE,
  })
  try {
    await truncateTables(connection)
    // Database cleared successfully.
  } finally {
    await connection.end()
  }
}
// CLI entry point: run the clear operation when this file is executed
// directly (e.g. `tsx src/clear.ts`), exiting 0 on success and 1 on failure.
if (require.main === module) {
  const onSuccess = () => {
    // Database clear operation completed.
    process.exit(0)
  }
  const onFailure = (error: unknown) => {
    // biome-ignore lint/suspicious/noConsole: no logger present
    console.error('Failed to clear database:', error)
    process.exit(1)
  }
  clearDatabase().then(onSuccess).catch(onFailure)
}

View File

@ -22,6 +22,8 @@ const migrations = {
MIGRATIONS_TABLE: process.env.MIGRATIONS_TABLE || 'migrations',
}
const nodeEnv = process.env.NODE_ENV || 'development'
// Check config version
constants.CONFIG_VERSION.CURRENT = process.env.CONFIG_VERSION || constants.CONFIG_VERSION.DEFAULT
if (
@ -34,4 +36,4 @@ if (
)
}
export const CONFIG = { ...constants, ...database, ...migrations }
export const CONFIG = { ...constants, ...database, ...migrations, NODE_ENV: nodeEnv }

View File

@ -1,14 +1,43 @@
import { CONFIG } from './config'
import { createDatabase } from './prepare'
import { DatabaseState, getDatabaseState } from './prepare'
import path from 'path'
import path from 'node:path'
import { createPool } from 'mysql'
import { Migration } from 'ts-mysql-migrate'
import { clearDatabase } from './clear'
import { latestDbVersion } from './config/detectLastDBVersion'
const run = async (command: string) => {
if (command === 'clear') {
if (CONFIG.NODE_ENV === 'production') {
throw new Error('Clearing database in production is not allowed')
}
await clearDatabase()
return
}
// Database actions not supported by our migration library
await createDatabase()
// await createDatabase()
const state = await getDatabaseState()
if (state === DatabaseState.NOT_CONNECTED) {
throw new Error(
`Database not connected, is database server running?
host: ${CONFIG.DB_HOST}
port: ${CONFIG.DB_PORT}
user: ${CONFIG.DB_USER}
password: ${CONFIG.DB_PASSWORD.slice(-2)}
database: ${CONFIG.DB_DATABASE}`,
)
}
if (state === DatabaseState.HIGHER_VERSION) {
throw new Error('Database version is higher than required, please switch to the correct branch')
}
if (state === DatabaseState.SAME_VERSION) {
if (command === 'up') {
// biome-ignore lint/suspicious/noConsole: no logger present
console.log('Database is up to date')
return
}
}
// Initialize Migrations
const pool = createPool({
host: CONFIG.DB_HOST,
@ -34,12 +63,29 @@ const run = async (command: string) => {
await migration.down() // use for downgrade script
break
case 'reset':
// TODO protect from production
if (CONFIG.NODE_ENV === 'production') {
throw new Error('Resetting database in production is not allowed')
}
await migration.reset()
break
default:
throw new Error(`Unsupported command ${command}`)
}
if (command === 'reset') {
// biome-ignore lint/suspicious/noConsole: no logger present
console.log('Database was reset')
} else {
const currentDbVersion = await migration.getLastVersion()
// biome-ignore lint/suspicious/noConsole: no logger present
console.log(`Database was ${command} migrated to version: ${currentDbVersion.fileName}`)
if (latestDbVersion === currentDbVersion.fileName.split('.')[0]) {
// biome-ignore lint/suspicious/noConsole: no logger present
console.log('Database is now up to date')
} else {
// biome-ignore lint/suspicious/noConsole: no logger present
console.log('The latest database version is: ', latestDbVersion)
}
}
// Terminate connections gracefully
pool.end()

View File

@ -1,20 +1,40 @@
import { createConnection } from 'mysql2/promise'
/* eslint-disable no-unused-vars */
import { Connection, ResultSetHeader, RowDataPacket, createConnection } from 'mysql2/promise'
import { CONFIG } from './config'
import { latestDbVersion } from './config/detectLastDBVersion'
export const createDatabase = async (): Promise<void> => {
const con = await createConnection({
host: CONFIG.DB_HOST,
port: CONFIG.DB_PORT,
user: CONFIG.DB_USER,
password: CONFIG.DB_PASSWORD,
})
export enum DatabaseState {
NOT_CONNECTED = 'NOT_CONNECTED',
LOWER_VERSION = 'LOWER_VERSION',
HIGHER_VERSION = 'HIGHER_VERSION',
SAME_VERSION = 'SAME_VERSION',
}
await con.connect()
async function connectToDatabaseServer(): Promise<Connection | null> {
try {
return await createConnection({
host: CONFIG.DB_HOST,
port: CONFIG.DB_PORT,
user: CONFIG.DB_USER,
password: CONFIG.DB_PASSWORD,
})
} catch (e) {
// biome-ignore lint/suspicious/noConsole: no logger present
console.log('could not connect to database server', e)
return null
}
}
// Create Database `gradido_community`
await con.query(`
CREATE DATABASE IF NOT EXISTS ${CONFIG.DB_DATABASE}
export const getDatabaseState = async (): Promise<DatabaseState> => {
const connection = await connectToDatabaseServer()
if (!connection) {
return DatabaseState.NOT_CONNECTED
}
// make sure the database exists
const [result] = await connection.query<ResultSetHeader>(`
CREATE DATABASE IF NOT EXISTS ${CONFIG.DB_DATABASE}
DEFAULT CHARACTER SET utf8mb4
DEFAULT COLLATE utf8mb4_unicode_ci;`)
@ -31,8 +51,27 @@ export const createDatabase = async (): Promise<void> => {
console.log('Found and dropped old migrations table')
}
}
*/
if (result.affectedRows === 1) {
// biome-ignore lint/suspicious/noConsole: no logger present
console.log(`Database ${CONFIG.DB_DATABASE} created`)
return DatabaseState.LOWER_VERSION
}
await con.end()
await connection.query(`USE ${CONFIG.DB_DATABASE}`)
// check if the database is up to date
const [rows] = await connection.query<RowDataPacket[]>(
`SELECT * FROM ${CONFIG.MIGRATIONS_TABLE} ORDER BY version DESC LIMIT 1`,
)
if (rows.length === 0) {
return DatabaseState.LOWER_VERSION
}
connection.destroy()
const dbVersion = rows[0].fileName.split('.')[0]
return dbVersion === latestDbVersion
? DatabaseState.SAME_VERSION
: dbVersion < latestDbVersion
? DatabaseState.LOWER_VERSION
: DatabaseState.HIGHER_VERSION
}

File diff suppressed because it is too large Load Diff

View File

@ -14,7 +14,7 @@
"dev": "cross-env TZ=UTC nodemon -w src --ext ts --exec ts-node -r dotenv/config -r tsconfig-paths/register src/index.ts",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write",
"test": "cross-env TZ=UTC NODE_ENV=development jest --runInBand --forceExit --detectOpenHandles"
"test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_dht jest --runInBand --forceExit --detectOpenHandles"
},
"dependencies": {
"@hyperswarm/dht": "^6.4.4",

File diff suppressed because it is too large Load Diff

View File

@ -11,7 +11,7 @@
"build": "tsc --build",
"clean": "tsc --build --clean",
"start": "cross-env TZ=UTC TS_NODE_BASEURL=./build node -r tsconfig-paths/register build/src/index.js",
"test": "cross-env TZ=UTC NODE_ENV=development jest --runInBand --forceExit --detectOpenHandles",
"test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_federation jest --runInBand --forceExit --detectOpenHandles",
"dev": "cross-env TZ=UTC nodemon -w src --ext ts --exec ts-node -r dotenv/config -r tsconfig-paths/register src/index.ts",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write"

File diff suppressed because it is too large Load Diff

View File

@ -67,6 +67,7 @@ const props = defineProps({
})
const emit = defineEmits([
'close-all-open-collapse',
'update-list-contributions',
'update-contribution-form',
'delete-contribution',

View File

@ -14,6 +14,7 @@ vi.mock('@vue/apollo-composable', () => ({
onResult: vi.fn(),
onError: vi.fn(),
load: vi.fn(),
refetch: vi.fn(),
})),
useMutation: vi.fn(() => ({
mutate: vi.fn(),

View File

@ -246,7 +246,7 @@ function deleteContribution(item) {
}
}
const { onResult, onError, load } = useLazyQuery(listContributionMessages, {
const { onResult, onError, load, refetch } = useLazyQuery(listContributionMessages, {
contributionId: props.contributionId,
})
@ -254,9 +254,15 @@ function getListContributionMessages(closeCollapse = true) {
if (closeCollapse) {
emit('close-all-open-collapse')
}
load(listContributionMessages, {
const variables = {
contributionId: props.contributionId,
})
}
// load works only once and return false on second call
if (!load(listContributionMessages, variables)) {
// update list data every time getListContributionMessages is called
// because it could be added new messages
refetch(variables)
}
}
onResult((resultValue) => {

View File

@ -95,6 +95,9 @@ describe('GddTransactionList', () => {
})
describe('timestamp property', () => {
beforeEach(async () => {
await wrapper.setProps({ timestamp: new Date().getTime() })
})
it('emits update-transactions when timestamp changes', async () => {
await wrapper.setProps({ timestamp: 0 })
expect(wrapper.emitted('update-transactions')).toBeTruthy()

View File

@ -101,7 +101,7 @@ export default {
this.updateTransactions()
},
timestamp: {
immediate: true,
immediate: false,
handler: 'updateTransactions',
},
},

View File

@ -256,8 +256,8 @@ export const communityStatistics = gql`
`
export const searchAdminUsers = gql`
query {
searchAdminUsers {
query ($pageSize: Int = 25, $currentPage: Int = 1, $order: Order = ASC) {
searchAdminUsers(pageSize: $pageSize, currentPage: $currentPage, order: $order) {
userCount
userList {
firstName

View File

@ -0,0 +1,56 @@
# Aggregated account balance figures returned alongside a transaction list.
fragment balanceFields on Balance {
  balance
  balanceGDT
  count
  linkCount
}

# All per-transaction fields shown in the transaction list, including the
# linked counterpart user and the decay applied over the interval.
fragment transactionFields on Transaction {
  id
  typeId
  amount
  balance
  previousBalance
  balanceDate
  memo
  linkedUser {
    firstName
    lastName
    communityUuid
    communityName
    gradidoID
    alias
  }
  decay {
    decay
    start
    end
    duration
  }
  linkId
}

# Paginated transaction list together with the current account balance.
query transactionsQuery($currentPage: Int = 1, $pageSize: Int = 25, $order: Order = DESC) {
  transactionList(currentPage: $currentPage, pageSize: $pageSize, order: $order) {
    balance {
      ...balanceFields
    }
    transactions {
      ...transactionFields
    }
  }
}

# Same selection as transactionsQuery, but additionally fetches the
# community's total user count in the same round trip.
query transactionsUserCountQuery($currentPage: Int = 1, $pageSize: Int = 25, $order: Order = DESC) {
  transactionList(currentPage: $currentPage, pageSize: $pageSize, order: $order) {
    balance {
      ...balanceFields
    }
    transactions {
      ...transactionFields
    }
  }
  communityStatistics {
    totalUsers
  }
}

View File

@ -5,6 +5,7 @@ import DashboardLayout from './DashboardLayout'
import { createStore } from 'vuex'
import { createRouter, createWebHistory } from 'vue-router'
import routes from '@/routes/routes'
import { useQuery } from '@vue/apollo-composable'
const toastErrorSpy = vi.fn()
@ -14,18 +15,35 @@ vi.mock('@/composables/useToast', () => ({
}),
}))
const mockQueryFn = vi.fn()
const mockRefetchFn = vi.fn()
const mockMutateFn = vi.fn()
let onErrorHandler
let onResultHandler
const mockQueryResult = ref(null)
const loading = ref(false)
vi.mock('@vue/apollo-composable', () => ({
useLazyQuery: vi.fn(() => ({
load: mockQueryFn,
useQuery: vi.fn(() => ({
refetch: mockRefetchFn,
result: mockQueryResult,
onResult: vi.fn(),
onError: vi.fn(),
onResult: (handler) => {
onResultHandler = handler
},
onError: (handler) => {
onErrorHandler = handler
},
loading,
})),
useLazyQuery: vi.fn(() => ({
refetch: mockRefetchFn,
result: mockQueryResult,
onResult: (handler) => {
onResultHandler = handler
},
onError: (handler) => {
onErrorHandler = handler
},
loading,
})),
useMutation: vi.fn(() => ({
mutate: mockMutateFn,
@ -103,17 +121,6 @@ describe('DashboardLayout', () => {
beforeEach(() => {
vi.useFakeTimers()
mockQueryFn.mockResolvedValue({
communityStatistics: {
totalUsers: 3113,
activeUsers: 1057,
deletedUsers: 35,
totalGradidoCreated: '4083774.05000000000000000000',
totalGradidoDecayed: '-1062639.13634129622923372197',
totalGradidoAvailable: '2513565.869444365732411569',
totalGradidoUnbookedDecayed: '-500474.6738366222166261272',
},
})
wrapper = createWrapper()
})
@ -135,31 +142,32 @@ describe('DashboardLayout', () => {
describe('after a timeout', () => {
beforeEach(async () => {
vi.advanceTimersByTime(1500)
loading.value = false
await nextTick()
})
describe('update transactions', () => {
beforeEach(async () => {
mockQueryResult.value = {
transactionList: {
balance: {
balanceGDT: '100',
count: 4,
linkCount: 8,
balance: '1450',
onResultHandler({
data: {
transactionList: {
balance: {
balanceGDT: '100',
count: 4,
linkCount: 8,
balance: '1450',
},
transactions: ['transaction1', 'transaction2', 'transaction3', 'transaction4'],
},
transactions: ['transaction1', 'transaction2', 'transaction3', 'transaction4'],
},
}
mockQueryFn.mockResolvedValue(mockQueryResult.value)
})
await wrapper.vm.updateTransactions({ currentPage: 2, pageSize: 5 })
await nextTick() // Ensure all promises are resolved
})
it('load call to the API', () => {
expect(mockQueryFn).toHaveBeenCalled()
expect(useQuery).toHaveBeenCalled()
})
it('updates balance', () => {
@ -190,7 +198,7 @@ describe('DashboardLayout', () => {
describe('update transactions returns error', () => {
beforeEach(async () => {
mockQueryFn.mockRejectedValue(new Error('Ouch!'))
wrapper.vm.skeleton = false
await wrapper
.findComponent({ ref: 'router-view' })
.vm.$emit('update-transactions', { currentPage: 2, pageSize: 5 })
@ -202,6 +210,7 @@ describe('DashboardLayout', () => {
})
it('toasts the error message', () => {
onErrorHandler({ message: 'Ouch!' })
expect(toastErrorSpy).toHaveBeenCalledWith('Ouch!')
})
})

View File

@ -187,11 +187,10 @@
</template>
<script setup>
import { ref, onMounted } from 'vue'
import { onMounted, ref } from 'vue'
import { useStore } from 'vuex'
import { useRouter } from 'vue-router'
import { useLazyQuery, useMutation } from '@vue/apollo-composable'
import { useI18n } from 'vue-i18n'
import { useQuery, useMutation } from '@vue/apollo-composable'
import ContentHeader from '@/layouts/templates/ContentHeader'
import CommunityTemplate from '@/layouts/templates/CommunityTemplate'
import Breadcrumb from '@/components/Breadcrumb/breadcrumb'
@ -207,21 +206,23 @@ import GdtAmount from '@/components/Template/ContentHeader/GdtAmount'
import CommunityMember from '@/components/Template/ContentHeader/CommunityMember'
import NavCommunity from '@/components/Template/ContentHeader/NavCommunity'
import LastTransactions from '@/components/Template/RightSide/LastTransactions'
import { transactionsQuery, communityStatistics } from '@/graphql/queries'
import { transactionsUserCountQuery } from '@/graphql/transactions.graphql'
import { logout } from '@/graphql/mutations'
import CONFIG from '@/config'
import { useAppToast } from '@/composables/useToast'
const store = useStore()
const router = useRouter()
const { load: useCommunityStatsQuery } = useLazyQuery(communityStatistics)
const {
load: useTransactionsQuery,
refetch: useRefetchTransactionsQuery,
result: transactionQueryResult,
} = useLazyQuery(transactionsQuery, {}, { fetchPolicy: 'network-only' })
onError,
onResult,
} = useQuery(
transactionsUserCountQuery,
{ currentPage: 1, pageSize: 10, order: 'DESC' },
{ fetchPolicy: 'network-only' },
)
const { mutate: useLogoutMutation } = useMutation(logout)
const { t } = useI18n()
const { toastError } = useAppToast()
const balance = ref(0)
@ -230,15 +231,11 @@ const transactions = ref([])
const transactionCount = ref(0)
const transactionLinkCount = ref(0)
const pending = ref(true)
const visible = ref(false)
const hamburger = ref(true)
const darkMode = ref(false)
const skeleton = ref(true)
const totalUsers = ref(null)
// only error correction, normally skeleton should be visible less than 1500ms
onMounted(() => {
updateTransactions({ currentPage: 1, pageSize: 10 })
getCommunityStatistics()
setTimeout(() => {
skeleton.value = false
}, 1500)
@ -255,50 +252,38 @@ const logoutUser = async () => {
}
}
const updateTransactions = async ({ currentPage, pageSize }) => {
const updateTransactions = ({ currentPage, pageSize }) => {
pending.value = true
try {
await loadOrFetchTransactionQuery({ currentPage, pageSize })
if (!transactionQueryResult) return
const { transactionList } = transactionQueryResult.value
GdtBalance.value =
transactionList.balance.balanceGDT === null ? 0 : Number(transactionList.balance.balanceGDT)
transactions.value = transactionList.transactions
balance.value = Number(transactionList.balance.balance)
transactionCount.value = transactionList.balance.count
transactionLinkCount.value = transactionList.balance.linkCount
pending.value = false
} catch (error) {
pending.value = true
transactionCount.value = -1
toastError(error.message)
}
useRefetchTransactionsQuery({ currentPage, pageSize })
}
const loadOrFetchTransactionQuery = async (queryVariables = { currentPage: 1, pageSize: 25 }) => {
return (
(await useTransactionsQuery(transactionsQuery, queryVariables)) ||
(await useRefetchTransactionsQuery(queryVariables))
)
}
const getCommunityStatistics = async () => {
try {
const result = await useCommunityStatsQuery()
totalUsers.value = result.communityStatistics.totalUsers
} catch {
toastError(t('communityStatistics has no result, use default data'))
onResult((value) => {
if (value && value.data) {
if (value.data.transactionList) {
const tr = value.data.transactionList
GdtBalance.value = tr.balance?.balanceGDT === null ? 0 : Number(tr.balance?.balanceGDT)
transactions.value = tr.transactions || []
balance.value = Number(tr.balance?.balance) || 0
transactionCount.value = tr.balance?.count || 0
transactionLinkCount.value = tr.balance?.linkCount || 0
}
if (value.data.communityStatistics) {
totalUsers.value = value.data.communityStatistics.totalUsers || 0
}
}
}
pending.value = false
skeleton.value = false
})
onError((error) => {
transactionCount.value = -1
toastError(error.message)
})
const admin = () => {
window.location.assign(CONFIG.ADMIN_AUTH_URL + store.state.token)
store.dispatch('logout') // logout without redirect
}
const setVisible = (bool) => {
visible.value = bool
}
</script>
<style>
.breadcrumb {

View File

@ -29,7 +29,7 @@
:show-pagination="true"
:page-size="pageSize"
@close-all-open-collapse="closeAllOpenCollapse"
@update-list-contributions="handleUpdateListAllContributions"
@update-list-contributions="handleUpdateListContributions"
@update-contribution-form="handleUpdateContributionForm"
@delete-contribution="handleDeleteContribution"
@update-status="updateStatus"
@ -272,10 +272,6 @@ const handleUpdateContributionForm = (item) => {
router.push({ params: { tab: 'contribute' } })
}
const updateTransactions = (pagination) => {
emit('update-transactions', pagination)
}
const updateStatus = (id) => {
const item = items.value.find((item) => item.id === id)
if (item) {

View File

@ -49,7 +49,11 @@ const moderators = computed(() => itemsAdminUser.value.filter((item) => item.rol
const { onResult: onContributionLinksResult, onError: onContributionLinksError } =
useQuery(listContributionLinks)
const { onResult: onAdminUsersResult, onError: onAdminUsersError } = useQuery(searchAdminUsers)
const { onResult: onAdminUsersResult, onError: onAdminUsersError } = useQuery(searchAdminUsers, {
pageSize: 25,
currentPage: 1,
order: 'ASC',
})
onContributionLinksResult(({ data }) => {
if (data) {
@ -72,12 +76,4 @@ onContributionLinksError(() => {
onAdminUsersError(() => {
toastError('searchAdminUsers has no result, use default data')
})
const updateTransactions = (pagination) => {
emit('update-transactions', pagination)
}
onMounted(() => {
updateTransactions(0)
})
</script>

View File

@ -172,7 +172,4 @@ function onBack() {
function updateTransactions(pagination) {
emit('update-transactions', pagination)
}
// Equivalent to created hook
updateTransactions({})
</script>