Merge branch 'master' into frontend_community_message_keep_open

This commit is contained in:
einhornimmond 2025-05-16 07:36:26 +02:00
commit 3c86bd1362
367 changed files with 21386 additions and 2159 deletions

View File

@ -1,3 +1,5 @@
**/node_modules
**/build
**/coverage
**/coverage
.git
**/.turbo

View File

@ -33,6 +33,12 @@ admin: &admin
backend: &backend
- 'backend/**/*'
config: &config
- 'config-schema/**/*'
database: &database
- 'database/**/*'
dht_node: &dht_node
- 'dht-node/**/*'

91
.github/workflows/lint.yml vendored Normal file
View File

@ -0,0 +1,91 @@
name: Linting with biomejs
on: push
jobs:
lint:
runs-on: ubuntu-latest
outputs:
config-schema: ${{ steps.config-schema.outputs.success }}
backend: ${{ steps.backend.outputs.success }}
database: ${{ steps.database.outputs.success }}
dht-node: ${{ steps.dht-node.outputs.success }}
federation: ${{ steps.federation.outputs.success }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Biome
uses: biomejs/setup-biome@v2
with:
version: latest
- name: Lint - Config-Schema
id: config-schema
run: |
cd ./config-schema
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
- name: Lint - Backend
id: backend
run: |
cd ./backend
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
- name: Lint - Database Up
id: database
run: |
cd ./database
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
- name: Lint - DHT Node
id: dht-node
run: |
cd ./dht-node
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
- name: Lint - Federation
id: federation
run: |
cd ./federation
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint_config_schema:
name: Lint - Config-Schema
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.config-schema }}" != "true" ]; then exit 1; fi
lint_backend:
name: Lint - Backend
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.backend }}" != "true" ]; then exit 1; fi
lint_database:
name: Lint - Database Up
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.database }}" != "true" ]; then exit 1; fi
lint_dht_node:
name: Lint - DHT Node
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.dht-node }}" != "true" ]; then exit 1; fi
lint_federation:
name: Lint - Federation
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.federation }}" != "true" ]; then exit 1; fi

View File

@ -18,7 +18,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
@ -55,7 +55,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
@ -80,6 +80,80 @@ jobs:
name: docker-backend-production
path: /tmp/backend.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION DHT-NODE ######################################
##############################################################################
build_production_dht-node:
name: Docker Build Production - DHT-Node
runs-on: ubuntu-latest
#needs: [nothing]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
- name: ENV - VERSION
run: echo "VERSION=$(node -p -e "require('./package.json').version")" >> $GITHUB_ENV
- name: ENV - BUILD_DATE
run: echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV
- name: ENV - BUILD_VERSION
run: echo "BUILD_VERSION=${VERSION}.${GITHUB_RUN_NUMBER}" >> $GITHUB_ENV
- name: ENV - BUILD_COMMIT
run: echo "BUILD_COMMIT=${GITHUB_SHA}" >> $GITHUB_ENV
##########################################################################
# DHT-NODE ################################################################
##########################################################################
- name: DHT-Node | Build `production` image
run: |
docker build -f ./dht-node/Dockerfile --target production -t "gradido/dht-node:latest" -t "gradido/dht-node:production" -t "gradido/dht-node:${VERSION}" -t "gradido/dht-node:${BUILD_VERSION}" .
docker save "gradido/dht-node" > /tmp/dht-node.tar
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: docker-dht-node-production
path: /tmp/dht-node.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION FEDERATION ######################################
##############################################################################
build_production_federation:
name: Docker Build Production - Federation
runs-on: ubuntu-latest
#needs: [nothing]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
- name: ENV - VERSION
run: echo "VERSION=$(node -p -e "require('./package.json').version")" >> $GITHUB_ENV
- name: ENV - BUILD_DATE
run: echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV
- name: ENV - BUILD_VERSION
run: echo "BUILD_VERSION=${VERSION}.${GITHUB_RUN_NUMBER}" >> $GITHUB_ENV
- name: ENV - BUILD_COMMIT
run: echo "BUILD_COMMIT=${GITHUB_SHA}" >> $GITHUB_ENV
##########################################################################
# FEDERATION ##############################################################
##########################################################################
- name: Federation | Build `production` image
run: |
docker build -f ./federation/Dockerfile --target production -t "gradido/federation:latest" -t "gradido/federation:production" -t "gradido/federation:${VERSION}" -t "gradido/federation:${BUILD_VERSION}" .
docker save "gradido/federation" > /tmp/federation.tar
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: docker-federation-production
path: /tmp/federation.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION DATABASE UP ###################################
##############################################################################
@ -92,7 +166,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# DATABASE UP ############################################################
##########################################################################
@ -106,43 +180,6 @@ jobs:
name: docker-database-production_up
path: /tmp/database_up.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION MARIADB #######################################
##############################################################################
build_production_mariadb:
name: Docker Build Production - MariaDB
runs-on: ubuntu-latest
#needs: [nothing]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
##########################################################################
# SET ENVS ###############################################################
##########################################################################
- name: ENV - VERSION
run: echo "VERSION=$(node -p -e "require('./package.json').version")" >> $GITHUB_ENV
- name: ENV - BUILD_DATE
run: echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV
- name: ENV - BUILD_VERSION
run: echo "BUILD_VERSION=${VERSION}.${GITHUB_RUN_NUMBER}" >> $GITHUB_ENV
- name: ENV - BUILD_COMMIT
run: echo "BUILD_COMMIT=${GITHUB_SHA}" >> $GITHUB_ENV
##########################################################################
# MARIADB ################################################################
##########################################################################
- name: MariaDB | Build `production` image
run: |
docker build -t "gradido/mariadb:latest" -t "gradido/mariadb:production" -t "gradido/mariadb:${VERSION}" -t "gradido/mariadb:${BUILD_VERSION}" -f ./mariadb/Dockerfile ./
docker save "gradido/mariadb" > /tmp/mariadb.tar
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: docker-mariadb-production
path: /tmp/mariadb.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION NGINX #########################################
##############################################################################
@ -155,7 +192,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
@ -186,7 +223,7 @@ jobs:
upload_to_dockerhub:
name: Upload to Dockerhub
runs-on: ubuntu-latest
needs: [build_production_frontend, build_production_backend, build_production_database_up, build_production_mariadb, build_production_nginx]
needs: [build_production_frontend, build_production_backend, build_production_database_up, build_production_nginx]
env:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
@ -195,7 +232,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# DOWNLOAD DOCKER IMAGES #################################################
##########################################################################
@ -213,6 +250,20 @@ jobs:
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/backend.tar
- name: Download Docker Image (DHT-Node)
uses: actions/download-artifact@v4
with:
name: docker-dht-node-production
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/dht-node.tar
- name: Download Docker Image (Federation)
uses: actions/download-artifact@v4
with:
name: docker-federation-production
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/federation.tar
- name: Download Docker Image (Database)
uses: actions/download-artifact@v4
with:
@ -220,11 +271,6 @@ jobs:
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/database_up.tar
- name: Download Docker Image (MariaDB)
uses: actions/download-artifact@v4
with:
name: docker-mariadb-production
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/mariadb.tar
- name: Download Docker Image (Nginx)
@ -243,10 +289,12 @@ jobs:
run: docker push --all-tags gradido/frontend
- name: Push backend
run: docker push --all-tags gradido/backend
- name: Push dht-node
run: docker push --all-tags gradido/dht-node
- name: Push federation
run: docker push --all-tags gradido/federation
- name: Push database
run: docker push --all-tags gradido/database
- name: Push MariaDB
run: docker push --all-tags gradido/mariadb
- name: Push Nginx
run: docker push --all-tags gradido/nginx
@ -262,7 +310,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch full History for changelog
##########################################################################

View File

@ -10,6 +10,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
admin: ${{ steps.changes.outputs.admin }}
config: ${{ steps.changes.outputs.config }}
steps:
- uses: actions/checkout@v3.3.0
@ -23,7 +24,7 @@ jobs:
build_test:
if: needs.files-changed.outputs.admin == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.admin == 'true'
name: Docker Build Test - Admin Interface
needs: files-changed
runs-on: ubuntu-latest
@ -33,43 +34,56 @@ jobs:
uses: actions/checkout@v3
- name: Admin Interface | Build 'test' image
run: docker build -f ./admin/Dockerfile --target test -t "gradido/admin:test" --build-arg NODE_ENV="test" .
run: docker build -f ./admin/Dockerfile --target production -t "gradido/admin:production" --build-arg NODE_ENV="production" --build-arg BUILD_COMMIT=$(git rev-parse HEAD) --build-arg BUILD_COMMIT_SHORT=$(git rev-parse --short HEAD) .
unit_test:
if: needs.files-changed.outputs.admin == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.admin == 'true'
name: Unit Tests - Admin Interface
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: |
bun install --filter admin --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Admin Interface | Unit tests
run: cd admin && yarn global add node-gyp && yarn && yarn run test
id: test
run: |
turbo admin#test admin#lint
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint:
if: needs.files-changed.outputs.admin == 'true'
name: Lint - Admin Interface
needs: files-changed
needs: unit_test
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Admin Interface | Lint
run: cd admin && yarn global add node-gyp && yarn && yarn run lint
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
stylelint:
if: needs.files-changed.outputs.admin == 'true'
name: Stylelint - Admin Interface
needs: files-changed
needs: unit_test
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Admin Interface | Stylelint
run: cd admin && yarn global add node-gyp && yarn && yarn run stylelint
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
locales:
if: needs.files-changed.outputs.admin == 'true'
@ -81,4 +95,4 @@ jobs:
uses: actions/checkout@v3
- name: Admin Interface | Locales
run: cd admin && yarn global add node-gyp && yarn && yarn run locales
run: cd admin && yarn locales

View File

@ -8,6 +8,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
backend: ${{ steps.changes.outputs.backend }}
config: ${{ steps.changes.outputs.config }}
database: ${{ steps.changes.outputs.database }}
docker-compose: ${{ steps.changes.outputs.docker-compose }}
mariadb: ${{ steps.changes.outputs.mariadb }}
@ -23,7 +24,7 @@ jobs:
list-files: shell
build_test:
if: needs.files-changed.outputs.backend == 'true'
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.database == 'true'
name: Docker Build Test - Backend
needs: files-changed
runs-on: ubuntu-latest
@ -32,43 +33,60 @@ jobs:
uses: actions/checkout@v3
- name: Backend | Build 'test' image
run: docker build -f ./backend/Dockerfile --target test -t "gradido/backend:test" .
run: docker build -f ./backend/Dockerfile --target production -t "gradido/backend:production" .
unit_test:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true'
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true' || needs.files-changed.outputs.config == 'true'
name: Unit tests - Backend
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v4
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Backend | install and build
run: cd database && yarn && yarn build && cd ../config && yarn && cd ../backend && yarn && yarn build
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: install bun
uses: oven-sh/setup-bun@v2
- name: Backend | prepare database
run: cd database && yarn up:backend_test
- name: install dependencies
run: |
bun install --filter backend --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Backend | Unit tests
run: cd backend && yarn test
run: turbo backend#test
lint:
typecheck:
if: needs.files-changed.outputs.backend == 'true'
name: Lint - Backend
name: Typecheck - Backend
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Backend | Lint
run: cd database && yarn && cd ../backend && yarn && yarn run lint
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: |
bun install --filter backend --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Backend | Typecheck
run: turbo backend#typecheck backend#build
locales:
if: needs.files-changed.outputs.backend == 'true'
@ -80,4 +98,4 @@ jobs:
uses: actions/checkout@v3
- name: Backend | Locales
run: cd backend && yarn && yarn locales
run: cd backend && yarn locales

40
.github/workflows/test_config.yml vendored Normal file
View File

@ -0,0 +1,40 @@
name: Gradido Config Schema Test CI
on: push
jobs:
files-changed:
name: Detect File Changes - Config-Schema
runs-on: ubuntu-latest
outputs:
config: ${{ steps.changes.outputs.config }}
docker-compose: ${{ steps.changes.outputs.docker-compose }}
steps:
- uses: actions/checkout@v3.3.0
- name: Check for config-schema file changes
uses: dorny/paths-filter@v2.11.1
id: changes
with:
token: ${{ github.token }}
filters: .github/file-filters.yml
list-files: shell
build:
name: typecheck - Config-Schema
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.docker-compose == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: bun install --filter config-schema --frozen-lockfile
- name: typecheck
run: cd config-schema && yarn typecheck

View File

@ -30,8 +30,8 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Database | Build 'test_up' image
run: docker build --target test_up -t "gradido/database:test_up" database/
- name: Database | Build image
run: docker build --target build -t "gradido/database:build" -f database/Dockerfile .
database_migration_test:
if: needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true'
@ -42,15 +42,28 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: docker-compose mariadb
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Database | docker-compose
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach mariadb
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: |
bun install --filter database --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Database | up
run: docker compose -f docker-compose.yml up --no-deps database
run: turbo up
- name: Database | reset
run: docker compose -f docker-compose.yml -f docker-compose.reset.yml up --no-deps database
run: turbo reset
lint:
if: needs.files-changed.outputs.database == 'true'
name: Lint - Database Up
@ -60,5 +73,13 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Database | Lint
run: cd database && yarn && yarn run lint
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: |
bun install --filter database --frozen-lockfile
bun install --global turbo@^2
- name: Database | build & typecheck
run: turbo database#build database#typecheck

View File

@ -7,13 +7,14 @@ jobs:
name: Detect File Changes - DHT Node
runs-on: ubuntu-latest
outputs:
config: ${{ steps.changes.outputs.config }}
database: ${{ steps.changes.outputs.database }}
dht_node: ${{ steps.changes.outputs.dht_node }}
docker-compose: ${{ steps.changes.outputs.docker-compose }}
steps:
- uses: actions/checkout@v3.3.0
- name: Check for frontend file changes
- name: Check for dht-node, config-schema, database, docker-compose file changes
uses: dorny/paths-filter@v2.11.1
id: changes
with:
@ -23,49 +24,41 @@ jobs:
build:
name: Docker Build Test - DHT Node
if: needs.files-changed.outputs.dht_node == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.dht_node == 'true' || needs.files-changed.outputs.docker-compose == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Build 'test' image
run: docker build --target test -t "gradido/dht-node:test" -f dht-node/Dockerfile .
lint:
name: Lint - DHT Node
if: needs.files-changed.outputs.dht_node == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: DHT-Node | Lint
run: cd database && yarn && cd ../dht-node && yarn && yarn run lint
- name: Build 'production' image
run: docker build --target production -t "gradido/dht-node:production" -f dht-node/Dockerfile .
unit_test:
name: Unit Tests - DHT Node
if: needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.dht_node == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true'
needs: [files-changed, build]
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.dht_node == 'true' || needs.files-changed.outputs.docker-compose == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: DHT-Node | install and build
run: cd database && yarn && yarn build && cd ../config && yarn && cd ../dht-node && yarn && yarn build
- name: install bun
uses: oven-sh/setup-bun@v2
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: install dependencies
run: |
bun install --filter dht-node --frozen-lockfile
bun install --global --no-save turbo@^2
- name: DHT-Node | prepare database
run: cd database && yarn up:dht_test
- name: DHT-Node | Unit tests
run: cd dht-node && yarn test
- name: run unit test & build & typecheck
run: turbo dht-node#test dht-node#build dht-node#typecheck

View File

@ -8,40 +8,64 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Boot up test system | docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach mariadb
- name: install bun
uses: oven-sh/setup-bun@v2
- name: Boot up test system | docker-compose mariadb mailserver
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach mariadb mailserver
- name: Prepare test system
run: |
sudo chown runner:docker -R *
cd database && yarn && yarn build
cd ../config && yarn
cd ../backend && yarn
bun install
sudo cp ./nginx/e2e-test.conf /etc/nginx/sites-available/default
- name: Boot up test system | seed backend
run: bun turbo seed
- name: Moving logs after seeding
run: |
mkdir -p /home/runner/work/gradido/gradido/logs/backend/seed
mv /home/runner/work/gradido/gradido/logs/backend/*.log /home/runner/work/gradido/gradido/logs/backend/seed/
- name: Boot up test system | docker-compose backend, frontend
run: |
cd backend
cp .env.test_e2e .env
cd ..
bun turbo backend#build
bun turbo frontend#build
bun turbo backend#start frontend#start --env-mode=loose &
- name: End-to-end tests | prepare
run: |
wget --no-verbose -O /opt/cucumber-json-formatter "https://github.com/cucumber/json-formatter/releases/download/v19.0.0/cucumber-json-formatter-linux-386"
chmod +x /opt/cucumber-json-formatter
sudo ln -fs /opt/cucumber-json-formatter /usr/bin/cucumber-json-formatter
cd e2e-tests/
yarn
bun install --production
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Boot up test system | seed backend
- name: wait for frontend and backend to be ready
run: |
cd database && yarn dev_reset
cd ../backend && yarn seed
- name: Boot up test system | docker-compose backend, frontend, admin, nginx, mailserver
until nc -z 127.0.0.1 3000; do echo waiting for frontend; sleep 1; done;
until nc -z 127.0.0.1 4000; do echo waiting for backend; sleep 1; done;
- name: Start local nginx webserver
run: |
cd backend
cp .env.test_e2e .env
cd ..
docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps backend frontend admin nginx mailserver
sudo nginx -t
sudo systemctl start nginx
- name: wait for nginx and mailserver to be ready
run: |
until nc -z 127.0.0.1 80; do echo waiting for nginx; sleep 1; done;
until nc -z 127.0.0.1 1025; do echo waiting for mailserver; sleep 1; done;
- name: End-to-end tests | run tests
id: e2e-tests
@ -67,3 +91,18 @@ jobs:
with:
name: cypress-report-pr-#${{ steps.pr.outputs.number }}
path: /home/runner/work/gradido/gradido/e2e-tests/cypress/reports/cucumber_html_report
- name: End-to-end tests | if tests failed, upload video
id: e2e-video
if: ${{ failure() && steps.e2e-tests.conclusion == 'failure' }}
uses: actions/upload-artifact@v4
with:
name: cypress-videos-pr-#${{ steps.pr.outputs.number }}
path: /home/runner/work/gradido/gradido/e2e-tests/cypress/videos
- name: End-to-end tests | if tests failed, upload logs
if: ${{ failure() && steps.e2e-tests.conclusion == 'failure' }}
uses: actions/upload-artifact@v4
with:
name: backend-logs-pr-#${{ steps.pr.outputs.number }}
path: /home/runner/work/gradido/gradido/logs/backend

View File

@ -7,6 +7,8 @@ jobs:
name: Detect File Changes - Federation
runs-on: ubuntu-latest
outputs:
config: ${{ steps.changes.outputs.config }}
database: ${{ steps.changes.outputs.database }}
docker-compose: ${{ steps.changes.outputs.docker-compose }}
federation: ${{ steps.changes.outputs.federation }}
steps:
@ -30,57 +32,33 @@ jobs:
uses: actions/checkout@v3
- name: Build `test` image
run: |
docker build --target test -t "gradido/federation:test" -f federation/Dockerfile .
docker save "gradido/federation:test" > /tmp/federation.tar
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: docker-federation-test
path: /tmp/federation.tar
lint:
name: Lint - Federation
if: needs.files-changed.outputs.federation == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Lint
run: cd federation && yarn && yarn run lint
run: docker build --target production -t "gradido/federation:production" -f federation/Dockerfile .
unit_test:
name: Unit Tests - Federation
if: needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.federation == 'true' || needs.files-changed.outputs.mariadb == 'true'
needs: [files-changed, build]
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
- name: Download Docker Image
uses: actions/download-artifact@v4
with:
name: docker-federation-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/federation.tar
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: install bun
uses: oven-sh/setup-bun@v2
- name: Federation | install and build
run: cd database && yarn && yarn build && cd ../config && yarn && cd ../federation && yarn && yarn build
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Federation | prepare database
run: cd database && yarn up:federation_test
- name: install dependencies
run: |
bun install --filter federation --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Federation | Unit tests
run: docker run --env NODE_ENV=test --env DB_HOST=mariadb --network gradido_internal-net --rm gradido/federation:test yarn run test
id: test
run: turbo federation#test federation#build federation#typecheck

View File

@ -9,6 +9,7 @@ jobs:
name: Detect File Changes - Frontend
runs-on: ubuntu-latest
outputs:
config: ${{ steps.changes.outputs.config }}
frontend: ${{ steps.changes.outputs.frontend }}
steps:
- uses: actions/checkout@v3.3.0
@ -23,7 +24,7 @@ jobs:
build_test:
if: needs.files-changed.outputs.frontend == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.frontend == 'true'
name: Docker Build Test - Frontend
needs: files-changed
runs-on: ubuntu-latest
@ -33,43 +34,69 @@ jobs:
uses: actions/checkout@v3
- name: Frontend | Build 'test' image
run: docker build -f ./frontend/Dockerfile --target test -t "gradido/frontend:test" --build-arg NODE_ENV="test" .
run: docker build -f ./frontend/Dockerfile --target production -t "gradido/frontend:production" --build-arg NODE_ENV="production" --build-arg BUILD_COMMIT=$(git rev-parse HEAD) --build-arg BUILD_COMMIT_SHORT=$(git rev-parse --short HEAD) .
unit_test:
if: needs.files-changed.outputs.frontend == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.frontend == 'true'
name: Unit Tests - Frontend
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: bun install --filter frontend --frozen-lockfile
- name: Frontend | Unit tests
run: cd frontend && yarn global add node-gyp && yarn && yarn run test
run: cd frontend && yarn test
lint:
if: needs.files-changed.outputs.frontend == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.frontend == 'true'
name: Lint - Frontend
needs: files-changed
runs-on: ubuntu-latest
outputs:
success: ${{ steps.lint.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: |
bun install --filter frontend --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Frontend | Lint
run: cd frontend && yarn global add node-gyp && yarn && yarn run lint
id: lint
run: |
turbo frontend#lint
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
stylelint:
if: needs.files-changed.outputs.frontend == 'true'
name: Stylelint - Frontend
needs: files-changed
needs: [files-changed, lint]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Frontend | Stylelint
run: cd frontend && yarn global add node-gyp && yarn && yarn run stylelint
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.success }}" != "true" ]; then exit 1; fi
locales:
if: needs.files-changed.outputs.frontend == 'true'
@ -81,4 +108,4 @@ jobs:
uses: actions/checkout@v3
- name: Frontend | Locales
run: cd frontend && yarn global add node-gyp && yarn && yarn run locales
run: cd frontend && yarn locales

2
.gitignore vendored
View File

@ -2,10 +2,12 @@
.project
*.log
*.bak
.turbo
/node_modules/*
messages.pot
nbproject
.metadata
/out/*
/.env
package-lock.json
/deployment/bare_metal/.env

196
Dockerfile Normal file
View File

@ -0,0 +1,196 @@
##################################################################################
# BASE ###########################################################################
##################################################################################
FROM node:18.20.7-bookworm as base
# ENVs (available in production as well, can be overwritten by command line or env file)
ENV TURBO_CACHE_DIR=/tmp/turbo
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
ENV DOCKER_WORKDIR="/app"
## We Cannot do `$(date -u +'%Y-%m-%dT%H:%M:%SZ')` here so we use unix timestamp=0
ENV BUILD_DATE="1970-01-01T00:00:00.00Z"
## We cannot do $(npm run version).${BUILD_NUMBER} here so we default to 0.0.0.0
ENV BUILD_VERSION="0.0.0.0"
## We cannot do `$(git rev-parse --short HEAD)` here so we default to 0000000
ARG BUILD_COMMIT
ENV BUILD_COMMIT=${BUILD_COMMIT}
## SET NODE_ENV
ENV NODE_ENV="production"
## App relevant Envs
ENV BACKEND_PORT="4000"
ENV FEDERATION_PORT="5010"
ENV FRONTEND_MODULE_PORT="3000"
ENV ADMIN_MODULE_PORT="8080"
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
LABEL org.label-schema.name="gradido:backend"
LABEL org.label-schema.description="Gradido GraphQL Backend"
LABEL org.label-schema.usage="https://github.com/gradido/gradido/blob/master/README.md"
LABEL org.label-schema.url="https://gradido.net"
LABEL org.label-schema.vcs-url="https://github.com/gradido/gradido/tree/master/backend"
LABEL org.label-schema.vcs-ref="${BUILD_COMMIT}"
LABEL org.label-schema.vendor="Gradido Community"
LABEL org.label-schema.version="${BUILD_VERSION}"
LABEL org.label-schema.schema-version="1.0"
LABEL maintainer="support@gradido.net"
# Install Additional Software
## install: git
#apk add --no-cache libc6-compat
#RUN apk --no-cache add git
# Install bun
# RUN apt-get update && apt-get install -y curl unzip
RUN curl -fsSL https://bun.sh/install | BUN_INSTALL=/usr/local bash
# Add bun to PATH
# Install turbo globally
RUN bun install --global turbo
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
#RUN yarn global add turbo
# Settings
## Expose Container Port
EXPOSE ${BACKEND_PORT}
EXPOSE ${FEDERATION_PORT}
EXPOSE ${FRONTEND_MODULE_PORT}
EXPOSE ${ADMIN_MODULE_PORT}
## Workdir
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
##################################################################################
# DEVELOPMENT (Connected to the local environment, to reload on demand) ##########
##################################################################################
FROM base as development
# We don't need to copy or build anything since we're going to bind to the
# local filesystem, which will need a rebuild anyway
# Run command
# (for development we need to execute yarn install since the
# node_modules are on another volume and need updating)
CMD /bin/sh -c "bun install && turbo dev --env-mode=loose"
##################################################################################
# INSTALL (Does contain all node_modules) ########################################
##################################################################################
FROM base as install
# Copy everything
COPY --chown=app:app ./ ./
# yarn install
RUN bun install --frozen-lockfile --non-interactive
# Try with bun; fall back to yarn if problems occur
# Go into the admin folder and use yarn to install local dependencies that need nohoist for @vee-validate/i18n, which isn't supported by bun
#RUN bun install --frozen-lockfile
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM install as test
# Run command
CMD /bin/sh -c "turbo test --env-mode=loose"
##################################################################################
# RESET DB #######################################################################
##################################################################################
FROM install as reset
# Run command
CMD /bin/sh -c "cd database && bun run reset"
##################################################################################
# BUILD (Does contain all files and is therefore bloated) ########################
##################################################################################
FROM install as build
# turbo build
RUN turbo build --env-mode=loose
##################################################################################
# PRODUCTION #####################################################################
##################################################################################
FROM build as production
# Run command
CMD /bin/sh -c "turbo start --env-mode=loose"
##################################################################################
# FINAL PRODUCTION IMAGE #########################################################
##################################################################################
FROM node:18.20.7-bookworm-slim as production2
ENV TURBO_CACHE_DIR=/tmp/turbo
ENV DOCKER_WORKDIR="/app"
ENV NODE_ENV="production"
ENV DB_HOST=mariadb
WORKDIR ${DOCKER_WORKDIR}
# Copy only the build artifacts from the previous build stage
COPY --chown=app:app --from=build /app/node_modules ./node_modules
COPY --chown=app:app --from=build /app/package.json ./package.json
COPY --chown=app:app --from=build /app/yarn.lock ./yarn.lock
COPY --chown=app:app --from=build /app/turbo.json ./turbo.json
# and Turbo cache to prevent rebuilding
COPY --chown=app:app --from=build /tmp/turbo ./tmp/turbo
RUN yarn global add turbo
COPY --chown=app:app --from=build /app/backend ./backend
COPY --chown=app:app --from=build /app/frontend ./frontend
COPY --chown=app:app --from=build /app/admin ./admin
COPY --chown=app:app --from=build /app/database ./database
COPY --chown=app:app --from=build /app/config-schema ./config-schema
COPY --chown=app:app --from=build /app/federation ./federation
COPY --chown=app:app --from=build /app/dht-node ./dht-node
# Expose ports
EXPOSE ${BACKEND_PORT}
EXPOSE ${FEDERATION_PORT}
EXPOSE ${FRONTEND_MODULE_PORT}
EXPOSE ${ADMIN_MODULE_PORT}
# Command to start
CMD ["turbo", "start", "--env-mode=loose"]
##################################################################################
# FINAL PRODUCTION IMAGE #########################################################
##################################################################################
FROM node:18.20.7-alpine3.21 as production-slim
ENV TURBO_CACHE_DIR=/tmp/turbo
ENV DOCKER_WORKDIR="/app"
ENV NODE_ENV="production"
ENV DB_HOST=mariadb
WORKDIR ${DOCKER_WORKDIR}
# Expose ports
EXPOSE ${BACKEND_PORT}
EXPOSE ${FEDERATION_PORT}
EXPOSE ${FRONTEND_MODULE_PORT}
EXPOSE ${ADMIN_MODULE_PORT}
# Copy only the build artifacts from the previous build stage
COPY --chown=app:app --from=build /app/backend/build ./backend/build
COPY --chown=app:app --from=build /app/backend/locales ./backend/locales
COPY --chown=app:app --from=build /app/backend/log4js-config.json ./backend/log4js-config.json
COPY --chown=app:app --from=build /app/dht-node/build ./dht-node/build
COPY --chown=app:app --from=build /app/dht-node/log4js-config.json ./dht-node/log4js-config.json
COPY --chown=app:app --from=build /app/federation/build ./federation/build
COPY --chown=app:app --from=build /app/federation/log4js-config.json ./federation/log4js-config.json
COPY --chown=app:app --from=build /app/frontend/build ./frontend
COPY --chown=app:app --from=build /app/admin/build ./admin
RUN yarn global add udx-native@1.5.3 sodium-native@4.0.0
CMD ["turbo", "start", "--env-mode=loose"]

183
README.md
View File

@ -10,37 +10,109 @@ The dominant financial system threatens to fail around the globe, followed by ma
Find out more about the Project on its [Website](https://gradido.net/). It is offering vast resources about the idea. The remaining document will discuss the gradido software only.
## Software requirements
## Getting Started
Currently we only support `docker` install instructions to run all services, since many different programming languages and frameworks are used.
We are still in active development, so some things might not work as expected. If you encounter any issues, please feel free to report them via the [Issue Tracker](https://github.com/gradido/gradido/issues). Your feedback is valuable as we continue to build a more sustainable financial system!
- [docker](https://www.docker.com/)
- [docker-compose]
- [yarn](https://phoenixnap.com/kb/yarn-windows)
### For Arch Linux
Install the required packages:
### Get Gradido to your local machine
Clone the Gradido repository to your local machine.
```bash
sudo pacman -S docker
sudo pacman -S docker-compose
git clone https://github.com/gradido/gradido.git
cd gradido
```
Add group `docker` and then your user to it in order to allow you to run docker without sudo
For local development, you can run Gradido with **Docker** or **natively**, depending on your preferences and system setup. If you don't have a native MariaDB or MySQL installation, Docker can be used to handle the database as well.
### Docker Setup
You can also run Gradido using Docker.
- **Development Mode (Hot-Reload)**:
```bash
sudo groupadd docker # may already exist `groupadd: group 'docker' already exists`
sudo usermod -aG docker $USER
groups # verify you have the group (requires relog)
docker compose up
```
Start the docker service:
- **Production Build**:
```bash
sudo systemctl start docker
docker compose -f docker-compose.yml up
```
This will launch the following services as containers:
| Service | Description |
| --- | --- |
| gradido | Backend & Frontend (All Modules) |
| mariadb | MariaDB Database Server |
| nginx | Webserver acting as a reverse proxy |
#### Nginx Routing Overview
```mermaid
graph TD;
A[localhost nginx] -->|/| B[frontend port 3000]
A -->|/admin| C[Admin UI port 8080]
A -->|/graphql| D[backend port 4000]
classDef default fill:#ffdf97,stroke:#333,stroke-width:2px;
class A,B,C,D default;
```
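The routing above can be sanity-checked from the host once the stack is running; a minimal sketch using `curl`, assuming nginx is reachable on port 80 of localhost as in the default compose setup (status codes may vary, e.g. the GraphQL endpoint may reject unauthenticated queries):
```bash
# Check that nginx forwards each path to the expected service
curl -sS -o /dev/null -w "/        -> %{http_code}\n" http://localhost/
curl -sS -o /dev/null -w "/admin   -> %{http_code}\n" http://localhost/admin
curl -sS -o /dev/null -w "/graphql -> %{http_code}\n" \
  -X POST -H 'Content-Type: application/json' \
  --data '{"query":"{ __typename }"}' http://localhost/graphql
```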
### Database Setup
Gradido requires a running **MariaDB** or **MySQL** database instance.
By default, the application expects the following credentials:
- Database name: gradido_community (will be automatically created on startup)
- User: root
- Password: (empty)
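A quick way to verify that a database with these defaults is reachable; a sketch assuming a local MariaDB/MySQL client and a server listening on the default port 3306:
```bash
# Connect as root with an empty password and check for the gradido_community schema
# (the schema is created automatically on first application startup)
mysql -h 127.0.0.1 -P 3306 -u root -e "SHOW DATABASES LIKE 'gradido_community';"
```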
You can either run the database **natively** on your system, or use **Docker** to spin up the database along with an optional phpMyAdmin interface:
- Run database using Docker:
```bash
docker compose up mariadb
```
- To launch phpMyAdmin along with the database:
```bash
docker compose up mariadb phpmyadmin
```
Once started, phpMyAdmin will be available at:
http://localhost:8074
### Native Setup
Install all node modules with [Bun](https://bun.sh/) and [Turborepo](https://turborepo.com/docs/getting-started/installation) (globally, for convenience):
```bash
bun install
bun install --global turbo@^2
```
If this does not work, try using [yarn](https://classic.yarnpkg.com/en/docs/install) instead:
```bash
yarn install
yarn global add turbo@^2
```
- **Development Mode (Hot-Reload)**:
Launches Gradido with hot-reloading for fast iteration.
```bash
turbo dev
```
- **Production Build**:
Builds and runs Gradido optimized for production.
A deployment script for Hetzner Cloud is available [here](./deployment/hetzner_cloud/README.md).
```bash
turbo start
```
[More info on using turbo](./working-native.md)
### For Windows
#### docker
@ -52,61 +124,76 @@ The installation of dockers depends on your selected product package from the [d
* In case Docker Desktop does not start correctly because of previous Docker installations, please clean the directories used by the previous installation - `C:\Users` - before you retry starting Docker Desktop. For further problems running Docker Desktop, please take a look at this description: "[logs and troubleshooting](https://docs.docker.com/desktop/windows/troubleshoot/)"
* In case your Docker Desktop installation causes high memory consumption by the vmmem process, please take a look at this description: "[vmmen process consuming too much memory (Docker Desktop)](https://dev.to/tallesl/vmmen-process-consuming-too-much-memory-docker-desktop-273p)"
#### yarn
### yarn
For the Gradido build process the yarn package manager will be used. Please download and install [yarn for windows](https://phoenixnap.com/kb/yarn-windows) by following the instructions there.
## How to run?
As soon as the software requirements are fulfilled and a Docker installation is up and running, open a PowerShell on Windows or another command-line prompt on Linux.
### ⚡ Workspaces and Bun Compatibility
The project now uses **Workspaces**, and work is ongoing to make all modules **Bun-compatible**. You can currently use `bun install`, but not all modules are fully Bun-compatible yet.
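If a single module misbehaves under Bun, the per-module `--filter` install used by the CI workflows can help narrow the problem down; a sketch (the module name is just an example), with Yarn as the fallback:
```bash
# Install only one workspace package and its dependencies with Bun
bun install --filter frontend --frozen-lockfile

# Fallback: install the same module with Yarn if Bun runs into trouble
cd frontend && yarn install
```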
Create and navigate to the directory where you want to create the Gradido runtime environment.
```
mkdir \Gradido
cd \Gradido
```
### 1. Clone Sources
Clone the repo and pull all submodules
To install bun, run:
```bash
git clone git@github.com:gradido/gradido.git
git submodule update --recursive --init
curl -fsSL https://bun.sh/install | bash
```
### 2. Install modules
You can go into each subfolder (admin, frontend, database, backend, ...) and call ``yarn`` there, or you can call ``yarn installAll``.
### 3. Run docker-compose
Run docker-compose to bring up the development environment
To install dependencies with Bun:
```bash
docker-compose up
bun install
```
### Additional Build options
If you want to build for production, you can do this as well:
Note that some modules are still not fully compatible with Bun. Therefore, continue using **Yarn** for development if you run into any issues.
### EMFILE: too many open files
With
```bash
docker-compose -f docker-compose.yml up
yarn docker_dev
```
or also
```bash
turbo dev
```
many files will be watched by the various services.
This can lead to the error **EMFILE: too many open files**.
If increasing the ulimit (sketched below) doesn't help, consider starting only the services
you are working on in dev mode and the rest in production mode.
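A minimal sketch for raising the limit, assuming Linux and a bash shell (the soft limit can only be raised up to the hard limit):
```bash
# Show the current soft and hard limits for open file descriptors
ulimit -Sn
ulimit -Hn

# Raise the soft limit for the current shell session before starting the dev servers
ulimit -n 65535
```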
For example if you are only working on the frontend, you can start the frontend in dev mode and the rest in production mode:
```bash
yarn docker_dev frontend
```
and in another bash
```bash
yarn docker backend admin database nginx --no-deps
```
or local with turbo
```bash
turbo frontend#dev backend#start admin#start --env-mode=loose
```
Tip: for a local setup, use a local nginx server with a configuration similar to the docker nginx config [nginx.conf](./nginx/gradido.conf), but replace the docker image names with localhost
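A minimal sketch of that local nginx setup, following the same approach the e2e workflow uses and assuming a Debian/Ubuntu-style nginx layout (paths and service names may differ):
```bash
# Copy the Gradido nginx config into place (edit the upstream hostnames to localhost first),
# then test the configuration and reload nginx
sudo cp ./nginx/gradido.conf /etc/nginx/sites-available/default
sudo nginx -t && sudo systemctl reload nginx
```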
## Services defined in this package
- [frontend](./frontend) Wallet frontend
- [admin](./admin) Admin interface
- [backend](./backend) GraphQL & Business logic backend
- [mariadb](./mariadb) Database backend
- [dht-node](./dht-node) DHT node, discovers other Gradido communities
- [dlt-connector](./dlt-connector) DLT Connector (WIP), connects to the blockchain
- [federation](./federation) Federation, processes incoming requests from other Gradido communities
- [database](./database) Contains EntityORM entities and migration code for the database
- [mariadb](./mariadb) Database server
- [config-schema](./config-schema) Contains common configuration schemas
- [e2e-tests](./e2e-tests) End-to-end tests
We are currently restructuring the services to reduce dependencies and unify the business logic in one place. Furthermore, the databases defined for each service will be unified into one.
### Open the wallet
Once you have `docker-compose` up and running, you can open [http://localhost/](http://localhost/) and create yourself a new wallet account.
Once you have Gradido up and running, you can open [http://localhost/](http://localhost/) and create a new wallet account.
## How to release

View File

@ -1,4 +1,5 @@
node_modules
.git
.gitignore
!.eslintignore
!.eslintignore
!.env.git

View File

@ -1,17 +1,8 @@
'use strict'
'use strict';
module.exports = {
extends: ['stylelint-config-standard-scss', 'stylelint-config-recommended-vue'],
overrides: [
{
files: '**/*.{scss}',
customSyntax: 'postcss-scss',
extends: ['stylelint-config-standard-scss'],
},
{
files: '**/*.vue',
customSyntax: 'postcss-html',
extends: ['stylelint-config-recommended-vue'],
},
],
}
"extends": [
"stylelint-config-standard-scss",
"stylelint-config-recommended-vue/scss"
]
};

View File

@ -1,21 +1,29 @@
##################################################################################
# BASE ###########################################################################
##################################################################################
FROM node:18.20-alpine3.20 as base
FROM node:18.20.7-alpine3.21 as base
ENV TURBO_CACHE_DIR=/tmp/turbo
# ENVs (available in production as well, can be overwritten by command line or env file)
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
ENV DOCKER_WORKDIR="/app"
## We Cannot do `$(date -u +'%Y-%m-%dT%H:%M:%SZ')` here so we use unix timestamp=0
ENV BUILD_DATE="1970-01-01T00:00:00.00Z"
## We cannot do $(npm run version).${BUILD_NUMBER} here so we default to 0.0.0.0
ENV BUILD_VERSION="0.0.0.0"
## We cannot do `$(git rev-parse --short HEAD)` here so we default to 0000000
ENV BUILD_COMMIT_SHORT="0000000"
# TODO: get the actual git commit hash into docker
ARG BUILD_VERSION
ENV BUILD_VERSION=${BUILD_VERSION:-'broken'}
ARG BUILD_COMMIT
ENV BUILD_COMMIT=${BUILD_COMMIT:-'decafcabdecafcabdecafcabdecafcabdecafcab'}
ARG BUILD_COMMIT_SHORT
ENV BUILD_COMMIT_SHORT=${BUILD_COMMIT_SHORT:-'decafcab'}
## SET NODE_ENV
ARG NODE_ENV="production"
ARG NODE_ENV=production
ENV NODE_ENV=${NODE_ENV}
## App relevant Envs
ENV PORT="8080"
## Timezone
ENV TZ=UTC
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
@ -33,6 +41,7 @@ LABEL maintainer="support@ogradido.net"
# Install Additional Software
## install: git
#RUN apk --no-cache add git
# RUN bun add --global yarn@1.22.20
# Settings
## Expose Container Port
@ -42,67 +51,57 @@ EXPOSE ${PORT}
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
RUN mkdir -p /config
##################################################################################
# BUN ############################################################################
##################################################################################
FROM base as bun-base
RUN apk update && apk add --no-cache curl tar bash
RUN curl -fsSL https://bun.sh/install | bash
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
##################################################################################
# DEVELOPMENT (Connected to the local environment, to reload on demand) ##########
# Development ####################################################################
##################################################################################
FROM base as development
FROM bun-base AS development
# We don't need to copy or build anything since we're going to bind to the
# local filesystem, which will need a rebuild anyway
# used for getting git commit hash direct from .git
RUN apk update && apk add --no-cache git
# Run command
# (for development we need to execute yarn install since the
# node_modules are on another volume and need updating)
CMD /bin/sh -c "cd /config && yarn install && cd /app && yarn && yarn run dev"
CMD /bin/sh -c "bun install --filter admin --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2 \
&& turbo admin#dev --env-mode=loose"
##################################################################################
# BUILD (Does contain all files and is therefore bloated) ########################
# Build ##########################################################################
##################################################################################
FROM base as build
FROM bun-base as build
# Copy everything
COPY ./admin/ .
# Copy everything from config
COPY ./config/ ../config/
COPY --chown=app:app . .
RUN bun install --filter admin --no-cache --frozen-lockfile \
&& bun install --global turbo@^2
# yarn install and build config
RUN cd ../config && yarn install --production=false --frozen-lockfile --non-interactive && yarn build
# yarn install admin
RUN yarn install --production=false --frozen-lockfile --non-interactive
# yarn build
RUN yarn run build
RUN turbo admin#build --env-mode=loose
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM build as test
# Install Additional Software
RUN apk add --no-cache bash jq
# Run command
CMD /bin/sh -c "yarn run dev"
CMD /bin/sh -c "turbo admin#test --env-mode=loose"
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
##################################################################################
FROM base as production
FROM nginx:1.28.0-alpine3.21-slim as production
# Copy "binary"-files from build image
COPY --from=build ${DOCKER_WORKDIR}/build ./build
COPY --from=build ${DOCKER_WORKDIR}/../config/build ../config/build
# We also copy the node_modules express and serve-static for the run script
COPY --from=build ${DOCKER_WORKDIR}/node_modules ./node_modules
# Copy static files
COPY --from=build ${DOCKER_WORKDIR}/public ./public
# Copy package.json for script definitions (lock file should not be needed)
COPY --from=build ${DOCKER_WORKDIR}/package.json ./package.json
# Copy run scripts run/
COPY --from=build ${DOCKER_WORKDIR}/run ./run
COPY ./nginx/admin.conf /etc/nginx/conf.d/default.conf
# Run command
CMD /bin/sh -c "yarn run start"
WORKDIR /app
# copy builded frontend files
COPY --from=build /app/admin/build/ ./admin/

View File

@ -2,15 +2,14 @@
"name": "admin",
"description": "Administration Interface for Gradido",
"main": "index.js",
"author": "Moriz Wahl",
"author": "Gradido Academy - https://www.gradido.net",
"version": "2.5.2",
"license": "Apache-2.0",
"scripts": {
"start": "node run/server.js",
"dev": "vite",
"build": "vite build",
"serve": "vite preview",
"postbuild": "find build -type f -regex '.*\\.\\(html\\|js\\|css\\|svg\\|json\\)' -exec gzip -9 -k {} +",
"start": "vite preview",
"postbuild": "uname | grep -q Linux && find build -type f -regex '.*\\.\\(html\\|js\\|css\\|svg\\|json\\)' -exec gzip -9 -k {} + || echo 'Skip precompress on non-Linux'",
"lint": "eslint --max-warnings=0 --ext .js,.vue,.json .",
"stylelint": "stylelint --max-warnings=0 '**/*.{scss,vue}'",
"test": "cross-env TZ=UTC vitest run",
@ -20,34 +19,25 @@
"locales": "scripts/sort.sh"
},
"dependencies": {
"@babel/core": "^7.15.8",
"@babel/eslint-parser": "^7.24.8",
"@babel/node": "^7.15.8",
"@babel/preset-env": "^7.15.8",
"@iconify/json": "^2.2.228",
"@vitejs/plugin-vue": "3.2.0",
"@popperjs/core": "^2.11.8",
"@vitejs/plugin-vue": "^5.2.3",
"@vue/apollo-composable": "^4.0.2",
"@vue/apollo-option": "^4.0.0",
"@vue/compat": "3.4.31",
"@vue/eslint-config-prettier": "^6.0.0",
"@vue/compat": "3.5.13",
"apollo-boost": "^0.4.9",
"babel-core": "7.0.0-bridge.0",
"babel-plugin-component": "^1.1.1",
"babel-preset-env": "^1.7.0",
"babel-preset-vue": "^2.0.2",
"bootstrap": "^5.3.3",
"bootstrap-vue-next": "0.26.8",
"date-fns": "^2.29.3",
"dotenv-webpack": "^7.0.3",
"express": "^4.17.1",
"graphql": "^16.9.0",
"graphql": "^15.10.1",
"graphql-tag": "^2.12.6",
"identity-obj-proxy": "^3.0.0",
"portal-vue": "3.0.0",
"qrcanvas-vue": "3.0.0",
"regenerator-runtime": "^0.13.9",
"sass": "^1.77.8",
"vite": "3.2.10",
"unplugin-icons": "^0.19.0",
"unplugin-vue-components": "^0.27.3",
"vite": "^5.4.14",
"vite-plugin-commonjs": "^0.10.1",
"vue": "3.5.13",
"vue-apollo": "3.1.2",
@ -63,49 +53,46 @@
"@intlify/eslint-plugin-vue-i18n": "^1.4.0",
"@vitest/coverage-v8": "^2.0.5",
"@vue/compiler-sfc": "^3.4.32",
"@vue/eslint-config-prettier": "^10.2.0",
"@vue/test-utils": "^2.4.6",
"babel-plugin-transform-require-context": "^0.1.1",
"config-schema": "*",
"cross-env": "^7.0.3",
"eslint": "8.57.0",
"eslint-config-prettier": "8.10.0",
"eslint-config-standard": "^16.0.3",
"eslint-loader": "^4.0.2",
"dotenv-webpack": "^7.0.3",
"eslint": "8.57.1",
"eslint-config-prettier": "^10.1.1",
"eslint-config-standard": "^17.0.0",
"eslint-plugin-import": "^2.25.2",
"eslint-plugin-n": "^16",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "5.2.1",
"eslint-plugin-promise": "^5.1.1",
"eslint-plugin-prettier": "^5.2.3",
"eslint-plugin-promise": "^6.1.1",
"eslint-plugin-vue": "8.7.1",
"gradido-config": "../config",
"joi": "^17.13.3",
"jsdom": "^25.0.0",
"mock-apollo-client": "^1.2.1",
"postcss": "^8.4.8",
"postcss-html": "^1.3.0",
"postcss-scss": "^4.0.3",
"prettier": "^3.3.3",
"stylelint": "16.7.0",
"stylelint-config-recommended-vue": "1.5.0",
"stylelint-config-standard-scss": "13.1.0",
"unplugin-icons": "^0.19.0",
"unplugin-vue-components": "^0.27.3",
"postcss-html": "^1.8.0",
"prettier": "^3.5.3",
"sass": "^1.77.8",
"stylelint": "^16.19.1",
"stylelint-config-recommended-vue": "^1.6.0",
"stylelint-config-standard-scss": "^14.0.0",
"vite-plugin-environment": "^1.1.3",
"vite-plugin-graphql-loader": "^4.0.4",
"vitest": "^2.0.5",
"vitest-canvas-mock": "^0.3.3"
"vitest-canvas-mock": "^0.3.3",
"webpack": "^5"
},
"browserslist": [
"> 1%",
"last 2 versions",
"not ie <= 10"
],
"nodemonConfig": {
"ignore": [
"**/*.spec.js"
]
},
"resolutions": {
"strip-ansi": "6.0.1",
"string-width": "4.2.2",
"wrap-ansi": "7.0.0"
},
"engines": {
"node": ">=18"
}
}

View File

@ -469,7 +469,7 @@ describe('CommunityVisualizeItem', () => {
mocks: {
$t: (key) => key,
$i18n: {
locale: locale,
locale,
},
},
stubs: {

View File

@ -30,8 +30,8 @@ if (process.env.ADMIN_HOSTING === 'nodejs') {
const environment = {
NODE_ENV: process.env.NODE_ENV,
DEBUG: process.env.NODE_ENV !== 'production' ?? false,
PRODUCTION: process.env.NODE_ENV === 'production' ?? false,
DEBUG: process.env.NODE_ENV !== 'production',
PRODUCTION: process.env.NODE_ENV === 'production',
}
// const COMMUNITY_HOST = process.env.COMMUNITY_HOST ?? undefined
@ -48,14 +48,14 @@ const endpoints = {
}
const debug = {
DEBUG_DISABLE_AUTH: process.env.DEBUG_DISABLE_AUTH === 'true' ?? false,
DEBUG_DISABLE_AUTH: process.env.DEBUG_DISABLE_AUTH === 'true',
}
const humhub = {
HUMHUB_ACTIVE: process.env.HUMHUB_ACTIVE === 'true' || false,
HUMHUB_ACTIVE: process.env.HUMHUB_ACTIVE === 'true',
HUMHUB_API_URL: process.env.HUMHUB_API_URL ?? COMMUNITY_URL + '/community/',
}
const OPENAI_ACTIVE = process.env.OPENAI_ACTIVE === 'true' ?? false
const OPENAI_ACTIVE = process.env.OPENAI_ACTIVE === 'true'
const CONFIG = {
...version,
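The `?? false` removals above are not just cosmetic: a strict comparison always yields a boolean, and a boolean is never null or undefined, so the nullish-coalescing fallback can never apply. A standalone sketch (illustrative only, not project code):

```ts
// Standalone sketch: why `=== 'true' ?? false` is dead code.
const raw: string | undefined = process.env.HUMHUB_ACTIVE

const withFallback = (raw === 'true') ?? false // `?? false` can never be taken
const plain = raw === 'true'                   // equivalent and intention-revealing

console.log(withFallback === plain) // always true
```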

View File

@ -1,4 +1,4 @@
const {
import {
APP_VERSION,
BUILD_COMMIT,
BUILD_COMMIT_SHORT,
@ -10,8 +10,8 @@ const {
NODE_ENV,
OPENAI_ACTIVE,
PRODUCTION,
} = require('gradido-config/build/src/commonSchema.js')
const Joi = require('joi')
} from 'config-schema'
import Joi from 'joi'
module.exports = Joi.object({
APP_VERSION,

View File

@ -73,7 +73,7 @@ const route = useRoute()
const { result, refetch } = useQuery(searchUsers, {
query: criteria.value,
filters: filters,
filters,
currentPage: currentPage.value,
pageSize: perPage.value,
order: 'DESC',
@ -138,7 +138,7 @@ watch(
if (newValue !== oldValue) {
await refetch({
query: criteria.value,
filters: filters,
filters,
currentPage: newValue,
pageSize: perPage.value,
order: 'DESC',

10
admin/turbo.json Normal file
View File

@ -0,0 +1,10 @@
{
"extends": ["//"],
"tasks": {
"stylelint": {},
"locales": {},
"lint": {
"dependsOn": ["stylelint", "locales"]
}
}
}

View File

@ -6,17 +6,22 @@ import Components from 'unplugin-vue-components/vite'
import IconsResolve from 'unplugin-icons/resolver'
import { BootstrapVueNextResolver } from 'bootstrap-vue-next'
import EnvironmentPlugin from 'vite-plugin-environment'
import schema from './src/config/schema'
import { validate, browserUrls } from 'gradido-config/build/src/index.js'
import { execSync } from 'node:child_process'
import { existsSync, constants } from 'node:fs'
import { validate, browserUrls } from 'config-schema'
import path from 'node:path'
import { createRequire } from 'node:module'
import dotenv from 'dotenv'
dotenv.config() // load env vars from .env
const require = createRequire(import.meta.url)
const CONFIG = require('./src/config')
const path = require('path')
export default defineConfig(async ({ command }) => {
const { vitePluginGraphqlLoader } = await import('vite-plugin-graphql-loader')
if (command === 'serve') {
@ -24,6 +29,10 @@ export default defineConfig(async ({ command }) => {
} else {
CONFIG.ADMIN_HOSTING = 'nginx'
}
if (existsSync('../.git', constants.F_OK)) {
CONFIG.BUILD_COMMIT = execSync('git rev-parse HEAD').toString().trim()
CONFIG.BUILD_COMMIT_SHORT = (CONFIG.BUILD_COMMIT ?? '0000000').slice(0, 7)
}
validate(schema, CONFIG)
// make sure that all urls used in browser have the same protocol to prevent mixed content errors
validate(browserUrls, [
@ -70,7 +79,7 @@ export default defineConfig(async ({ command }) => {
compiler: 'vue3',
}),
EnvironmentPlugin({
BUILD_COMMIT: null,
BUILD_COMMIT: CONFIG.BUILD_COMMIT ?? undefined,
PORT: CONFIG.ADMIN_MODULE_PORT ?? null, // null,
COMMUNITY_HOST: CONFIG.ADMIN_MODULE_HOST ?? null, // null,
COMMUNITY_URL: CONFIG.COMMUNITY_URL ?? null,

View File

@ -11,3 +11,4 @@ EMAIL_TEST_MODUS=false
EMAIL_TLS=false
# for testing password reset
EMAIL_CODE_REQUEST_TIME=1
EMAIL_SMTP_HOST=127.0.0.1

1
backend/.gitignore vendored
View File

@ -2,6 +2,7 @@
/.env
/.env.bak
/build/
/locales/
package-json.lock
coverage
# emacs

View File

@ -1,4 +1,4 @@
/* eslint-disable @typescript-eslint/ban-types */
declare module 'random-bigint' {
function random(bits: number, cb?: (err: Error, num: BigInt) => void): BigInt
export = random

View File

@ -1,4 +1,4 @@
// eslint-disable-next-line import/no-unresolved
export * from '@/node_modules/@types/sodium-native'
declare module 'sodium-native' {

View File

@ -2,6 +2,8 @@
# BASE ###########################################################################
##################################################################################
FROM node:18.20.7-bookworm-slim as base
#FROM node:18.20.7-alpine3.21 as base
# change to alpine after sodium-native ships with a native alpine build
# ENVs (available in production as well, can be overwritten by command line or env file)
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
@ -13,9 +15,12 @@ ENV BUILD_VERSION="0.0.0.0"
## We cannot do `$(git rev-parse --short HEAD)` here so we default to 0000000
ENV BUILD_COMMIT="0000000"
## SET NODE_ENV
ENV NODE_ENV="production"
ENV NODE_ENV=production
## App relevant Envs
ENV PORT="4000"
## Timezone
ENV TZ=UTC
ENV DB_HOST=mariadb
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
@ -34,6 +39,7 @@ LABEL maintainer="support@gradido.net"
## install: git
#RUN apk --no-cache add git
# Settings
## Expose Container Port
EXPOSE ${PORT}
@ -42,48 +48,41 @@ EXPOSE ${PORT}
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
RUN mkdir -p /database
RUN mkdir -p /config
##################################################################################
# BUN ############################################################################
##################################################################################
FROM base as bun-base
RUN apt update && apt install -y --no-install-recommends ca-certificates curl bash unzip
#RUN apk update && apk add --no-cache curl tar bash
RUN curl -fsSL https://bun.sh/install | bash
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
##################################################################################
# DEVELOPMENT (Connected to the local environment, to reload on demand) ##########
# Development ####################################################################
##################################################################################
FROM base as development
# We don't need to copy or build anything since we are going to bind to the
# local filesystem, which will need a rebuild anyway
FROM bun-base AS development
# Run command
# (for development we need to execute yarn install since the
# node_modules are on another volume and need updating)
CMD /bin/sh -c "cd /database && yarn install && yarn build && cd /config && yarn install && cd /app && yarn install && yarn run dev"
CMD /bin/sh -c "bun install --filter backend --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2 \
&& turbo backend#dev --env-mode=loose"
##################################################################################
# BUILD (Does contain all files and is therefore bloated) ########################
# Basic Image with bun setup and project and source code #########################
##################################################################################
FROM base as build
FROM bun-base as bun-base-src
COPY --chown=app:app . .
# Copy everything from backend
COPY ./backend/ ./
# Copy everything from database
COPY ./database/ ../database/
# Copy everything from config
COPY ./config/ ../config/
##################################################################################
# Build ##########################################################################
##################################################################################
FROM bun-base-src as build
# yarn install and build config
RUN cd ../config && yarn install --production=false --frozen-lockfile --non-interactive && yarn build
# yarn install backend
RUN yarn install --production=false --frozen-lockfile --non-interactive
# yarn install database
RUN cd ../database && yarn install --production=false --frozen-lockfile --non-interactive
# yarn build
RUN yarn build
# yarn build database
RUN cd ../database && yarn build
RUN bun install --filter backend --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2
RUN turbo backend#build backend#typecheck --env-mode=loose
##################################################################################
# TEST ###########################################################################
@ -91,7 +90,17 @@ RUN cd ../database && yarn build
FROM build as test
# Run command
CMD /bin/sh -c "yarn run start"
CMD /bin/sh -c "turbo backend#test --env-mode=loose"
##################################################################################
# install only node modules needed for running the bundle #########################
##################################################################################
FROM bun-base-src as production-node-modules
# add node_modules from production_node_modules
RUN bun install --filter backend --production --frozen-lockfile --no-cache \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& ./scripts/clean-prebuilds.sh
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
@ -99,25 +108,15 @@ CMD /bin/sh -c "yarn run start"
FROM base as production
# Copy "binary"-files from build image
COPY --from=build ${DOCKER_WORKDIR}/build ./build
COPY --from=build ${DOCKER_WORKDIR}/../database/build ../database/build
COPY --from=build ${DOCKER_WORKDIR}/../config/build ../config/build
# We also copy the node_modules express and serve-static for the run script
COPY --from=build ${DOCKER_WORKDIR}/node_modules ./node_modules
COPY --from=build ${DOCKER_WORKDIR}/../database/node_modules ../database/node_modules
COPY --from=build ${DOCKER_WORKDIR}/../config/node_modules ../config/node_modules
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/build/index.js ./index.js
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/build/worker.js ./worker.js
# add node_modules from production_node_modules
COPY --chown=app:app --from=production-node-modules ${DOCKER_WORKDIR}/node_modules ./node_modules
# Copy static files
# COPY --from=build ${DOCKER_WORKDIR}/public ./public
# Copy package.json for script definitions (lock file should not be needed)
COPY --from=build ${DOCKER_WORKDIR}/package.json ./package.json
# Copy tsconfig.json to provide alias path definitions
COPY --from=build ${DOCKER_WORKDIR}/tsconfig.json ./tsconfig.json
# Copy log4js-config.json to provide log configuration
COPY --from=build ${DOCKER_WORKDIR}/log4js-config.json ./log4js-config.json
# Copy run scripts run/
# COPY --from=build ${DOCKER_WORKDIR}/run ./run
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/log4js-config.json ./log4js-config.json
# Copy locales
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/locales ./locales
# Run command
CMD /bin/sh -c "yarn run start"
CMD ["node", "index.js"]

View File

@ -2,10 +2,6 @@
## Project setup
```bash
yarn install
```
## Seed DB
```bash

16
backend/esbuild.config.ts Normal file
View File

@ -0,0 +1,16 @@
import { esbuildDecorators } from '@anatine/esbuild-decorators'
import { build } from 'esbuild'
build({
entryPoints: ['src/index.ts', 'src/password/worker.js'],
outdir: 'build',
platform: 'node',
target: 'node18.20.7',
bundle: true,
keepNames: true,
entryNames: '[name]',
// legalComments: 'inline',
external: ['sodium-native', 'email-templates'],
plugins: [esbuildDecorators()],
minify: true,
})

View File

@ -1,5 +1,4 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
// eslint-disable-next-line import/no-commonjs, import/unambiguous
module.exports = {
verbose: true,
preset: 'ts-jest',
@ -7,7 +6,7 @@ module.exports = {
collectCoverageFrom: ['src/**/*.ts', '!**/node_modules/**', '!src/seeds/**', '!build/**'],
coverageThreshold: {
global: {
lines: 77,
lines: 75,
},
},
setupFiles: ['<rootDir>/test/testSetup.ts'],
@ -25,22 +24,18 @@ module.exports = {
'@typeorm/(.*)': '<rootDir>/src/typeorm/$1',
'@test/(.*)': '<rootDir>/test/$1',
'@entity/(.*)':
// eslint-disable-next-line n/no-process-env
process.env.NODE_ENV === 'development'
? '<rootDir>/../database/entity/$1'
: '<rootDir>/../database/build/entity/$1',
'@logging/(.*)':
// eslint-disable-next-line n/no-process-env
process.env.NODE_ENV === 'development'
? '<rootDir>/../database/logging/$1'
: '<rootDir>/../database/build/logging/$1',
'@dbTools/(.*)':
// eslint-disable-next-line n/no-process-env
process.env.NODE_ENV === 'development'
? '<rootDir>/../database/src/$1'
: '<rootDir>/../database/build/src/$1',
'@config/(.*)':
// eslint-disable-next-line n/no-process-env
process.env.NODE_ENV === 'development'
? '<rootDir>/../config/src/$1'
: '<rootDir>/../config/build/$1',

View File

@ -1,96 +1,101 @@
{
"name": "gradido-backend",
"name": "backend",
"version": "2.5.2",
"description": "Gradido unified backend providing an API-Service for Gradido Transactions",
"main": "src/index.ts",
"repository": "https://github.com/gradido/gradido/backend",
"author": "Ulf Gebhardt",
"license": "Apache-2.0",
"private": false,
"description": "Gradido unified backend providing an API-Service for Gradido Transactions",
"repository": "https://github.com/gradido/gradido/backend",
"license": "Apache-2.0",
"author": "Gradido Academy - https://www.gradido.net",
"main": "src/index.ts",
"scripts": {
"build": "tsc --build && mkdirp build/src/emails/templates/ && ncp src/emails/templates build/src/emails/templates && mkdirp build/src/locales/ && ncp src/locales build/src/locales",
"build": "ts-node ./esbuild.config.ts && mkdirp build/templates/ && ncp src/emails/templates build/templates && mkdirp locales/ && ncp src/locales locales",
"clean": "tsc --build --clean",
"start": "cross-env TZ=UTC TS_NODE_BASEURL=./build node -r tsconfig-paths/register build/src/index.js",
"dev": "cross-env TZ=UTC nodemon -w src --ext ts,pug,json,css --exec ts-node -r tsconfig-paths/register src/index.ts",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write",
"dev": "cross-env TZ=UTC nodemon -w src --ext ts,pug,json,css -r tsconfig-paths/register src/index.ts",
"test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_backend jest --runInBand --forceExit --detectOpenHandles",
"seed": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/seeds/index.ts",
"klicktipp": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/util/executeKlicktipp.ts",
"gmsusers": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/apis/gms/ExportUsers.ts",
"humhubUserExport": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/apis/humhub/ExportUsers.ts",
"locales": "scripts/sort.sh"
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write",
"lint:fix:unsafe": "biome check --fix --unsafe",
"locales": "scripts/sort.sh",
"start": "cross-env TZ=UTC NODE_ENV=production node build/index.js",
"typecheck": "tsc --noEmit"
},
"nodemonConfig": {
"ignore": ["**/*.test.ts"]
},
"dependencies": {
"apollo-server-express": "^2.25.2",
"await-semaphore": "^0.1.3",
"axios": "^0.21.1",
"class-validator": "^0.13.1",
"cors": "^2.8.5",
"cross-env": "^7.0.3",
"decimal.js-light": "^2.5.1",
"dotenv": "^10.0.0",
"email-templates": "^10.0.1",
"express": "^4.17.1",
"express-slow-down": "^2.0.1",
"gradido-config": "file:../config",
"gradido-database": "file:../database",
"graphql": "^15.5.1",
"graphql-parse-resolve-info": "^4.13.0",
"graphql-request": "5.0.0",
"graphql-type-json": "0.3.2",
"helmet": "^5.1.1",
"i18n": "^0.15.1",
"joi": "^17.13.3",
"jose": "^4.14.4",
"lodash.clonedeep": "^4.5.0",
"log4js": "^6.4.6",
"mysql2": "^2.3.0",
"nodemailer": "^6.6.5",
"openai": "^4.87.3",
"pug": "^3.0.2",
"random-bigint": "^0.0.1",
"reflect-metadata": "^0.1.13",
"sodium-native": "^3.4.1",
"type-graphql": "^1.1.1",
"typed-rest-client": "^1.8.11",
"uuid": "^8.3.2",
"workerpool": "^9.2.0",
"xregexp": "^5.1.1"
"sodium-native": "^3.4.1"
},
"devDependencies": {
"@anatine/esbuild-decorators": "^0.2.19",
"@biomejs/biome": "1.9.4",
"@types/email-templates": "^10.0.1",
"@types/express": "^4.17.12",
"@swc/cli": "^0.7.3",
"@swc/core": "^1.11.24",
"@swc/helpers": "^0.5.17",
"@types/email-templates": "^10.0.4",
"@types/express": "^4.17.21",
"@types/faker": "^5.5.9",
"@types/i18n": "^0.13.4",
"@types/jest": "^27.0.2",
"@types/joi": "^17.2.3",
"@types/jest": "27.0.2",
"@types/lodash.clonedeep": "^4.5.6",
"@types/node": "^17.0.21",
"@types/nodemailer": "^6.4.4",
"@types/sodium-native": "^2.3.5",
"@types/uuid": "^8.3.4",
"apollo-server-express": "^2.25.2",
"apollo-server-testing": "^2.25.2",
"await-semaphore": "^0.1.3",
"axios": "^0.21.1",
"class-validator": "^0.13.1",
"config-schema": "*",
"cors": "^2.8.5",
"database": "*",
"decimal.js-light": "^2.5.1",
"dotenv": "^10.0.0",
"esbuild": "^0.25.2",
"express": "^4.17.21",
"express-slow-down": "^2.0.1",
"faker": "^5.5.3",
"graphql": "^15.10.1",
"graphql-parse-resolve-info": "^4.13.1",
"graphql-request": "5.0.0",
"graphql-tag": "^2.12.6",
"jest": "^27.2.4",
"graphql-type-json": "0.3.2",
"helmet": "^5.1.1",
"i18n": "^0.15.1",
"jest": "27.2.4",
"joi": "^17.13.3",
"jose": "^4.14.4",
"klicktipp-api": "^1.0.2",
"lodash.clonedeep": "^4.5.0",
"log4js": "^6.7.1",
"mkdirp": "^3.0.1",
"ncp": "^2.0.0",
"nodemailer": "^6.6.5",
"nodemon": "^2.0.7",
"prettier": "^2.8.7",
"ts-jest": "^27.0.5",
"openai": "^4.87.3",
"prettier": "^3.5.3",
"pug": "^3.0.2",
"random-bigint": "^0.0.1",
"reflect-metadata": "^0.1.13",
"regenerator-runtime": "^0.14.1",
"ts-jest": "27.0.5",
"ts-node": "^10.9.2",
"tsconfig-paths": "^3.14.0",
"typescript": "^4.9.5"
},
"nodemonConfig": {
"ignore": [
"**/*.test.ts"
]
"tsconfig-paths": "^4.1.1",
"type-graphql": "^1.1.1",
"typed-rest-client": "^1.8.11",
"typeorm": "^0.3.16",
"typescript": "^4.9.5",
"uuid": "^8.3.2",
"workerpool": "^9.2.0",
"xregexp": "^5.1.1"
},
"engines": {
"node": ">=14"
"node": ">=18"
}
}

View File

@ -1,6 +1,6 @@
import { Connection } from '@dbTools/typeorm'
import { Transaction as DbTransaction } from '@entity/Transaction'
import { Transaction as DbTransaction } from 'database'
import { Decimal } from 'decimal.js-light'
import { Connection } from 'typeorm'
import { cleanDB, testEnvironment } from '@test/helpers'
@ -111,7 +111,7 @@ describe('transmitTransaction', () => {
await DltConnectorClient.getInstance()?.transmitTransaction(localTransaction)
} catch (e) {
expect(e).toMatchObject(
new LogError('invalid transaction type id: ' + localTransaction.typeId.toString()),
new LogError(`invalid transaction type id: ${localTransaction.typeId.toString()}`),
)
}
})

View File

@ -1,4 +1,4 @@
import { Transaction as DbTransaction } from '@entity/Transaction'
import { Transaction as DbTransaction } from 'database'
import { GraphQLClient, gql } from 'graphql-request'
import { CONFIG } from '@/config'

View File

@ -1,4 +1,4 @@
import { User as DbUser } from '@entity/User'
import { User as DbUser } from 'database'
// import { createTestClient } from 'apollo-server-testing'
// import { createGmsUser } from '@/apis/gms/GmsClient'

View File

@ -1,4 +1,4 @@
import { User as dbUser } from '@entity/User'
import { User as dbUser } from 'database'
import { PublishNameLogic } from '@/data/PublishName.logic'
// import { GmsPublishLocationType } from '@/graphql/enum/GmsPublishLocationType'

View File

@ -1,5 +1,5 @@
import { IsNull, Not } from '@dbTools/typeorm'
import { User } from '@entity/User'
import { User } from 'database'
import { IsNull, Not } from 'typeorm'
import { CONFIG } from '@/config'
import { LogError } from '@/server/LogError'

View File

@ -1,4 +1,4 @@
import { ProjectBranding } from '@entity/ProjectBranding'
import { ProjectBranding } from 'database'
import { SignJWT } from 'jose'
import { IRequestOptions, IRestResponse, RestClient } from 'typed-rest-client'

View File

@ -1,5 +1,4 @@
import { User } from '@entity/User'
import { UserContact } from '@entity/UserContact'
import { User, UserContact } from 'database'
import { IRestResponse } from 'typed-rest-client'
import { GetUser } from '@/apis/humhub/model/GetUser'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { isHumhubUserIdenticalToDbUser } from '@/apis/humhub/compareHumhubUserDbUser'
import { GetUser } from '@/apis/humhub/model/GetUser'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { Account } from './model/Account'
import { GetUser } from './model/GetUser'

View File

@ -1,4 +1,4 @@
import { AbstractLoggingView } from '@logging/AbstractLogging.view'
import { AbstractLoggingView } from 'database'
import { Account } from '@/apis/humhub/model/Account'

View File

@ -1,4 +1,4 @@
import { AbstractLoggingView } from '@logging/AbstractLogging.view'
import { AbstractLoggingView } from 'database'
import { PostUser } from '@/apis/humhub/model/PostUser'

View File

@ -1,4 +1,4 @@
import { AbstractLoggingView } from '@logging/AbstractLogging.view'
import { AbstractLoggingView } from 'database'
import { Profile } from '@/apis/humhub/model/Profile'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { Account } from './Account'
import { Profile } from './Profile'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { convertGradidoLanguageToHumhub } from '@/apis/humhub/convertLanguage'
import { PublishNameLogic } from '@/data/PublishName.logic'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { AbstractUser } from './AbstractUser'

View File

@ -1,5 +1,4 @@
import { User } from '@entity/User'
import { UserContact } from '@entity/UserContact'
import { User, UserContact } from 'database'
import { v4 as uuidv4 } from 'uuid'
import { PublishNameType } from '@/graphql/enum/PublishNameType'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { CONFIG } from '@/config'
import { PublishNameLogic } from '@/data/PublishName.logic'

View File

@ -1,5 +1,4 @@
import { User } from '@entity/User'
import { UserContact } from '@entity/UserContact'
import { User, UserContact } from 'database'
import { GetUser } from './model/GetUser'
import { ExecutedHumhubAction, syncUser } from './syncUser'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { LogError } from '@/server/LogError'
import { backendLogger as logger } from '@/server/logger'

View File

@ -1,5 +1,4 @@
import { OpenaiThreads } from '@entity/OpenaiThreads'
import { User } from '@entity/User'
import { OpenaiThreads, User } from 'database'
import { OpenAI } from 'openai'
import { Message } from 'openai/resources/beta/threads/messages'

View File

@ -7,6 +7,7 @@ export const INALIENABLE_RIGHTS = [
RIGHTS.SEND_RESET_PASSWORD_EMAIL,
RIGHTS.SET_PASSWORD,
RIGHTS.QUERY_TRANSACTION_LINK,
RIGHTS.QUERY_REDEEM_JWT,
RIGHTS.QUERY_OPT_IN,
RIGHTS.CHECK_USERNAME,
RIGHTS.PROJECT_BRANDING_BANNER,

View File

@ -6,6 +6,7 @@ export enum RIGHTS {
SEND_RESET_PASSWORD_EMAIL = 'SEND_RESET_PASSWORD_EMAIL',
SET_PASSWORD = 'SET_PASSWORD',
QUERY_TRANSACTION_LINK = 'QUERY_TRANSACTION_LINK',
QUERY_REDEEM_JWT = 'QUERY_REDEEM_JWT',
QUERY_OPT_IN = 'QUERY_OPT_IN',
CHECK_USERNAME = 'CHECK_USERNAME',
PROJECT_BRANDING_BANNER = 'PROJECT_BRANDING_BANNER',
@ -24,6 +25,7 @@ export enum RIGHTS {
CREATE_TRANSACTION_LINK = 'CREATE_TRANSACTION_LINK',
DELETE_TRANSACTION_LINK = 'DELETE_TRANSACTION_LINK',
REDEEM_TRANSACTION_LINK = 'REDEEM_TRANSACTION_LINK',
DISBURSE_TRANSACTION_LINK = 'DISBURSE_TRANSACTION_LINK',
LIST_TRANSACTION_LINKS = 'LIST_TRANSACTION_LINKS',
GDT_BALANCE = 'GDT_BALANCE',
CREATE_CONTRIBUTION = 'CREATE_CONTRIBUTION',

View File

@ -15,6 +15,7 @@ export const USER_RIGHTS = [
RIGHTS.CREATE_TRANSACTION_LINK,
RIGHTS.DELETE_TRANSACTION_LINK,
RIGHTS.REDEEM_TRANSACTION_LINK,
RIGHTS.DISBURSE_TRANSACTION_LINK,
RIGHTS.LIST_TRANSACTION_LINKS,
RIGHTS.GDT_BALANCE,
RIGHTS.CREATE_CONTRIBUTION,

View File

@ -0,0 +1,70 @@
import { createPrivateKey, sign } from 'node:crypto'
import { JWTPayload, SignJWT, decodeJwt, jwtVerify } from 'jose'
import { LogError } from '@/server/LogError'
import { backendLogger as logger } from '@/server/logger'
import { JwtPayloadType } from './payloadtypes/JwtPayloadType'
export const verify = async (token: string, signkey: string): Promise<JwtPayloadType | null> => {
if (!token) {
throw new LogError('401 Unauthorized')
}
logger.info('JWT.verify... token, signkey=', token, signkey)
try {
/*
const { KeyObject } = await import('node:crypto')
const cryptoKey = await crypto.subtle.importKey('raw', signkey, { name: 'RS256' }, false, [
'sign',
])
const keyObject = KeyObject.from(cryptoKey)
logger.info('JWT.verify... keyObject=', keyObject)
logger.info('JWT.verify... keyObject.asymmetricKeyDetails=', keyObject.asymmetricKeyDetails)
logger.info('JWT.verify... keyObject.asymmetricKeyType=', keyObject.asymmetricKeyType)
logger.info('JWT.verify... keyObject.asymmetricKeySize=', keyObject.asymmetricKeySize)
*/
const secret = new TextEncoder().encode(signkey)
const { payload } = await jwtVerify(token, secret, {
issuer: 'urn:gradido:issuer',
audience: 'urn:gradido:audience',
})
logger.info('JWT.verify after jwtVerify... payload=', payload)
return payload as JwtPayloadType
} catch (err) {
logger.error('JWT.verify after jwtVerify... error=', err)
return null
}
}
export const encode = async (payload: JwtPayloadType, signkey: string): Promise<string> => {
logger.info('JWT.encode... payload=', payload)
logger.info('JWT.encode... signkey=', signkey)
try {
const secret = new TextEncoder().encode(signkey)
const token = await new SignJWT({ payload, 'urn:gradido:claim': true })
.setProtectedHeader({
alg: 'HS256',
})
.setIssuedAt()
.setIssuer('urn:gradido:issuer')
.setAudience('urn:gradido:audience')
.setExpirationTime(payload.expiration)
.sign(secret)
return token
} catch (e) {
logger.error('Failed to sign JWT:', e)
throw e
}
}
export const verifyJwtType = async (token: string, signkey: string): Promise<string> => {
const payload = await verify(token, signkey)
return payload ? payload.tokentype : 'unknown token type'
}
export const decode = (token: string): JwtPayloadType => {
const { payload } = decodeJwt(token)
return payload as JwtPayloadType
}
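A minimal usage sketch of these helpers (illustrative only; the import paths and example values are assumptions, and `RedeemJwtPayloadType` is the class shown further below). Note from the code above that `encode()` signs `{ payload, 'urn:gradido:claim': true }`, so the original fields end up nested under a `payload` claim in the verified token:

```ts
// Hedged usage sketch – not part of this diff; import paths are assumed.
import { decode, encode, verify } from '@/auth/JWT'
import { RedeemJwtPayloadType } from '@/auth/payloadtypes/RedeemJwtPayloadType'
import { CONFIG } from '@/config'

async function redeemTokenRoundTrip(): Promise<void> {
  const payload = new RedeemJwtPayloadType(
    'sender-community-uuid',
    'sender-gradido-id',
    'Alice', // sendername (alias or first name)
    'redeem-code-123',
    '100',
    'Thank you',
    '2025-12-31T00:00:00Z',
  )
  const token = await encode(payload, CONFIG.JWT_SECRET)
  // verify() returns the full JWT claims (or null on failure); the fields above
  // sit under `claims.payload` because encode() nests them.
  const claims = await verify(token, CONFIG.JWT_SECRET)
  console.log(claims !== null, decode(token).tokentype) // true 'redeem-activation'
}
```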

View File

@ -0,0 +1,48 @@
// import { JWTPayload } from 'jose'
import { JwtPayloadType } from './JwtPayloadType'
export class DisburseJwtPayloadType extends JwtPayloadType {
static DISBURSE_ACTIVATION_TYPE = 'disburse-activation'
sendercommunityuuid: string
sendergradidoid: string
recipientcommunityuuid: string
recipientcommunityname: string
recipientgradidoid: string
recipientfirstname: string
code: string
amount: string
memo: string
validuntil: string
recipientalias: string
constructor(
senderCommunityUuid: string,
senderGradidoId: string,
recipientCommunityUuid: string,
recipientCommunityName: string,
recipientGradidoId: string,
recipientFirstName: string,
code: string,
amount: string,
memo: string,
validUntil: string,
recipientAlias: string,
) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
super()
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
this.tokentype = DisburseJwtPayloadType.DISBURSE_ACTIVATION_TYPE
this.sendercommunityuuid = senderCommunityUuid
this.sendergradidoid = senderGradidoId
this.recipientcommunityuuid = recipientCommunityUuid
this.recipientcommunityname = recipientCommunityName
this.recipientgradidoid = recipientGradidoId
this.recipientfirstname = recipientFirstName
this.code = code
this.amount = amount
this.memo = memo
this.validuntil = validUntil
this.recipientalias = recipientAlias
}
}

View File

@ -0,0 +1,21 @@
import { JWTPayload } from 'jose'
import { CONFIG } from '@/config'
export class JwtPayloadType implements JWTPayload {
iat?: number | undefined
exp?: number | undefined
nbf?: number | undefined
jti?: string | undefined
aud?: string | string[] | undefined
sub?: string | undefined
iss?: string | undefined;
[propName: string]: unknown
tokentype: string
expiration: string // in minutes (format: 10m for ten minutes)
constructor() {
this.tokentype = 'unknown jwt type'
this.expiration = CONFIG.REDEEM_JWT_TOKEN_EXPIRATION || '10m'
}
}

View File

@ -0,0 +1,36 @@
// import { JWTPayload } from 'jose'
import { JwtPayloadType } from './JwtPayloadType'
export class RedeemJwtPayloadType extends JwtPayloadType {
static REDEEM_ACTIVATION_TYPE = 'redeem-activation'
sendercommunityuuid: string
sendergradidoid: string
sendername: string // alias or firstname
redeemcode: string
amount: string
memo: string
validuntil: string
constructor(
senderCom: string,
senderUser: string,
sendername: string,
code: string,
amount: string,
memo: string,
validUntil: string,
) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
super()
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
this.tokentype = RedeemJwtPayloadType.REDEEM_ACTIVATION_TYPE
this.sendercommunityuuid = senderCom
this.sendergradidoid = senderUser
this.sendername = sendername
this.redeemcode = code
this.amount = amount
this.memo = memo
this.validuntil = validUntil
}
}

View File

@ -1,7 +1,7 @@
// ATTENTION: DO NOT PUT ANY SECRETS IN HERE (or the .env)
import { validate } from '@config/index'
import { latestDbVersion } from '@dbTools/config/detectLastDBVersion'
import { validate } from 'config-schema'
import { latestDbVersion } from 'database'
import { Decimal } from 'decimal.js-light'
import dotenv from 'dotenv'
@ -25,6 +25,7 @@ const server = {
PORT: process.env.PORT ?? 4000,
JWT_SECRET: process.env.JWT_SECRET ?? 'secret123',
JWT_EXPIRES_IN: process.env.JWT_EXPIRES_IN ?? '10m',
REDEEM_JWT_TOKEN_EXPIRATION: process.env.REDEEM_JWT_TOKEN_EXPIRATION ?? '10m',
GRAPHIQL: process.env.GRAPHIQL === 'true' || false,
GDT_ACTIVE: process.env.GDT_ACTIVE === 'true' || false,
GDT_API_URL: process.env.GDT_API_URL ?? 'https://gdt.gradido.net',
@ -34,8 +35,14 @@ const server = {
}
const database = {
DB_CONNECT_RETRY_COUNT: process.env.DB_CONNECT_RETRY_COUNT
? Number.parseInt(process.env.DB_CONNECT_RETRY_COUNT)
: 15,
DB_CONNECT_RETRY_DELAY_MS: process.env.DB_CONNECT_RETRY_DELAY_MS
? Number.parseInt(process.env.DB_CONNECT_RETRY_DELAY_MS)
: 500,
DB_HOST: process.env.DB_HOST ?? 'localhost',
DB_PORT: process.env.DB_PORT ? parseInt(process.env.DB_PORT) : 3306,
DB_PORT: process.env.DB_PORT ? Number.parseInt(process.env.DB_PORT) : 3306,
DB_USER: process.env.DB_USER ?? 'root',
DB_PASSWORD: process.env.DB_PASSWORD ?? '',
DB_DATABASE: process.env.DB_DATABASE ?? 'gradido_community',
@ -75,12 +82,12 @@ const community = {
const loginServer = {
LOGIN_APP_SECRET: process.env.LOGIN_APP_SECRET ?? '21ffbbc616fe',
LOGIN_SERVER_KEY: process.env.LOGIN_SERVER_KEY ?? 'a51ef8ac7ef1abf162fb7a65261acd7a',
USE_CRYPTO_WORKER: process.env.USE_CRYPTO_WORKER ?? false,
USE_CRYPTO_WORKER: process.env.USE_CRYPTO_WORKER === 'true',
}
const email = {
EMAIL: process.env.EMAIL === 'true' || false,
EMAIL_TEST_MODUS: process.env.EMAIL_TEST_MODUS === 'true' || false,
EMAIL: process.env.EMAIL === 'true',
EMAIL_TEST_MODUS: process.env.EMAIL_TEST_MODUS === 'true',
EMAIL_TEST_RECEIVER: process.env.EMAIL_TEST_RECEIVER ?? 'stage1@gradido.net',
EMAIL_USERNAME: process.env.EMAIL_USERNAME ?? '',
EMAIL_SENDER: process.env.EMAIL_SENDER ?? 'info@gradido.net',
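The two new DB_CONNECT_RETRY_* options are only declared in this excerpt; the code that consumes them is not shown here. A hedged sketch of how a startup routine could use them (the `connectWithRetry` helper is hypothetical, only the CONFIG names come from the diff):

```ts
// Hypothetical sketch only: a bounded connect-retry loop driven by the new settings.
import { CONFIG } from '@/config'

const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))

export async function connectWithRetry(connect: () => Promise<void>): Promise<void> {
  for (let attempt = 1; attempt <= CONFIG.DB_CONNECT_RETRY_COUNT; attempt++) {
    try {
      await connect()
      return
    } catch (err) {
      if (attempt === CONFIG.DB_CONNECT_RETRY_COUNT) throw err
      await sleep(CONFIG.DB_CONNECT_RETRY_DELAY_MS)
    }
  }
}
```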

View File

@ -3,6 +3,8 @@ import {
COMMUNITY_NAME,
COMMUNITY_SUPPORT_MAIL,
COMMUNITY_URL,
DB_CONNECT_RETRY_COUNT,
DB_CONNECT_RETRY_DELAY_MS,
DB_DATABASE,
DB_HOST,
DB_PASSWORD,
@ -24,7 +26,7 @@ import {
OPENAI_ACTIVE,
PRODUCTION,
TYPEORM_LOGGING_RELATIVE_PATH,
} from '@config/commonSchema'
} from 'config-schema'
import Joi from 'joi'
export const schema = Joi.object({
@ -38,6 +40,8 @@ export const schema = Joi.object({
DB_USER,
DB_VERSION,
DB_DATABASE,
DB_CONNECT_RETRY_COUNT,
DB_CONNECT_RETRY_DELAY_MS,
DECAY_START_TIME,
GDT_API_URL,
GDT_ACTIVE,
@ -363,5 +367,20 @@ export const schema = Joi.object({
.required()
.description('Time for JWT token to expire, auto logout'),
REDEEM_JWT_TOKEN_EXPIRATION: Joi.alternatives()
.try(
Joi.string()
.pattern(/^\d+[smhdw]$/)
.description(
'Expiration time for x-community redeem JWT token, in format like "10m", "1h", "1d"',
)
.default('10m'),
Joi.number()
.positive()
.description('Expiration time for x-community redeem JWT token in minutes'),
)
.required()
.description('Time for x-community redeem JWT token to expire'),
WEBHOOK_ELOPAGE_SECRET: Joi.string().description("isn't really used any more").optional(),
})
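Since REDEEM_JWT_TOKEN_EXPIRATION accepts either a duration string or a positive number of minutes, both shapes pass the rule above. A standalone snippet (illustrative only, not project code) showing the Joi.alternatives() behaviour:

```ts
// Standalone illustration of the alternatives rule above.
import Joi from 'joi'

const redeemExpiration = Joi.alternatives()
  .try(
    Joi.string().pattern(/^\d+[smhdw]$/).default('10m'), // "10m", "1h", "1d", ...
    Joi.number().positive(),                             // plain minutes
  )
  .required()

console.log(redeemExpiration.validate('10m').error)  // undefined – duration string accepted
console.log(redeemExpiration.validate(15).error)     // undefined – positive number accepted
console.log(redeemExpiration.validate('soon').error) // ValidationError – matches neither branch
```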

View File

@ -1,4 +1,4 @@
import { Contribution } from '@entity/Contribution'
import { Contribution } from 'database'
import { Decimal } from 'decimal.js-light'
import {

View File

@ -1,6 +1,4 @@
import { Contribution } from '@entity/Contribution'
import { ContributionMessage } from '@entity/ContributionMessage'
import { User } from '@entity/User'
import { Contribution, ContributionMessage, User } from 'database'
import { ContributionMessageType } from '@/graphql/enum/ContributionMessageType'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { v4 as uuidv4 } from 'uuid'
import { PublishNameType } from '@/graphql/enum/PublishNameType'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import XRegExp from 'xregexp'
import { PublishNameType } from '@/graphql/enum/PublishNameType'
@ -92,7 +92,7 @@ export class PublishNameLogic {
? this.getUsernameFromAlias()
: this.isUsernameFromInitials(publishNameType)
? this.getUsernameFromInitials()
: (this.getFirstName(publishNameType) + ' ' + this.getLastName(publishNameType)).trim()
: `${this.getFirstName(publishNameType)} ${this.getLastName(publishNameType)}`.trim()
}
public getUsernameFromInitials(): string {

View File

@ -1,5 +1,4 @@
import { User } from '@entity/User'
import { UserRole } from '@entity/UserRole'
import { User, UserRole } from 'database'
import { RoleNames } from '@enum/RoleNames'

View File

@ -1,6 +1,6 @@
import { Connection } from '@dbTools/typeorm'
import { ApolloServerTestClient } from 'apollo-server-testing'
import { Decimal } from 'decimal.js-light'
import { Connection } from 'typeorm'
import { testEnvironment } from '@test/helpers'
import { i18n as localization, logger } from '@test/testSetup'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { ContributionLink as DbContributionLink } from '@entity/ContributionLink'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { ContributionLink as DbContributionLink, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { ContributionLink as DbContributionLink } from '@entity/ContributionLink'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { ContributionLink as DbContributionLink, Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { ContributionLink as DbContributionLink } from '@entity/ContributionLink'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { ContributionLink as DbContributionLink, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,7 +1,9 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { ContributionMessage as DbContributionMessage } from '@entity/ContributionMessage'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import {
Contribution as DbContribution,
ContributionMessage as DbContributionMessage,
Event as DbEvent,
User as DbUser,
} from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,8 +1,10 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { ContributionLink as DbContributionLink } from '@entity/ContributionLink'
import { Event as DbEvent } from '@entity/Event'
import { Transaction as DbTransaction } from '@entity/Transaction'
import { User as DbUser } from '@entity/User'
import {
Contribution as DbContribution,
ContributionLink as DbContributionLink,
Event as DbEvent,
Transaction as DbTransaction,
User as DbUser,
} from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,7 +1,9 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { ContributionMessage as DbContributionMessage } from '@entity/ContributionMessage'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import {
Contribution as DbContribution,
ContributionMessage as DbContributionMessage,
Event as DbEvent,
User as DbUser,
} from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { TransactionLink as DbTransactionLink } from '@entity/TransactionLink'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, TransactionLink as DbTransactionLink, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { TransactionLink as DbTransactionLink } from '@entity/TransactionLink'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, TransactionLink as DbTransactionLink, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { TransactionLink as DbTransactionLink } from '@entity/TransactionLink'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, TransactionLink as DbTransactionLink, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { Transaction as DbTransaction } from '@entity/Transaction'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, Transaction as DbTransaction, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { Transaction as DbTransaction } from '@entity/Transaction'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, Transaction as DbTransaction, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

Some files were not shown because too many files have changed in this diff.