Merge pull request #3479 from gradido/upgrade_esbuild

refactor(other): use esbuild instead of tsc
einhornimmond 2025-05-15 19:22:27 +02:00 committed by GitHub
commit 9399a3fbfe
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
108 changed files with 2708 additions and 1222 deletions

View File

@ -1,3 +1,5 @@
**/node_modules
**/build
**/coverage
.git
**/.turbo

.github/workflows/lint.yml (new file, 91 lines, vendored)
View File

@ -0,0 +1,91 @@
name: Linting with biomejs
on: push
jobs:
lint:
runs-on: ubuntu-latest
outputs:
config-schema: ${{ steps.config-schema.outputs.success }}
backend: ${{ steps.backend.outputs.success }}
database: ${{ steps.database.outputs.success }}
dht-node: ${{ steps.dht-node.outputs.success }}
federation: ${{ steps.federation.outputs.success }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Biome
uses: biomejs/setup-biome@v2
with:
version: latest
- name: Lint - Config-Schema
id: config-schema
run: |
cd ./config-schema
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
- name: Lint - Backend
id: backend
run: |
cd ./backend
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
- name: Lint - Database Up
id: database
run: |
cd ./database
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
- name: Lint - DHT Node
id: dht-node
run: |
cd ./dht-node
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
- name: Lint - Federation
id: federation
run: |
cd ./federation
biome ci .
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint_config_schema:
name: Lint - Config-Schema
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.config-schema }}" != "true" ]; then exit 1; fi
lint_backend:
name: Lint - Backend
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.backend }}" != "true" ]; then exit 1; fi
lint_database:
name: Lint - Database Up
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.database }}" != "true" ]; then exit 1; fi
lint_dht_node:
name: Lint - DHT Node
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.dht-node }}" != "true" ]; then exit 1; fi
lint_federation:
name: Lint - Federation
needs: lint
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.lint.outputs.federation }}" != "true" ]; then exit 1; fi

View File

@ -18,7 +18,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
@ -55,7 +55,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
@ -80,6 +80,80 @@ jobs:
name: docker-backend-production
path: /tmp/backend.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION DHT-NODE ######################################
##############################################################################
build_production_dht-node:
name: Docker Build Production - DHT-Node
runs-on: ubuntu-latest
#needs: [nothing]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
- name: ENV - VERSION
run: echo "VERSION=$(node -p -e "require('./package.json').version")" >> $GITHUB_ENV
- name: ENV - BUILD_DATE
run: echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV
- name: ENV - BUILD_VERSION
run: echo "BUILD_VERSION=${VERSION}.${GITHUB_RUN_NUMBER}" >> $GITHUB_ENV
- name: ENV - BUILD_COMMIT
run: echo "BUILD_COMMIT=${GITHUB_SHA}" >> $GITHUB_ENV
##########################################################################
# DHT-NODE ################################################################
##########################################################################
- name: DHT-Node | Build `production` image
run: |
docker build -f ./dht-node/Dockerfile --target production -t "gradido/dht-node:latest" -t "gradido/dht-node:production" -t "gradido/dht-node:${VERSION}" -t "gradido/dht-node:${BUILD_VERSION}" .
docker save "gradido/dht-node" > /tmp/dht-node.tar
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: docker-dht-node-production
path: /tmp/dht-node.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION FEDERATION ######################################
##############################################################################
build_production_federation:
name: Docker Build Production - Federation
runs-on: ubuntu-latest
#needs: [nothing]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
- name: ENV - VERSION
run: echo "VERSION=$(node -p -e "require('./package.json').version")" >> $GITHUB_ENV
- name: ENV - BUILD_DATE
run: echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV
- name: ENV - BUILD_VERSION
run: echo "BUILD_VERSION=${VERSION}.${GITHUB_RUN_NUMBER}" >> $GITHUB_ENV
- name: ENV - BUILD_COMMIT
run: echo "BUILD_COMMIT=${GITHUB_SHA}" >> $GITHUB_ENV
##########################################################################
# FEDERATION ##############################################################
##########################################################################
- name: Federation | Build `production` image
run: |
docker build -f ./federation/Dockerfile --target production -t "gradido/federation:latest" -t "gradido/federation:production" -t "gradido/federation:${VERSION}" -t "gradido/federation:${BUILD_VERSION}" .
docker save "gradido/federation" > /tmp/federation.tar
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: docker-federation-production
path: /tmp/federation.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION DATABASE UP ###################################
##############################################################################
@ -92,7 +166,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# DATABASE UP ############################################################
##########################################################################
@ -106,43 +180,6 @@ jobs:
name: docker-database-production_up
path: /tmp/database_up.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION MARIADB #######################################
##############################################################################
build_production_mariadb:
name: Docker Build Production - MariaDB
runs-on: ubuntu-latest
#needs: [nothing]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
##########################################################################
# SET ENVS ###############################################################
##########################################################################
- name: ENV - VERSION
run: echo "VERSION=$(node -p -e "require('./package.json').version")" >> $GITHUB_ENV
- name: ENV - BUILD_DATE
run: echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV
- name: ENV - BUILD_VERSION
run: echo "BUILD_VERSION=${VERSION}.${GITHUB_RUN_NUMBER}" >> $GITHUB_ENV
- name: ENV - BUILD_COMMIT
run: echo "BUILD_COMMIT=${GITHUB_SHA}" >> $GITHUB_ENV
##########################################################################
# MARIADB ################################################################
##########################################################################
- name: MariaDB | Build `production` image
run: |
docker build -t "gradido/mariadb:latest" -t "gradido/mariadb:production" -t "gradido/mariadb:${VERSION}" -t "gradido/mariadb:${BUILD_VERSION}" -f ./mariadb/Dockerfile ./
docker save "gradido/mariadb" > /tmp/mariadb.tar
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: docker-mariadb-production
path: /tmp/mariadb.tar
##############################################################################
# JOB: DOCKER BUILD PRODUCTION NGINX #########################################
##############################################################################
@ -155,7 +192,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# SET ENVS ###############################################################
##########################################################################
@ -186,7 +223,7 @@ jobs:
upload_to_dockerhub:
name: Upload to Dockerhub
runs-on: ubuntu-latest
needs: [build_production_frontend, build_production_backend, build_production_database_up, build_production_mariadb, build_production_nginx]
needs: [build_production_frontend, build_production_backend, build_production_database_up, build_production_nginx]
env:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
@ -195,7 +232,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
##########################################################################
# DOWNLOAD DOCKER IMAGES #################################################
##########################################################################
@ -213,6 +250,20 @@ jobs:
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/backend.tar
- name: Download Docker Image (DHT-Node)
uses: actions/download-artifact@v4
with:
name: docker-dht-node-production
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/dht-node.tar
- name: Download Docker Image (Federation)
uses: actions/download-artifact@v4
with:
name: docker-federation-production
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/federation.tar
- name: Download Docker Image (Database)
uses: actions/download-artifact@v4
with:
@ -220,11 +271,6 @@ jobs:
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/database_up.tar
- name: Download Docker Image (MariaDB)
uses: actions/download-artifact@v4
with:
name: docker-mariadb-production
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/mariadb.tar
- name: Download Docker Image (Nginx)
@ -243,10 +289,12 @@ jobs:
run: docker push --all-tags gradido/frontend
- name: Push backend
run: docker push --all-tags gradido/backend
- name: Push dht-node
run: docker push --all-tags gradido/dht-node
- name: Push federation
run: docker push --all-tags gradido/federation
- name: Push database
run: docker push --all-tags gradido/database
- name: Push MariaDB
run: docker push --all-tags gradido/mariadb
- name: Push Nginx
run: docker push --all-tags gradido/nginx
@ -262,7 +310,7 @@ jobs:
# CHECKOUT CODE ##########################################################
##########################################################################
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch full History for changelog
##########################################################################

View File

@ -52,34 +52,31 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune admin with turbos help
run: turbo prune admin
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
run: |
bun install --filter admin --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Admin Interface | Unit tests
id: test
run: |
cd out && turbo admin#test admin#lint
turbo admin#test admin#lint
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint:
if: needs.files-changed.outputs.admin == 'true'
name: Lint - Admin Interface
needs: [files-changed, unit_test]
needs: unit_test
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
stylelint:
if: needs.files-changed.outputs.admin == 'true'
name: Stylelint - Admin Interface
needs: [files-changed, unit_test]
needs: unit_test
runs-on: ubuntu-latest
steps:
- name: Checkout code
@ -91,11 +88,11 @@ jobs:
locales:
if: needs.files-changed.outputs.admin == 'true'
name: Locales - Admin Interface
needs: [files-changed, unit_test]
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
- name: Admin Interface | Locales
run: cd admin && yarn locales

View File

@ -33,15 +33,13 @@ jobs:
uses: actions/checkout@v3
- name: Backend | Build 'test' image
run: docker build -f ./backend/Dockerfile --target test -t "gradido/backend:test" .
run: docker build -f ./backend/Dockerfile --target production -t "gradido/backend:production" .
unit_test:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true' || needs.files-changed.outputs.config == 'true'
name: Unit tests - Backend
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
@ -49,43 +47,55 @@ jobs:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune backend with turbos help
run: turbo prune backend
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Wait for MariaDB to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
run: |
bun install --filter backend --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Backend | Unit tests
id: test
run: |
cd out && turbo backend#lint backend#test
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
run: turbo backend#test
lint:
typecheck:
if: needs.files-changed.outputs.backend == 'true'
name: Lint - Backend
needs: [files-changed, unit_test]
name: Typecheck - Backend
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
- name: Checkout code
uses: actions/checkout@v3
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: |
bun install --filter backend --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Backend | Typecheck
run: turbo backend#typecheck backend#build
locales:
if: needs.files-changed.outputs.backend == 'true'
name: Locales - Backend
needs: [files-changed, unit_test]
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
- name: Checkout code
uses: actions/checkout@v3
- name: Backend | Locales
run: cd backend && yarn locales

View File

@ -21,7 +21,7 @@ jobs:
list-files: shell
build:
name: typecheck and lint - Config-Schema
name: typecheck - Config-Schema
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.docker-compose == 'true'
needs: files-changed
runs-on: ubuntu-latest
@ -29,15 +29,12 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune config with turbos help
run: turbo prune config-schema
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
run: bun install --filter config-schema --frozen-lockfile
- name: typecheck and lint
run: cd out && turbo typecheck lint
- name: typecheck
run: cd config-schema && yarn typecheck

View File

@ -30,8 +30,8 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Database | Build 'up' image
run: docker build --target up -t "gradido/database:up" -f database/Dockerfile .
- name: Database | Build image
run: docker build --target build -t "gradido/database:build" -f database/Dockerfile .
database_migration_test:
if: needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true'
@ -50,23 +50,19 @@ jobs:
- name: Database | docker-compose
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach mariadb
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune database with turbos help
run: turbo prune database
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
run: |
bun install --filter database --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Wait for MariaDB to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Database | up
run: cd out && turbo up
run: turbo up
- name: Database | reset
run: cd out && turbo reset
run: turbo reset
lint:
if: needs.files-changed.outputs.database == 'true'
@ -77,14 +73,13 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune database with turbos help
run: turbo prune database
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
run: |
bun install --filter database --frozen-lockfile
bun install --global turbo@^2
- name: Database | Lint
run: cd out && turbo lint
- name: Database | build & typecheck
run: turbo database#build database#typecheck

View File

@ -31,16 +31,14 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Build 'test' image (typecheck & build)
run: docker build --target test -t "gradido/dht-node:test" -f dht-node/Dockerfile .
- name: Build 'production' image
run: docker build --target production -t "gradido/dht-node:production" -f dht-node/Dockerfile .
unit_test:
name: Unit Tests - DHT Node
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.dht_node == 'true' || needs.files-changed.outputs.docker-compose == 'true'
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
@ -48,34 +46,19 @@ jobs:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune dht-node with turbos help
run: turbo prune dht-node
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Wait for MariaDB to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: run unit test & lint & build
id: test
run: |
cd out && turbo dht-node#lint dht-node#test
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
bun install --filter dht-node --frozen-lockfile
bun install --global --no-save turbo@^2
- name: run unit test & build & typecheck
run: turbo dht-node#test dht-node#build dht-node#typecheck
lint:
name: Lint - DHT Node
if: needs.files-changed.outputs.dht_node == 'true'
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi

View File

@ -27,9 +27,6 @@ jobs:
bun install
sudo cp ./nginx/e2e-test.conf /etc/nginx/sites-available/default
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Boot up test system | seed backend
run: bun turbo seed
@ -43,8 +40,9 @@ jobs:
cd backend
cp .env.test_e2e .env
cd ..
bun turbo backend#build frontend#build
bun turbo backend#start frontend#start &
bun turbo backend#build
bun turbo frontend#build
bun turbo backend#start frontend#start --env-mode=loose &
- name: End-to-end tests | prepare
run: |
@ -52,7 +50,7 @@ jobs:
chmod +x /opt/cucumber-json-formatter
sudo ln -fs /opt/cucumber-json-formatter /usr/bin/cucumber-json-formatter
cd e2e-tests/
bun install
bun install --production
- name: wait for frontend and backend to be ready
run: |
@ -64,8 +62,10 @@ jobs:
sudo nginx -t
sudo systemctl start nginx
- name: wait for nginx to be ready
run: until nc -z 127.0.0.1 80; do echo waiting for nginx; sleep 1; done;
- name: wait for nginx and mailserver to be ready
run: |
until nc -z 127.0.0.1 80; do echo waiting for nginx; sleep 1; done;
until nc -z 127.0.0.1 1025; do echo waiting for mailserver; sleep 1; done;
- name: End-to-end tests | run tests
id: e2e-tests

View File

@ -32,15 +32,13 @@ jobs:
uses: actions/checkout@v3
- name: Build `test` image
run: docker build --target test -t "gradido/federation:test" -f federation/Dockerfile .
run: docker build --target production -t "gradido/federation:production" -f federation/Dockerfile .
unit_test:
name: Unit Tests - Federation
if: needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.federation == 'true' || needs.files-changed.outputs.mariadb == 'true'
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
@ -53,29 +51,14 @@ jobs:
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune federation with turbos help
run: turbo prune federation
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Wait for MariaDB to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
run: |
bun install --filter federation --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Federation | Unit tests
id: test
run: |
cd out && turbo federation#lint federation#test
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint:
name: Lint - Federation
if: needs.files-changed.outputs.federation == 'true'
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
run: turbo federation#test federation#build federation#typecheck

View File

@ -41,8 +41,6 @@ jobs:
name: Unit Tests - Frontend
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
@ -50,46 +48,64 @@ jobs:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune frontend with turbos help
run: turbo prune frontend
uses: actions/checkout@v4
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
run: bun install --filter frontend --frozen-lockfile
- name: Frontend | Unit tests
id: test
run: |
cd out && turbo frontend#test frontend#lint
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
run: cd frontend && yarn test
lint:
if: needs.files-changed.outputs.frontend == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.frontend == 'true'
name: Lint - Frontend
needs: [files-changed, unit_test]
needs: files-changed
runs-on: ubuntu-latest
outputs:
success: ${{ steps.lint.outputs.success }}
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v4
- name: install bun
uses: oven-sh/setup-bun@v2
- name: install dependencies
run: |
bun install --filter frontend --frozen-lockfile
bun install --global --no-save turbo@^2
- name: Frontend | Lint
id: lint
run: |
turbo frontend#lint
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
stylelint:
if: needs.files-changed.outputs.frontend == 'true'
name: Stylelint - Frontend
needs: [files-changed, unit_test]
needs: [files-changed, lint]
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
run: if [ "${{ needs.lint.outputs.success }}" != "true" ]; then exit 1; fi
locales:
if: needs.files-changed.outputs.frontend == 'true'
name: Locales - Frontend
needs: [files-changed, unit_test]
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
- name: Checkout code
uses: actions/checkout@v3
- name: Frontend | Locales
run: cd frontend && yarn locales

View File

@ -147,7 +147,7 @@ COPY --chown=app:app --from=build /app/backend ./backend
COPY --chown=app:app --from=build /app/frontend ./frontend
COPY --chown=app:app --from=build /app/admin ./admin
COPY --chown=app:app --from=build /app/database ./database
COPY --chown=app:app --from=build /app/config ./config
COPY --chown=app:app --from=build /app/config-schema ./config-schema
COPY --chown=app:app --from=build /app/federation ./federation
COPY --chown=app:app --from=build /app/dht-node ./dht-node
@ -158,4 +158,39 @@ EXPOSE ${FRONTEND_MODULE_PORT}
EXPOSE ${ADMIN_MODULE_PORT}
# Command to start
CMD ["turbo", "start", "--env-mode=loose"]
CMD ["turbo", "start", "--env-mode=loose"]
##################################################################################
# FINAL PRODUCTION IMAGE #########################################################
##################################################################################
FROM node:18.20.7-alpine3.21 as production-slim
ENV TURBO_CACHE_DIR=/tmp/turbo
ENV DOCKER_WORKDIR="/app"
ENV NODE_ENV="production"
ENV DB_HOST=mariadb
WORKDIR ${DOCKER_WORKDIR}
# Expose ports
EXPOSE ${BACKEND_PORT}
EXPOSE ${FEDERATION_PORT}
EXPOSE ${FRONTEND_MODULE_PORT}
EXPOSE ${ADMIN_MODULE_PORT}
# Copy only the build artifacts from the previous build stage
COPY --chown=app:app --from=build /app/backend/build ./backend/build
COPY --chown=app:app --from=build /app/backend/locales ./backend/locales
COPY --chown=app:app --from=build /app/backend/log4js-config.json ./backend/log4js-config.json
COPY --chown=app:app --from=build /app/dht-node/build ./dht-node/build
COPY --chown=app:app --from=build /app/dht-node/log4js-config.json ./dht-node/log4js-config.json
COPY --chown=app:app --from=build /app/federation/build ./federation/build
COPY --chown=app:app --from=build /app/federation/log4js-config.json ./federation/log4js-config.json
COPY --chown=app:app --from=build /app/frontend/build ./frontend
COPY --chown=app:app --from=build /app/admin/build ./admin
RUN yarn global add udx-native@1.5.3 sodium-native@4.0.0
CMD ["turbo", "start", "--env-mode=loose"]

View File

@ -145,6 +145,36 @@ bun install
Note that some modules are still not fully compatible with Bun. Therefore, continue using **Yarn** for development if you run into any issues.
### EMFILE: too many open files
When running
```bash
yarn docker_dev
```
or
```bash
turbo dev
```
many files are watched by the various services, which can lead to the error **EMFILE: too many open files**.
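On Linux you can first try raising the open-file limit for the current shell; a minimal sketch (the value 65535 is only illustrative, and the hard limit or Docker's `ulimits` option may need to be adjusted separately):
```bash
# show the current soft limit for open file descriptors
ulimit -n
# raise the limit for this shell session (illustrative value)
ulimit -n 65535
```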
If increasing the ulimit does not help, consider starting only the services you are actively working on in dev mode and the rest in production mode.
For example, if you are only working on the frontend, you can start it in dev mode and everything else in production mode:
```bash
yarn docker_dev frontend
```
and in another shell
```bash
yarn docker backend admin database nginx --no-deps
```
or locally with turbo
```bash
turbo frontend#dev backend#start admin#start --env-mode=loose
```
Tip: for a local setup, run a local nginx server with a configuration similar to the Docker one ([nginx.conf](./nginx/gradido.conf)), but replace the Docker host names with localhost.
## Services defined in this package
- [frontend](./frontend) Wallet frontend

View File

@ -1,4 +1,5 @@
node_modules
.git
.gitignore
!.eslintignore
!.env.git

View File

@ -3,22 +3,27 @@
##################################################################################
FROM node:18.20.7-alpine3.21 as base
ENV TURBO_CACHE_DIR=/tmp/turbo
# ENVs (available in production as well, can be overwritten by command line or env file)
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
ENV DOCKER_WORKDIR="/app"
## We cannot do `$(date -u +'%Y-%m-%dT%H:%M:%SZ')` here so we use unix timestamp=0
ENV BUILD_DATE="1970-01-01T00:00:00.00Z"
## We cannot do $(npm run version).${BUILD_NUMBER} here so we default to 0.0.0.0
# TODO: get the actual git commit hash into docker
ARG BUILD_VERSION
ENV BUILD_VERSION=${BUILD_VERSION}
ENV BUILD_VERSION=${BUILD_VERSION:-'broken'}
ARG BUILD_COMMIT
ENV BUILD_COMMIT=${BUILD_COMMIT}
ENV BUILD_COMMIT=${BUILD_COMMIT:-'deadbeefdeadbeefdeadbeefdeadbeefdeadbeef'}
ARG BUILD_COMMIT_SHORT
ENV BUILD_COMMIT_SHORT=${BUILD_COMMIT_SHORT}
ENV BUILD_COMMIT_SHORT=${BUILD_COMMIT_SHORT:-'deadbeef'}
## SET NODE_ENV
ARG NODE_ENV="production"
ARG NODE_ENV=production
ENV NODE_ENV=${NODE_ENV}
## App relevant Envs
ENV PORT="8080"
## Timezone
ENV TZ=UTC
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
@ -36,6 +41,7 @@ LABEL maintainer="support@gradido.net"
# Install Additional Software
## install: git
#RUN apk --no-cache add git
# RUN bun add --global yarn@1.22.20
# Settings
## Expose Container Port
@ -45,44 +51,45 @@ EXPOSE ${PORT}
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
##################################################################################
# BUN ############################################################################
##################################################################################
FROM base as bun-base
RUN apk update && apk add --no-cache curl tar bash
RUN curl -fsSL https://bun.sh/install | bash
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
##################################################################################
# Base with turbo ################################################################
# Development ####################################################################
##################################################################################
FROM base as turbo-base
FROM bun-base AS development
# used for getting the git commit hash directly from .git
RUN apk update && apk add --no-cache git
# Run command
CMD /bin/sh -c "bun install --filter admin --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2 \
&& turbo admin#dev --env-mode=loose"
RUN apk update && apk add --no-cache libc6-compat \
&& yarn global add turbo@^2 \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
##################################################################################
# BUILDER (create partly monorepo only with data needed by admin) ################
# Build ##########################################################################
##################################################################################
FROM turbo-base as builder
FROM bun-base as build
COPY --chown=app:app . .
RUN turbo prune admin --docker
RUN bun install --filter admin --no-cache --frozen-lockfile \
&& bun install --global turbo@^2
##################################################################################
# INSTALLER (create production image) ############################################
##################################################################################
FROM turbo-base AS installer
RUN turbo admin#build --env-mode=loose
# First install the dependencies (as they change less often)
COPY --chown=app:app --from=builder /app/out/json/ .
RUN yarn install --frozen-lockfile --production=false \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# Build the project
COPY --chown=app:app --from=builder /app/out/full/ .
RUN turbo build --env-mode=loose
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM installer as test
FROM build as test
# Run command
CMD /bin/sh -c "turbo admin#test --env-mode=loose"
@ -90,10 +97,11 @@ CMD /bin/sh -c "turbo admin#test --env-mode=loose"
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
##################################################################################
FROM lipanski/docker-static-website:latest as production
FROM nginx:1.28.0-alpine3.21-slim as production
# tiny static webserver
# https://lipanski.com/posts/smallest-docker-image-static-website
COPY ./nginx/admin.conf /etc/nginx/conf.d/default.conf
WORKDIR /app
# copy built frontend files
COPY --from=installer /app/admin/build/ ./admin/
COPY --from=build /app/admin/build/ ./admin/

View File

@ -1,4 +1,4 @@
const {
import {
APP_VERSION,
BUILD_COMMIT,
BUILD_COMMIT_SHORT,
@ -10,8 +10,8 @@ const {
NODE_ENV,
OPENAI_ACTIVE,
PRODUCTION,
} = require('config-schema')
const Joi = require('joi')
} from 'config-schema'
import Joi from 'joi'
module.exports = Joi.object({
APP_VERSION,

View File

@ -6,17 +6,20 @@ import Components from 'unplugin-vue-components/vite'
import IconsResolve from 'unplugin-icons/resolver'
import { BootstrapVueNextResolver } from 'bootstrap-vue-next'
import EnvironmentPlugin from 'vite-plugin-environment'
import schema from './src/config/schema'
import { execSync } from 'node:child_process'
import { existsSync, constants } from 'node:fs'
import { validate, browserUrls } from 'config-schema'
import path from 'node:path'
import { createRequire } from 'node:module'
import dotenv from 'dotenv'
dotenv.config() // load env vars from .env
const require = createRequire(import.meta.url)
const CONFIG = require('./src/config')
export default defineConfig(async ({ command }) => {

backend/.gitignore (1 line changed, vendored)
View File

@ -2,6 +2,7 @@
/.env
/.env.bak
/build/
/locales/
package-json.lock
coverage
# emacs

View File

@ -1,7 +1,9 @@
##################################################################################
# BASE ###########################################################################
##################################################################################
FROM node:18.20.7-alpine3.21 as base
FROM node:18.20.7-bookworm-slim as base
#FROM node:18.20.7-alpine3.21 as base
# switch back to alpine once sodium-native ships with a native alpine build
# ENVs (available in production as well, can be overwritten by command line or env file)
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
@ -13,9 +15,12 @@ ENV BUILD_VERSION="0.0.0.0"
## We cannot do `$(git rev-parse --short HEAD)` here so we default to 0000000
ENV BUILD_COMMIT="0000000"
## SET NODE_ENV
ENV NODE_ENV="production"
ENV NODE_ENV=production
## App relevant Envs
ENV PORT="4000"
## Timezone
ENV TZ=UTC
ENV DB_HOST=mariadb
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
@ -34,6 +39,7 @@ LABEL maintainer="support@gradido.net"
## install: git
#RUN apk --no-cache add git
# Settings
## Expose Container Port
EXPOSE ${PORT}
@ -42,51 +48,59 @@ EXPOSE ${PORT}
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
##################################################################################
# BUN ############################################################################
##################################################################################
FROM base as bun-base
RUN apt update && apt install -y --no-install-recommends ca-certificates curl bash unzip
#RUN apk update && apk add --no-cache curl tar bash
RUN curl -fsSL https://bun.sh/install | bash
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
##################################################################################
# Base with turbo ################################################################
# Development ####################################################################
##################################################################################
FROM base as turbo-base
FROM bun-base AS development
RUN apk update && apk add --no-cache libc6-compat \
&& yarn global add turbo@^2 \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# Run command
CMD /bin/sh -c "bun install --filter backend --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2 \
&& turbo backend#dev --env-mode=loose"
##################################################################################
# BUILDER (create partly monorepo only with data needed by backend) ##############
# Basic Image with bun setup and project and source code #########################
##################################################################################
FROM turbo-base as builder
FROM bun-base as bun-base-src
COPY --chown=app:app . .
RUN turbo prune backend --docker
##################################################################################
# INSTALLER (create production image) ############################################
# Build ##########################################################################
##################################################################################
FROM turbo-base AS installer
FROM bun-base-src as build
# First install the dependencies (as they change less often)
COPY --chown=app:app --from=builder /app/out/json/ .
RUN yarn install --frozen-lockfile --production=false \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# Build the project
COPY --chown=app:app --from=builder /app/out/full/ .
RUN turbo build
RUN bun install --filter backend --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2
RUN turbo backend#build backend#typecheck --env-mode=loose
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM installer as test
ENV DB_HOST=mariadb
FROM build as test
# Run command
CMD /bin/sh -c "turbo backend#test --env-mode=loose"
##################################################################################
# install only node modules needed for running bundle ############################
##################################################################################
FROM bun-base-src as production-node-modules
# add node_modules from production_node_modules
RUN bun install --filter backend --production --frozen-lockfile --no-cache \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& ./scripts/clean-prebuilds.sh
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
@ -94,10 +108,15 @@ CMD /bin/sh -c "turbo backend#test --env-mode=loose"
FROM base as production
# Copy "binary"-files from build image
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/backend/build/src/index.js ./index.js
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/build/index.js ./index.js
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/build/worker.js ./worker.js
# add node_modules from production_node_modules
COPY --chown=app:app --from=production-node-modules ${DOCKER_WORKDIR}/node_modules ./node_modules
# Copy log4js-config.json to provide log configuration
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/backend/log4js-config.json ./log4js-config.json
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/log4js-config.json ./log4js-config.json
# Copy locales
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/backend/locales ./locales
# Run command
CMD ["node", "index.js"]
CMD ["node", "index.js"]

View File

@ -2,10 +2,6 @@
## Project setup
```bash
yarn install
```
## Seed DB
```bash

backend/esbuild.config.ts (new file, 16 lines)
View File

@ -0,0 +1,16 @@
import { esbuildDecorators } from '@anatine/esbuild-decorators'
import { build } from 'esbuild'
build({
// bundle the API server and the password worker as separate entry points
entryPoints: ['src/index.ts', 'src/password/worker.js'],
outdir: 'build',
platform: 'node',
// matches the Node version of the Docker base images
target: 'node18.20.7',
bundle: true,
// keep original function and class names even when minifying
keepNames: true,
entryNames: '[name]',
// legalComments: 'inline',
// left unbundled: sodium-native is a native addon, email-templates resolves template files at runtime
external: ['sodium-native', 'email-templates'],
// transpile decorator/metadata code (type-graphql, typeorm), which esbuild does not emit on its own
plugins: [esbuildDecorators()],
minify: true,
})

View File

@ -1,69 +1,44 @@
{
"name": "backend",
"version": "2.5.2",
"description": "Gradido unified backend providing an API-Service for Gradido Transactions",
"main": "src/index.ts",
"repository": "https://github.com/gradido/gradido/backend",
"author": "Gradido Academy - https://www.gradido.net",
"license": "Apache-2.0",
"private": false,
"description": "Gradido unified backend providing an API-Service for Gradido Transactions",
"repository": "https://github.com/gradido/gradido/backend",
"license": "Apache-2.0",
"author": "Gradido Academy - https://www.gradido.net",
"main": "src/index.ts",
"scripts": {
"build": "tsc --build && mkdirp build/src/emails/templates/ && ncp src/emails/templates build/src/emails/templates && mkdirp build/src/locales/ && ncp src/locales build/src/locales",
"build": "ts-node ./esbuild.config.ts && mkdirp build/templates/ && ncp src/emails/templates build/templates && mkdirp locales/ && ncp src/locales locales",
"clean": "tsc --build --clean",
"start": "cross-env TZ=UTC TS_NODE_BASEURL=./build node -r tsconfig-paths/register build/src/index.js",
"dev": "cross-env TZ=UTC nodemon -w src --ext ts,pug,json,css --exec ts-node -r tsconfig-paths/register src/index.ts",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write",
"lint:fix:unsafe": "biome check --fix --unsafe",
"dev": "cross-env TZ=UTC nodemon -w src --ext ts,pug,json,css -r tsconfig-paths/register src/index.ts",
"test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_backend jest --runInBand --forceExit --detectOpenHandles",
"seed": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/seeds/index.ts",
"klicktipp": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/util/executeKlicktipp.ts",
"gmsusers": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/apis/gms/ExportUsers.ts",
"humhubUserExport": "cross-env TZ=UTC NODE_ENV=development ts-node -r tsconfig-paths/register src/apis/humhub/ExportUsers.ts",
"locales": "scripts/sort.sh"
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write",
"lint:fix:unsafe": "biome check --fix --unsafe",
"locales": "scripts/sort.sh",
"start": "cross-env TZ=UTC NODE_ENV=production node build/index.js",
"typecheck": "tsc --noEmit"
},
"nodemonConfig": {
"ignore": ["**/*.test.ts"]
},
"dependencies": {
"apollo-server-express": "^2.25.2",
"await-semaphore": "^0.1.3",
"axios": "^0.21.1",
"class-validator": "^0.13.1",
"config-schema": "*",
"cors": "^2.8.5",
"cross-env": "^7.0.3",
"database": "*",
"decimal.js-light": "^2.5.1",
"dotenv": "^10.0.0",
"email-templates": "^10.0.1",
"express": "^4.17.21",
"express-slow-down": "^2.0.1",
"graphql": "^15.10.1",
"graphql-parse-resolve-info": "^4.13.1",
"graphql-request": "5.0.0",
"graphql-type-json": "0.3.2",
"helmet": "^5.1.1",
"i18n": "^0.15.1",
"joi": "^17.13.3",
"jose": "^4.14.4",
"lodash.clonedeep": "^4.5.0",
"log4js": "^6.7.1",
"mysql2": "^2.3.0",
"nodemailer": "^6.6.5",
"openai": "^4.87.3",
"pug": "^3.0.2",
"random-bigint": "^0.0.1",
"reflect-metadata": "^0.1.13",
"sodium-native": "^3.4.1",
"type-graphql": "^1.1.1",
"typed-rest-client": "^1.8.11",
"typeorm": "^0.3.16",
"uuid": "^8.3.2",
"workerpool": "^9.2.0",
"xregexp": "^5.1.1"
"sodium-native": "^3.4.1"
},
"devDependencies": {
"@anatine/esbuild-decorators": "^0.2.19",
"@biomejs/biome": "1.9.4",
"@swc/cli": "^0.7.3",
"@swc/core": "^1.11.24",
"@swc/helpers": "^0.5.17",
"@types/email-templates": "^10.0.4",
"@types/express": "^4.17.21",
"@biomejs/biome": "1.9.4",
"@types/faker": "^5.5.9",
"@types/i18n": "^0.13.4",
"@types/jest": "27.0.2",
@ -72,24 +47,53 @@
"@types/nodemailer": "^6.4.4",
"@types/sodium-native": "^2.3.5",
"@types/uuid": "^8.3.4",
"apollo-server-express": "^2.25.2",
"apollo-server-testing": "^2.25.2",
"await-semaphore": "^0.1.3",
"axios": "^0.21.1",
"class-validator": "^0.13.1",
"config-schema": "*",
"cors": "^2.8.5",
"database": "*",
"decimal.js-light": "^2.5.1",
"dotenv": "^10.0.0",
"esbuild": "^0.25.2",
"express": "^4.17.21",
"express-slow-down": "^2.0.1",
"faker": "^5.5.3",
"graphql": "^15.10.1",
"graphql-parse-resolve-info": "^4.13.1",
"graphql-request": "5.0.0",
"graphql-tag": "^2.12.6",
"graphql-type-json": "0.3.2",
"helmet": "^5.1.1",
"i18n": "^0.15.1",
"jest": "27.2.4",
"joi": "^17.13.3",
"jose": "^4.14.4",
"klicktipp-api": "^1.0.2",
"lodash.clonedeep": "^4.5.0",
"log4js": "^6.7.1",
"mkdirp": "^3.0.1",
"ncp": "^2.0.0",
"nodemailer": "^6.6.5",
"nodemon": "^2.0.7",
"openai": "^4.87.3",
"prettier": "^3.5.3",
"pug": "^3.0.2",
"random-bigint": "^0.0.1",
"reflect-metadata": "^0.1.13",
"regenerator-runtime": "^0.14.1",
"ts-jest": "27.0.5",
"ts-node": "^10.9.2",
"tsconfig-paths": "^4.1.1",
"typescript": "^4.9.5"
},
"nodemonConfig": {
"ignore": [
"**/*.test.ts"
]
"type-graphql": "^1.1.1",
"typed-rest-client": "^1.8.11",
"typeorm": "^0.3.16",
"typescript": "^4.9.5",
"uuid": "^8.3.2",
"workerpool": "^9.2.0",
"xregexp": "^5.1.1"
},
"engines": {
"node": ">=18"

View File

@ -35,8 +35,14 @@ const server = {
}
const database = {
DB_CONNECT_RETRY_COUNT: process.env.DB_CONNECT_RETRY_COUNT
? Number.parseInt(process.env.DB_CONNECT_RETRY_COUNT)
: 15,
DB_CONNECT_RETRY_DELAY_MS: process.env.DB_CONNECT_RETRY_DELAY_MS
? Number.parseInt(process.env.DB_CONNECT_RETRY_DELAY_MS)
: 500,
DB_HOST: process.env.DB_HOST ?? 'localhost',
DB_PORT: process.env.DB_PORT ? parseInt(process.env.DB_PORT) : 3306,
DB_PORT: process.env.DB_PORT ? Number.parseInt(process.env.DB_PORT) : 3306,
DB_USER: process.env.DB_USER ?? 'root',
DB_PASSWORD: process.env.DB_PASSWORD ?? '',
DB_DATABASE: process.env.DB_DATABASE ?? 'gradido_community',
@ -76,12 +82,12 @@ const community = {
const loginServer = {
LOGIN_APP_SECRET: process.env.LOGIN_APP_SECRET ?? '21ffbbc616fe',
LOGIN_SERVER_KEY: process.env.LOGIN_SERVER_KEY ?? 'a51ef8ac7ef1abf162fb7a65261acd7a',
USE_CRYPTO_WORKER: process.env.USE_CRYPTO_WORKER ?? false,
USE_CRYPTO_WORKER: process.env.USE_CRYPTO_WORKER === 'true',
}
const email = {
EMAIL: process.env.EMAIL === 'true' || false,
EMAIL_TEST_MODUS: process.env.EMAIL_TEST_MODUS === 'true' || false,
EMAIL: process.env.EMAIL === 'true',
EMAIL_TEST_MODUS: process.env.EMAIL_TEST_MODUS === 'true',
EMAIL_TEST_RECEIVER: process.env.EMAIL_TEST_RECEIVER ?? 'stage1@gradido.net',
EMAIL_USERNAME: process.env.EMAIL_USERNAME ?? '',
EMAIL_SENDER: process.env.EMAIL_SENDER ?? 'info@gradido.net',

View File

@ -3,6 +3,8 @@ import {
COMMUNITY_NAME,
COMMUNITY_SUPPORT_MAIL,
COMMUNITY_URL,
DB_CONNECT_RETRY_COUNT,
DB_CONNECT_RETRY_DELAY_MS,
DB_DATABASE,
DB_HOST,
DB_PASSWORD,
@ -38,6 +40,8 @@ export const schema = Joi.object({
DB_USER,
DB_VERSION,
DB_DATABASE,
DB_CONNECT_RETRY_COUNT,
DB_CONNECT_RETRY_DELAY_MS,
DECAY_START_TIME,
GDT_API_URL,
GDT_ACTIVE,

View File

@ -1,5 +1,3 @@
import path from 'path'
import { Decimal } from 'decimal.js-light'
import { GraphQLSchema } from 'graphql'
import { buildSchema } from 'type-graphql'
@ -7,12 +5,39 @@ import { buildSchema } from 'type-graphql'
import { Location } from '@model/Location'
import { isAuthorized } from './directive/isAuthorized'
import { AiChatResolver } from './resolver/AiChatResolver'
import { BalanceResolver } from './resolver/BalanceResolver'
import { CommunityResolver } from './resolver/CommunityResolver'
import { ContributionLinkResolver } from './resolver/ContributionLinkResolver'
import { ContributionMessageResolver } from './resolver/ContributionMessageResolver'
import { ContributionResolver } from './resolver/ContributionResolver'
import { GdtResolver } from './resolver/GdtResolver'
import { KlicktippResolver } from './resolver/KlicktippResolver'
import { ProjectBrandingResolver } from './resolver/ProjectBrandingResolver'
import { StatisticsResolver } from './resolver/StatisticsResolver'
import { TransactionLinkResolver } from './resolver/TransactionLinkResolver'
import { TransactionResolver } from './resolver/TransactionResolver'
import { UserResolver } from './resolver/UserResolver'
import { DecimalScalar } from './scalar/Decimal'
import { LocationScalar } from './scalar/Location'
export const schema = async (): Promise<GraphQLSchema> => {
return buildSchema({
resolvers: [path.join(__dirname, 'resolver', `!(*.test).{js,ts}`)],
resolvers: [
AiChatResolver,
BalanceResolver,
CommunityResolver,
ContributionLinkResolver,
ContributionMessageResolver,
ContributionResolver,
GdtResolver,
KlicktippResolver,
ProjectBrandingResolver,
StatisticsResolver,
TransactionLinkResolver,
TransactionResolver,
UserResolver,
],
authChecker: isAuthorized,
scalarsMap: [
{ type: Decimal, scalar: DecimalScalar },

View File

@ -1,3 +1,4 @@
import 'reflect-metadata'
import { CONFIG } from './config'
import { startValidateCommunities } from './federation/validateCommunities'
import { createServer } from './server/createServer'

View File

@ -0,0 +1,6 @@
export function SecretKeyCryptographyCreateKeyFunc(
salt: string,
password: string,
configLoginAppSecret: Buffer,
configLoginServerKey: Buffer,
): bigint

View File

@ -1,8 +1,4 @@
import { worker } from 'workerpool'
import { CONFIG } from '@/config'
import {
const {
crypto_box_SEEDBYTES,
crypto_hash_sha512_BYTES,
crypto_hash_sha512_STATEBYTES,
@ -13,14 +9,14 @@ import {
crypto_pwhash_SALTBYTES,
crypto_shorthash,
crypto_shorthash_BYTES,
} from 'sodium-native'
} = require('sodium-native')
export const SecretKeyCryptographyCreateKey = (
salt: string,
password: string,
configLoginAppSecret: Buffer,
configLoginServerKey: Buffer,
): bigint => {
exports.SecretKeyCryptographyCreateKeyFunc = (
salt,
password,
configLoginAppSecret,
configLoginServerKey,
) => {
const state = Buffer.alloc(crypto_hash_sha512_STATEBYTES)
crypto_hash_sha512_init(state)
crypto_hash_sha512_update(state, Buffer.from(salt))
@ -45,9 +41,3 @@ export const SecretKeyCryptographyCreateKey = (
crypto_shorthash(encryptionKeyHash, encryptionKey, configLoginServerKey)
return encryptionKeyHash.readBigUInt64LE()
}
if (CONFIG.USE_CRYPTO_WORKER === true && typeof process.send === 'function') {
worker({
SecretKeyCryptographyCreateKey,
})
}

View File

@ -1,5 +1,5 @@
import { cpus } from 'os'
import path from 'path'
import { cpus } from 'node:os'
import path from 'node:path'
import { User } from 'database'
import { Pool, pool } from 'workerpool'
@ -12,7 +12,7 @@ import { backendLogger as logger } from '@/server/logger'
import { crypto_shorthash_KEYBYTES } from 'sodium-native'
import { SecretKeyCryptographyCreateKey as SecretKeyCryptographyCreateKeySync } from './EncryptionWorker'
import { SecretKeyCryptographyCreateKeyFunc } from './EncryptionWorker.js'
const configLoginAppSecret = Buffer.from(CONFIG.LOGIN_APP_SECRET, 'hex')
const configLoginServerKey = Buffer.from(CONFIG.LOGIN_SERVER_KEY, 'hex')
@ -20,13 +20,9 @@ const configLoginServerKey = Buffer.from(CONFIG.LOGIN_SERVER_KEY, 'hex')
let encryptionWorkerPool: Pool | undefined
if (CONFIG.USE_CRYPTO_WORKER === true) {
encryptionWorkerPool = pool(
path.join(__dirname, '..', '..', 'build', 'src', 'password', '/EncryptionWorker.js'),
{
// TODO: put maxQueueSize into config
maxQueueSize: 30 * cpus().length,
},
)
encryptionWorkerPool = pool(path.join(__dirname, 'worker.js'), {
maxQueueSize: 30 * cpus().length,
})
}
// We will reuse this for changePassword
@ -52,22 +48,20 @@ export const SecretKeyCryptographyCreateKey = async (
crypto_shorthash_KEYBYTES,
)
}
let result: Promise<bigint>
let result: bigint
if (encryptionWorkerPool) {
result = (await encryptionWorkerPool.exec('SecretKeyCryptographyCreateKey', [
result = await encryptionWorkerPool.exec('SecretKeyCryptographyCreateKeyFunc', [
salt,
password,
configLoginAppSecret,
configLoginServerKey,
])) as Promise<bigint>
])
} else {
result = Promise.resolve(
SecretKeyCryptographyCreateKeySync(
salt,
password,
configLoginAppSecret,
configLoginServerKey,
),
result = SecretKeyCryptographyCreateKeyFunc(
salt,
password,
configLoginAppSecret,
configLoginServerKey,
)
}
return result

View File

@ -0,0 +1,6 @@
const { worker } = require('workerpool')
const { SecretKeyCryptographyCreateKeyFunc } = require('./EncryptionWorker')
worker({
SecretKeyCryptographyCreateKeyFunc,
})

View File

@ -7,8 +7,7 @@ import { Connection as DbConnection } from 'typeorm'
import { CONFIG } from '@/config'
import { schema } from '@/graphql/schema'
import { checkDBVersion } from '@/typeorm/DBVersion'
import { Connection } from '@/typeorm/connection'
import { checkDBVersionUntil } from '@/typeorm/DBVersion'
import { elopageWebhook } from '@/webhook/elopage'
import { gmsWebhook } from '@/webhook/gms'
@ -35,19 +34,13 @@ export const createServer = async (
logger.addContext('user', 'unknown')
logger.debug('createServer...')
// open mysql connection
const con = await Connection.getInstance()
if (!con?.isConnected) {
logger.fatal(`Couldn't open connection to database!`)
throw new Error(`Fatal: Couldn't open connection to database`)
}
// open mariadb connection, retry connecting with mariadb
// check for correct database version
const dbVersion = await checkDBVersion(CONFIG.DB_VERSION)
if (!dbVersion) {
logger.fatal('Fatal: Database Version incorrect')
throw new Error('Fatal: Database Version incorrect')
}
// retry max CONFIG.DB_CONNECT_RETRY_COUNT times, wait CONFIG.DB_CONNECT_RETRY_DELAY ms between tries
const con = await checkDBVersionUntil(
CONFIG.DB_CONNECT_RETRY_COUNT,
CONFIG.DB_CONNECT_RETRY_DELAY_MS,
)
// Express Server
const app = express()

View File

@ -1,5 +1,4 @@
import path from 'path'
import path from 'node:path'
import i18n from 'i18n'
import { backendLogger } from './logger'

View File

@ -2,6 +2,33 @@ import { Migration } from 'database'
import { backendLogger as logger } from '@/server/logger'
import { CONFIG } from '@/config'
import { Connection } from '@/typeorm/connection'
import { Connection as DbConnection } from 'typeorm'
async function checkDBVersionUntil(maxRetries: number, delayMs: number): Promise<DbConnection> {
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
const connection = await Connection.getInstance()
if (connection?.isInitialized) {
const dbVersion = await checkDBVersion(CONFIG.DB_VERSION)
if (dbVersion) {
logger.info('Database connection and version check succeeded.')
return connection
}
}
} catch (err) {
logger.warn(`Attempt ${attempt}: Waiting for DB...`, err)
}
await new Promise((resolve) => setTimeout(resolve, delayMs))
}
logger.fatal(
`Fatal: Could not connect to database or version check failed after ${maxRetries} attempts.`,
)
throw new Error('Fatal: Database not ready.')
}
const getDBVersion = async (): Promise<string | null> => {
try {
const [dbVersion] = await Migration.find({ order: { version: 'DESC' }, take: 1 })
@ -25,4 +52,4 @@ const checkDBVersion = async (DB_VERSION: string): Promise<boolean> => {
return true
}
export { checkDBVersion, getDBVersion }
export { checkDBVersion, getDBVersion, checkDBVersionUntil }

View File

@ -80,9 +80,13 @@
/* Experimental Options */
"experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
"emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
"resolveJsonModule": true,
/* Advanced Options */
"skipLibCheck": true, /* Skip type checking of declaration files. */
"forceConsistentCasingInFileNames": true, /* Disallow inconsistently-cased references to the same file. */
},
"ts-node": {
"swc": true
}
}

View File

@ -9,6 +9,9 @@
"lint": {
"dependsOn": ["locales", "database#build"]
},
"typecheck": {
"dependsOn": ["database#build", "config-schema#build"]
},
"test": {
"dependsOn": ["database#up:backend_test", "config-schema#build", "database#build"]
},

View File

@ -4,7 +4,14 @@
"files": {
"ignoreUnknown": false,
"ignore": ["build", "node_modules", "coverage"],
"include": ["./src/**/*.js", "./src/**/*.ts", "./entity/**/*.ts", "./logging/**/*.ts", "./migrations/**/*.ts"]
"include": [
"package.json",
"./src/**/*.js",
"./src/**/*.ts",
"./entity/**/*.ts",
"./logging/**/*.ts",
"./migrations/**/*.ts"
]
},
"formatter": {
"enabled": true,

673
bun.lock

File diff suppressed because it is too large Load Diff

4
config-schema/README.md Normal file
View File

@ -0,0 +1,4 @@
# config-schema
## Bun-Compatibility
Full bun compatible

View File

@ -16,6 +16,7 @@
"private": true,
"scripts": {
"build": "esbuild src/index.ts --outdir=build --platform=node --target=node18.20.7 --bundle --packages=external",
"build:bun": "bun build src/index.ts --outdir=build --target=bun --packages=external",
"typecheck": "tsc --noEmit",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write"
@ -23,11 +24,11 @@
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@types/node": "^17.0.21",
"esbuild": "^0.25.2",
"typescript": "^4.9.5"
},
"dependencies": {
"joi": "^17.13.3"
"joi": "^17.13.3",
"esbuild": "^0.25.2"
},
"engines": {
"node": ">=18"

View File

@ -37,6 +37,20 @@ export const DB_VERSION = Joi.string()
)
.required()
export const DB_CONNECT_RETRY_COUNT = Joi.number()
.default(15)
.min(1)
.max(1000)
.description('Number of retries to connect to the database')
.optional()
export const DB_CONNECT_RETRY_DELAY_MS = Joi.number()
.default(500)
.min(100)
.max(10000)
.description('Delay in milliseconds between retries to connect to the database')
.optional()
export const COMMUNITY_URL = Joi.string()
.uri({ scheme: ['http', 'https'] })
.custom((value: string, helpers: Joi.CustomHelpers<string>) => {

View File

@ -14,6 +14,9 @@ ENV BUILD_VERSION="0.0.0.0"
ENV BUILD_COMMIT="0000000"
## SET NODE_ENV
ENV NODE_ENV="production"
## Timezone
ENV TZ=UTC
ENV DB_HOST=mariadb
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
@ -37,59 +40,65 @@ RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
##################################################################################
# Base with turbo ################################################################
# BUN ############################################################################
##################################################################################
FROM base as turbo-base
RUN apk update && apk add --no-cache libc6-compat \
&& yarn global add turbo@^2 \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
FROM base as bun-base
#RUN apt update && apt install -y --no-install-recommends ca-certificates curl bash unzip
RUN apk update && apk add --no-cache curl tar bash
RUN curl -fsSL https://bun.sh/install | bash
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
##################################################################################
# BUILDER (create partly monorepo only with data needed by database) #############
# Installer ######################################################################
##################################################################################
FROM turbo-base as builder
FROM bun-base as installer
COPY --chown=app:app . .
RUN turbo prune database --docker
COPY --chown=app:app ./database .
RUN bun install --production --no-cache --frozen-lockfile
##################################################################################
# INSTALLER (create production image) ############################################
# Build ##########################################################################
##################################################################################
FROM turbo-base AS installer
FROM installer as build
RUN bun install --no-cache --frozen-lockfile \
yarn build && yarn typecheck
##################################################################################
# PRODUCTION IMAGE ###############################################################
##################################################################################
FROM base as production
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/src ./src
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/migrations ./migrations
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/entity ./entity
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/node_modules ./node_modules
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/package.json ./package.json
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/tsconfig.json ./tsconfig.json
# First install the dependencies (as they change less often)
COPY --chown=app:app --from=builder /app/out/json/ .
RUN yarn install --frozen-lockfile --production=false \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# Build the project
COPY --chown=app:app --from=builder /app/out/full/ .
#RUN turbo build
##################################################################################
# TEST UP ########################################################################
##################################################################################
FROM installer as up
FROM production as up
# Run command
CMD /bin/sh -c "turbo up"
CMD /bin/sh -c "yarn up"
##################################################################################
# TEST RESET #####################################################################
##################################################################################
FROM installer as reset
FROM production as reset
# Run command
CMD /bin/sh -c "turbo reset"
CMD /bin/sh -c "yarn reset"
##################################################################################
# TEST DOWN ######################################################################
##################################################################################
FROM installer as down
FROM production as down
# Run command
CMD /bin/sh -c "turbo down"
CMD /bin/sh -c "yarn down"

View File

@ -1,39 +1,51 @@
# database
## Project setup
## Bun-Compatibility
```bash
yarn install
This module uses `TypeORM` and `ts-mysql-migrate`. Bun currently has several issues running it:
### Known Issues
1. **`Geometry` type not recognized**
`Geometry` must be imported as type:
```ts
import type { Geometry } from 'typeorm'
```
2. **Circular imports between entities**
Bun fails when two entities import each other (e.g., via @ManyToOne / @OneToMany). Node.js tolerates this, Bun does not.
3. ts-mysql-migrate **breaks**
Bun crashes due to unsupported module.parent.parent.require():
```ts
TypeError: undefined is not an object (evaluating 'module.parent.parent.require')
```
## Upgrade migrations production
## Upgrade migrations
```bash
yarn up
```
## Upgrade migrations development
```bash
yarn dev_up
```
## Downgrade migrations production
## Downgrade migrations
```bash
yarn down
```
## Downgrade migrations development
```bash
yarn dev_down
```
## Reset database
```bash
yarn dev_reset
yarn reset
```
Runs all down migrations and after this all up migrations.
## Clear database
call truncate for all tables
```bash
yarn clear
```

View File

@ -5,7 +5,7 @@ export class Migration extends BaseEntity {
@PrimaryGeneratedColumn() // This is actually not a primary column
version: number
@Column({ length: 256, nullable: true, default: null })
@Column({ type: 'varchar', length: 256, nullable: true, default: null })
fileName: string
@Column({ type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })

View File

@ -6,21 +6,21 @@ export class LoginEmailOptIn extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ name: 'user_id' })
@Column({ name: 'user_id', type: 'bigint', unsigned: true, nullable: false })
userId: number
@Column({ name: 'verification_code', type: 'bigint', unsigned: true, unique: true })
verificationCode: BigInt
@Column({ name: 'email_opt_in_type_id' })
@Column({ name: 'email_opt_in_type_id', type: 'int', unsigned: true, nullable: false })
emailOptInTypeId: number
@Column({ name: 'created', default: () => 'CURRENT_TIMESTAMP' })
@Column({ name: 'created', type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
createdAt: Date
@Column({ name: 'resend_count', default: 0 })
@Column({ name: 'resend_count', type: 'int', unsigned: true, default: 0 })
resendCount: number
@Column({ name: 'updated', default: () => 'CURRENT_TIMESTAMP' })
@Column({ name: 'updated', type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
updatedAt: Date
}

View File

@ -20,11 +20,12 @@ export class LoginElopageBuys extends BaseEntity {
@Column({ type: 'int', width: 11, name: 'product_id', nullable: true, default: null })
productId: number | null
@Column({ name: 'product_price', nullable: false })
@Column({ name: 'product_price', type: 'int', nullable: false })
productPrice: number
@Column({
name: 'payer_email',
type: 'varchar',
length: 255,
nullable: false,
charset: 'utf8',
@ -34,6 +35,7 @@ export class LoginElopageBuys extends BaseEntity {
@Column({
name: 'publisher_email',
type: 'varchar',
length: 255,
nullable: false,
charset: 'utf8',
@ -41,12 +43,12 @@ export class LoginElopageBuys extends BaseEntity {
})
publisherEmail: string
@Column({ nullable: false })
@Column({ type: 'bool', nullable: false })
payed: boolean
@Column({ name: 'success_date', nullable: false })
@Column({ name: 'success_date', type: 'datetime', nullable: false })
successDate: Date
@Column({ length: 255, nullable: false })
@Column({ type: 'varchar', length: 255, nullable: false })
event: string
}

View File

@ -7,7 +7,7 @@ export class TransactionLink extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ unsigned: true, nullable: false })
@Column({ type: 'bigint', unsigned: true, nullable: false })
userId: number
@Column({
@ -29,10 +29,10 @@ export class TransactionLink extends BaseEntity {
})
holdAvailableAmount: Decimal
@Column({ length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
memo: string
@Column({ length: 24, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 24, nullable: false, collation: 'utf8mb4_unicode_ci' })
code: string
@Column({

View File

@ -7,7 +7,7 @@ export class AdminPendingCreation extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ unsigned: true, nullable: false })
@Column({ type: 'bigint', unsigned: true, nullable: false })
userId: number
@Column({ type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
@ -16,7 +16,7 @@ export class AdminPendingCreation extends BaseEntity {
@Column({ type: 'datetime', nullable: false })
date: Date
@Column({ length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
memo: string
@Column({

View File

@ -1,3 +1,4 @@
/* eslint-disable no-use-before-define */
import { Decimal } from 'decimal.js-light'
import { BaseEntity, Column, Entity, JoinColumn, OneToOne, PrimaryGeneratedColumn } from 'typeorm'
import { DecimalTransformer } from '../../src/typeorm/DecimalTransformer'

View File

@ -7,10 +7,10 @@ export class ContributionLink extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ length: 100, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 100, nullable: false, collation: 'utf8mb4_unicode_ci' })
name: string
@Column({ length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
memo: string
@Column({ name: 'valid_from', type: 'datetime', nullable: false })
@ -28,10 +28,10 @@ export class ContributionLink extends BaseEntity {
})
amount: Decimal
@Column({ length: 12, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 12, nullable: false, collation: 'utf8mb4_unicode_ci' })
cycle: string
@Column({ name: 'max_per_cycle', unsigned: true, nullable: false, default: 1 })
@Column({ name: 'max_per_cycle', type: 'int', unsigned: true, nullable: false, default: 1 })
maxPerCycle: number
@Column({
@ -77,10 +77,10 @@ export class ContributionLink extends BaseEntity {
@Column({ name: 'created_at', type: 'datetime', default: () => 'CURRENT_TIMESTAMP' })
createdAt: Date
@DeleteDateColumn({ name: 'deleted_at' })
@DeleteDateColumn({ name: 'deleted_at', type: 'datetime' })
deletedAt: Date | null
@Column({ length: 24, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 24, nullable: false, collation: 'utf8mb4_unicode_ci' })
code: string
@Column({ name: 'link_enabled', type: 'boolean', default: true })

View File

@ -21,7 +21,7 @@ export class Event extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ length: 100, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 100, nullable: false, collation: 'utf8mb4_unicode_ci' })
type: string
@CreateDateColumn({
@ -32,56 +32,61 @@ export class Event extends BaseEntity {
})
createdAt: Date
@Column({ name: 'affected_user_id', unsigned: true, nullable: false })
@Column({ name: 'affected_user_id', type: 'bigint', unsigned: true, nullable: false })
affectedUserId: number
@ManyToOne(() => User)
@JoinColumn({ name: 'affected_user_id', referencedColumnName: 'id' })
affectedUser: User
@Column({ name: 'acting_user_id', unsigned: true, nullable: false })
@Column({ name: 'acting_user_id', type: 'bigint', unsigned: true, nullable: false })
actingUserId: number
@ManyToOne(() => User)
@JoinColumn({ name: 'acting_user_id', referencedColumnName: 'id' })
actingUser: User
@Column({ name: 'involved_user_id', type: 'int', unsigned: true, nullable: true })
@Column({ name: 'involved_user_id', type: 'bigint', unsigned: true, nullable: true })
involvedUserId: number | null
@ManyToOne(() => User)
@JoinColumn({ name: 'involved_user_id', referencedColumnName: 'id' })
involvedUser: User | null
@Column({ name: 'involved_transaction_id', type: 'int', unsigned: true, nullable: true })
@Column({ name: 'involved_transaction_id', type: 'bigint', unsigned: true, nullable: true })
involvedTransactionId: number | null
@ManyToOne(() => Transaction)
@JoinColumn({ name: 'involved_transaction_id', referencedColumnName: 'id' })
involvedTransaction: Transaction | null
@Column({ name: 'involved_contribution_id', type: 'int', unsigned: true, nullable: true })
@Column({ name: 'involved_contribution_id', type: 'bigint', unsigned: true, nullable: true })
involvedContributionId: number | null
@ManyToOne(() => Contribution)
@JoinColumn({ name: 'involved_contribution_id', referencedColumnName: 'id' })
involvedContribution: Contribution | null
@Column({ name: 'involved_contribution_message_id', type: 'int', unsigned: true, nullable: true })
@Column({
name: 'involved_contribution_message_id',
type: 'bigint',
unsigned: true,
nullable: true,
})
involvedContributionMessageId: number | null
@ManyToOne(() => ContributionMessage)
@JoinColumn({ name: 'involved_contribution_message_id', referencedColumnName: 'id' })
involvedContributionMessage: ContributionMessage | null
@Column({ name: 'involved_transaction_link_id', type: 'int', unsigned: true, nullable: true })
@Column({ name: 'involved_transaction_link_id', type: 'bigint', unsigned: true, nullable: true })
involvedTransactionLinkId: number | null
@ManyToOne(() => TransactionLink)
@JoinColumn({ name: 'involved_transaction_link_id', referencedColumnName: 'id' })
involvedTransactionLink: TransactionLink | null
@Column({ name: 'involved_contribution_link_id', type: 'int', unsigned: true, nullable: true })
@Column({ name: 'involved_contribution_link_id', type: 'bigint', unsigned: true, nullable: true })
involvedContributionLinkId: number | null
@ManyToOne(() => ContributionLink)

View File

@ -1,3 +1,4 @@
/* eslint-disable no-use-before-define */
import { Decimal } from 'decimal.js-light'
import { BaseEntity, Column, Entity, JoinColumn, OneToOne, PrimaryGeneratedColumn } from 'typeorm'
import { DecimalTransformer } from '../../src/typeorm/DecimalTransformer'

View File

@ -9,13 +9,18 @@ export class UserRole extends BaseEntity {
@Column({ name: 'user_id', type: 'int', unsigned: true, nullable: false })
userId: number
@Column({ length: 40, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 40, nullable: false, collation: 'utf8mb4_unicode_ci' })
role: string
@Column({ name: 'created_at', default: () => 'CURRENT_TIMESTAMP(3)', nullable: false })
@Column({
name: 'created_at',
type: 'datetime',
default: () => 'CURRENT_TIMESTAMP(3)',
nullable: false,
})
createdAt: Date
@Column({ name: 'updated_at', nullable: true, default: null, type: 'datetime' })
@Column({ name: 'updated_at', type: 'datetime', nullable: true, default: null })
updatedAt: Date | null
@ManyToOne(

View File

@ -11,6 +11,7 @@ export class DltTransaction extends BaseEntity {
@Column({
name: 'message_id',
type: 'varchar',
length: 64,
nullable: true,
default: null,
@ -21,10 +22,15 @@ export class DltTransaction extends BaseEntity {
@Column({ name: 'verified', type: 'bool', nullable: false, default: false })
verified: boolean
@Column({ name: 'created_at', default: () => 'CURRENT_TIMESTAMP(3)', nullable: false })
@Column({
name: 'created_at',
type: 'datetime',
default: () => 'CURRENT_TIMESTAMP(3)',
nullable: false,
})
createdAt: Date
@Column({ name: 'verified_at', nullable: true, default: null, type: 'datetime' })
@Column({ name: 'verified_at', type: 'datetime', nullable: true, default: null })
verifiedAt: Date | null
@OneToOne(

View File

@ -1,3 +1,4 @@
/* eslint-disable no-use-before-define */
import { Decimal } from 'decimal.js-light'
import { BaseEntity, Column, Entity, JoinColumn, OneToOne, PrimaryGeneratedColumn } from 'typeorm'
import { DecimalTransformer } from '../../src/typeorm/DecimalTransformer'

View File

@ -1,3 +1,4 @@
/* eslint-disable no-use-before-define */
import { Decimal } from 'decimal.js-light'
import { BaseEntity, Column, Entity, PrimaryGeneratedColumn } from 'typeorm'
import { DecimalTransformer } from '../../src/typeorm/DecimalTransformer'
@ -7,13 +8,13 @@ export class PendingTransaction extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ name: 'state', unsigned: true, nullable: false })
@Column({ name: 'state', type: 'int', unsigned: true, nullable: false })
state: number
@Column({ type: 'int', unsigned: true, unique: true, nullable: true, default: null })
previous: number | null
@Column({ name: 'type_id', unsigned: true, nullable: false })
@Column({ name: 'type_id', type: 'int', unsigned: true, nullable: false })
typeId: number
@Column({
@ -68,13 +69,13 @@ export class PendingTransaction extends BaseEntity {
})
decayStart: Date | null
@Column({ length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
memo: string
@Column({ name: 'creation_date', type: 'datetime', nullable: true, default: null })
creationDate: Date | null
@Column({ name: 'user_id', unsigned: true, nullable: false })
@Column({ name: 'user_id', type: 'bigint', unsigned: true, nullable: false })
userId: number
@Column({

View File

@ -1,3 +1,4 @@
/* eslint-disable no-use-before-define */
import { Decimal } from 'decimal.js-light'
import { BaseEntity, Column, Entity, JoinColumn, OneToOne, PrimaryGeneratedColumn } from 'typeorm'
import { DecimalTransformer } from '../../src/typeorm/DecimalTransformer'
@ -12,7 +13,7 @@ export class Transaction extends BaseEntity {
@Column({ type: 'int', unsigned: true, unique: true, nullable: true, default: null })
previous: number | null
@Column({ name: 'type_id', unsigned: true, nullable: false })
@Column({ name: 'type_id', type: 'int', unsigned: true, nullable: false })
typeId: number
@Column({
@ -67,13 +68,13 @@ export class Transaction extends BaseEntity {
})
decayStart: Date | null
@Column({ length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
memo: string
@Column({ name: 'creation_date', type: 'datetime', nullable: true, default: null })
creationDate: Date | null
@Column({ name: 'user_id', unsigned: true, nullable: false })
@Column({ name: 'user_id', type: 'bigint', unsigned: true, nullable: false })
userId: number
@Column({

View File

@ -20,7 +20,7 @@ export class Contribution extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ unsigned: true, nullable: false, name: 'user_id' })
@Column({ type: 'bigint', unsigned: true, nullable: false, name: 'user_id' })
userId: number
@ManyToOne(
@ -39,7 +39,7 @@ export class Contribution extends BaseEntity {
@Column({ type: 'datetime', nullable: false, name: 'contribution_date' })
contributionDate: Date
@Column({ length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 255, nullable: false, collation: 'utf8mb4_unicode_ci' })
memo: string
@Column({
@ -51,26 +51,27 @@ export class Contribution extends BaseEntity {
})
amount: Decimal
@Column({ unsigned: true, nullable: true, name: 'moderator_id' })
@Column({ type: 'bigint', unsigned: true, nullable: true, name: 'moderator_id' })
moderatorId: number
@Column({ unsigned: true, nullable: true, name: 'contribution_link_id' })
@Column({ type: 'bigint', unsigned: true, nullable: true, name: 'contribution_link_id' })
contributionLinkId: number
@Column({ unsigned: true, nullable: true, name: 'confirmed_by' })
@Column({ type: 'bigint', unsigned: true, nullable: true, name: 'confirmed_by' })
confirmedBy: number
@Column({ nullable: true, name: 'confirmed_at' })
@Column({ type: 'datetime', nullable: true, name: 'confirmed_at' })
confirmedAt: Date
@Column({ unsigned: true, nullable: true, name: 'denied_by' })
@Column({ type: 'bigint', unsigned: true, nullable: true, name: 'denied_by' })
deniedBy: number
@Column({ nullable: true, name: 'denied_at' })
@Column({ type: 'datetime', nullable: true, name: 'denied_at' })
deniedAt: Date
@Column({
name: 'contribution_type',
type: 'varchar',
length: 12,
nullable: false,
collation: 'utf8mb4_unicode_ci',
@ -79,25 +80,26 @@ export class Contribution extends BaseEntity {
@Column({
name: 'contribution_status',
type: 'varchar',
length: 12,
nullable: false,
collation: 'utf8mb4_unicode_ci',
})
contributionStatus: string
@Column({ unsigned: true, nullable: true, name: 'transaction_id' })
@Column({ type: 'bigint', unsigned: true, nullable: true, name: 'transaction_id' })
transactionId: number
@Column({ nullable: true, name: 'updated_at' })
@Column({ type: 'datetime', nullable: true, name: 'updated_at' })
updatedAt: Date
@Column({ nullable: true, unsigned: true, name: 'updated_by', type: 'int' })
@Column({ type: 'bigint', nullable: true, unsigned: true, name: 'updated_by' })
updatedBy: number | null
@DeleteDateColumn({ name: 'deleted_at' })
@DeleteDateColumn({ type: 'datetime', name: 'deleted_at' })
deletedAt: Date | null
@DeleteDateColumn({ unsigned: true, nullable: true, name: 'deleted_by' })
@DeleteDateColumn({ type: 'bigint', unsigned: true, nullable: true, name: 'deleted_by' })
deletedBy: number
@OneToMany(

View File

@ -21,7 +21,7 @@ export class ContributionMessage extends BaseEntity {
id: number
@Index()
@Column({ name: 'contribution_id', unsigned: true, nullable: false })
@Column({ name: 'contribution_id', type: 'bigint', unsigned: true, nullable: false })
contributionId: number
@ManyToOne(
@ -31,7 +31,7 @@ export class ContributionMessage extends BaseEntity {
@JoinColumn({ name: 'contribution_id' })
contribution: Contribution
@Column({ name: 'user_id', unsigned: true, nullable: false })
@Column({ name: 'user_id', type: 'bigint', unsigned: true, nullable: false })
userId: number
@ManyToOne(
@ -41,7 +41,7 @@ export class ContributionMessage extends BaseEntity {
@JoinColumn({ name: 'user_id' })
user: User
@Column({ length: 2000, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 2000, nullable: false, collation: 'utf8mb4_unicode_ci' })
message: string
@CreateDateColumn()
@ -55,10 +55,10 @@ export class ContributionMessage extends BaseEntity {
@DeleteDateColumn({ name: 'deleted_at' })
deletedAt: Date | null
@Column({ name: 'deleted_by', default: null, unsigned: true, nullable: true })
@Column({ name: 'deleted_by', type: 'bigint', default: null, unsigned: true, nullable: true })
deletedBy: number
@Column({ length: 12, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({ type: 'varchar', length: 12, nullable: false, collation: 'utf8mb4_unicode_ci' })
type: string
@Column({ name: 'is_moderator', type: 'bool', nullable: false, default: false })

View File

@ -17,6 +17,7 @@ export class UserContact extends BaseEntity {
@Column({
name: 'type',
type: 'varchar',
length: 100,
nullable: true,
default: null,
@ -33,7 +34,13 @@ export class UserContact extends BaseEntity {
@Column({ name: 'user_id', type: 'int', unsigned: true, nullable: false })
userId: number
@Column({ length: 255, unique: true, nullable: false, collation: 'utf8mb4_unicode_ci' })
@Column({
type: 'varchar',
length: 255,
unique: true,
nullable: false,
collation: 'utf8mb4_unicode_ci',
})
email: string
@Column({ name: 'gms_publish_email', type: 'bool', nullable: false, default: false })
@ -42,10 +49,22 @@ export class UserContact extends BaseEntity {
@Column({ name: 'email_verification_code', type: 'bigint', unsigned: true, unique: true })
emailVerificationCode: string
@Column({ name: 'email_opt_in_type_id' })
@Column({
name: 'email_opt_in_type_id',
type: 'int',
unsigned: true,
nullable: false,
default: 0,
})
emailOptInTypeId: number
@Column({ name: 'email_resend_count' })
@Column({
name: 'email_resend_count',
type: 'int',
unsigned: true,
nullable: false,
default: 0,
})
emailResendCount: number
@Column({ name: 'email_checked', type: 'bool', nullable: false, default: false })
@ -53,6 +72,7 @@ export class UserContact extends BaseEntity {
@Column({
name: 'country_code',
type: 'varchar',
length: 255,
unique: false,
nullable: true,
@ -60,13 +80,23 @@ export class UserContact extends BaseEntity {
})
countryCode: string
@Column({ length: 255, unique: false, nullable: true, collation: 'utf8mb4_unicode_ci' })
@Column({
type: 'varchar',
length: 255,
unique: false,
nullable: true,
collation: 'utf8mb4_unicode_ci',
})
phone: string
@Column({ name: 'gms_publish_phone', type: 'int', unsigned: true, nullable: false, default: 0 })
gmsPublishPhone: number
@CreateDateColumn({ name: 'created_at', default: () => 'CURRENT_TIMESTAMP(3)', nullable: false })
@CreateDateColumn({
name: 'created_at',
default: () => 'CURRENT_TIMESTAMP(3)',
nullable: false,
})
createdAt: Date
@UpdateDateColumn({

View File

@ -21,10 +21,10 @@ export class FederatedCommunity extends BaseEntity {
@Column({ name: 'public_key', type: 'binary', length: 32, default: null, nullable: true })
publicKey: Buffer
@Column({ name: 'api_version', length: 10, nullable: false })
@Column({ name: 'api_version', type: 'varchar', length: 10, nullable: false })
apiVersion: string
@Column({ name: 'end_point', length: 255, nullable: false })
@Column({ name: 'end_point', type: 'varchar', length: 255, nullable: false })
endPoint: string
@Column({ name: 'last_announced_at', type: 'datetime', nullable: true })

View File

@ -27,6 +27,7 @@ export class User extends BaseEntity {
@Column({
name: 'gradido_id',
type: 'char',
length: 36,
nullable: false,
collation: 'utf8mb4_unicode_ci',
@ -51,6 +52,7 @@ export class User extends BaseEntity {
@Column({
name: 'alias',
type: 'varchar',
length: 20,
nullable: true,
default: null,
@ -70,6 +72,7 @@ export class User extends BaseEntity {
@Column({
name: 'first_name',
type: 'varchar',
length: 255,
nullable: true,
default: null,
@ -79,6 +82,7 @@ export class User extends BaseEntity {
@Column({
name: 'last_name',
type: 'varchar',
length: 255,
nullable: true,
default: null,
@ -92,10 +96,15 @@ export class User extends BaseEntity {
@Column({ name: 'humhub_publish_name', type: 'int', unsigned: true, nullable: false, default: 0 })
humhubPublishName: number
@Column({ name: 'created_at', default: () => 'CURRENT_TIMESTAMP(3)', nullable: false })
@Column({
name: 'created_at',
type: 'datetime',
default: () => 'CURRENT_TIMESTAMP(3)',
nullable: false,
})
createdAt: Date
@DeleteDateColumn({ name: 'deleted_at', nullable: true })
@DeleteDateColumn({ name: 'deleted_at', type: 'datetime', nullable: true })
deletedAt: Date | null
@Column({ type: 'bigint', default: 0, unsigned: true })
@ -110,7 +119,13 @@ export class User extends BaseEntity {
})
passwordEncryptionType: number
@Column({ length: 4, default: 'de', collation: 'utf8mb4_unicode_ci', nullable: false })
@Column({
type: 'varchar',
length: 4,
default: 'de',
collation: 'utf8mb4_unicode_ci',
nullable: false,
})
language: string
@Column({ type: 'bool', default: false })
@ -126,19 +141,19 @@ export class User extends BaseEntity {
@JoinColumn({ name: 'user_id' })
userRoles: UserRole[]
@Column({ name: 'referrer_id', type: 'int', unsigned: true, nullable: true, default: null })
@Column({ name: 'referrer_id', type: 'bigint', unsigned: true, nullable: true, default: null })
referrerId?: number | null
@Column({
name: 'contribution_link_id',
type: 'int',
type: 'bigint',
unsigned: true,
nullable: true,
default: null,
})
contributionLinkId?: number | null
@Column({ name: 'publisher_id', default: 0 })
@Column({ name: 'publisher_id', type: 'bigint', unsigned: true, default: 0 })
publisherId: number
@Column({ name: 'gms_allowed', type: 'bool', default: true })

View File

@ -21,7 +21,7 @@ export class Community extends BaseEntity {
@Column({ name: 'foreign', type: 'bool', nullable: false, default: true })
foreign: boolean
@Column({ name: 'url', length: 255, nullable: false })
@Column({ name: 'url', type: 'varchar', length: 255, nullable: false })
url: string
@Column({ name: 'public_key', type: 'binary', length: 32, nullable: false })

View File

@ -0,0 +1,28 @@
import { build } from 'esbuild'
import fs from 'node:fs'
import { latestDbVersion } from './src/config/detectLastDBVersion'
build({
entryPoints: ['entity/index.ts'],
bundle: true,
target: 'node18.20.7',
platform: 'node',
packages: 'external',
outdir: './build',
plugins: [
{
// hardcode last db version string into index.ts, before parsing
name: 'replace-latest-db-version-import',
setup(build) {
build.onLoad({ filter: /index\.ts$/ }, async (args) => {
let source = await fs.promises.readFile(args.path, 'utf8')
source = source.replace(
/import\s*\{\s*latestDbVersion\s*\}\s*from\s*['"][^'"]+['"]/,
`const latestDbVersion = "${latestDbVersion}";`,
)
return { contents: source, loader: 'ts' }
})
},
},
],
})

View File

@ -2,12 +2,12 @@
"name": "database",
"version": "2.5.2",
"description": "Gradido Database Tool to execute database migrations",
"main": "./build/entity/index.js",
"types": "./build/entity/index.d.ts",
"main": "./build/index.js",
"types": "./entity/index.ts",
"exports": {
".": {
"import": "./build/entity/index.js",
"require": "./build/entity/index.js"
"import": "./build/index.js",
"require": "./build/index.js"
}
},
"repository": "https://github.com/gradido/gradido/database",
@ -15,8 +15,8 @@
"license": "Apache-2.0",
"private": false,
"scripts": {
"build": "mkdirp build/src/config/ && ncp src/config build/src/config && tsc --build",
"clean": "tsc --build --clean",
"build": "tsx ./esbuild.config.ts",
"typecheck": "tsc --noEmit",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write",
"clear": "cross-env TZ=UTC tsx src/index.ts clear",
@ -32,22 +32,19 @@
"@types/faker": "^5.5.9",
"@types/geojson": "^7946.0.13",
"@types/node": "^17.0.21",
"mkdirp": "^3.0.1",
"ncp": "^2.0.0",
"prettier": "^2.8.7",
"ts-node": "^10.9.2",
"tsx": "^4.19.3",
"typescript": "^4.9.5"
},
"dependencies": {
"@types/uuid": "^8.3.4",
"cross-env": "^7.0.3",
"decimal.js-light": "^2.5.1",
"esbuild": "^0.25.2",
"dotenv": "^10.0.0",
"geojson": "^0.5.0",
"mysql2": "^2.3.0",
"reflect-metadata": "^0.1.13",
"ts-mysql-migrate": "^1.0.2",
"tsx": "^4.19.4",
"typeorm": "^0.3.16",
"uuid": "^8.3.2",
"wkx": "^0.5.0"

View File

@ -1,5 +1,6 @@
import { Connection, createConnection } from 'mysql2/promise'
import { Connection } from 'mysql2/promise'
import { CONFIG } from './config'
import { connectToDatabaseServer } from './prepare'
export async function truncateTables(connection: Connection) {
const [tables] = await connection.query('SHOW TABLES')
@ -26,13 +27,13 @@ export async function truncateTables(connection: Connection) {
}
export async function clearDatabase() {
const connection = await createConnection({
host: CONFIG.DB_HOST,
port: CONFIG.DB_PORT,
user: CONFIG.DB_USER,
password: CONFIG.DB_PASSWORD,
database: CONFIG.DB_DATABASE,
})
const connection = await connectToDatabaseServer(
CONFIG.DB_CONNECT_RETRY_COUNT,
CONFIG.DB_CONNECT_RETRY_DELAY_MS,
)
if (!connection) {
throw new Error('Could not connect to database server')
}
await truncateTables(connection)

View File

@ -11,11 +11,17 @@ const constants = {
}
const database = {
DB_HOST: process.env.DB_HOST || 'localhost',
DB_CONNECT_RETRY_COUNT: process.env.DB_CONNECT_RETRY_COUNT
? Number.parseInt(process.env.DB_CONNECT_RETRY_COUNT)
: 15,
DB_CONNECT_RETRY_DELAY_MS: process.env.DB_CONNECT_RETRY_DELAY_MS
? Number.parseInt(process.env.DB_CONNECT_RETRY_DELAY_MS)
: 500,
DB_HOST: process.env.DB_HOST ?? 'localhost',
DB_PORT: process.env.DB_PORT ? Number.parseInt(process.env.DB_PORT) : 3306,
DB_USER: process.env.DB_USER || 'root',
DB_PASSWORD: process.env.DB_PASSWORD || '',
DB_DATABASE: process.env.DB_DATABASE || 'gradido_community',
DB_USER: process.env.DB_USER ?? 'root',
DB_PASSWORD: process.env.DB_PASSWORD ?? '',
DB_DATABASE: process.env.DB_DATABASE ?? 'gradido_community',
}
const migrations = {

View File

@ -10,19 +10,25 @@ export enum DatabaseState {
SAME_VERSION = 'SAME_VERSION',
}
async function connectToDatabaseServer(): Promise<Connection | null> {
try {
return await createConnection({
host: CONFIG.DB_HOST,
port: CONFIG.DB_PORT,
user: CONFIG.DB_USER,
password: CONFIG.DB_PASSWORD,
})
} catch (e) {
// biome-ignore lint/suspicious/noConsole: no logger present
console.log('could not connect to database server', e)
return null
export async function connectToDatabaseServer(
maxRetries: number,
delayMs: number,
): Promise<Connection | null> {
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
return await createConnection({
host: CONFIG.DB_HOST,
port: CONFIG.DB_PORT,
user: CONFIG.DB_USER,
password: CONFIG.DB_PASSWORD,
})
} catch (e) {
// biome-ignore lint/suspicious/noConsole: no logger present
console.log(`could not connect to database server, retry in ${delayMs} ms`, e)
}
await new Promise((resolve) => setTimeout(resolve, delayMs))
}
return null
}
async function convertJsToTsInMigrations(connection: Connection): Promise<number> {
@ -36,7 +42,10 @@ async function convertJsToTsInMigrations(connection: Connection): Promise<number
}
export const getDatabaseState = async (): Promise<DatabaseState> => {
const connection = await connectToDatabaseServer()
const connection = await connectToDatabaseServer(
CONFIG.DB_CONNECT_RETRY_COUNT,
CONFIG.DB_CONNECT_RETRY_DELAY_MS,
)
if (!connection) {
return DatabaseState.NOT_CONNECTED
}
@ -91,7 +100,7 @@ export const getDatabaseState = async (): Promise<DatabaseState> => {
// check if the database is up to date
const [rows] = await connection.query<RowDataPacket[]>(
`SELECT * FROM ${CONFIG.MIGRATIONS_TABLE} ORDER BY version DESC LIMIT 1`,
`SELECT fileName FROM ${CONFIG.MIGRATIONS_TABLE} ORDER BY version DESC LIMIT 1`,
)
if (rows.length === 0) {
return DatabaseState.LOWER_VERSION

View File

@ -145,22 +145,22 @@ sudo /etc/init.d/nginx restart
# helper functions
log_step() {
local message="$1"
echo -e "\e[34m$message\e[0m" # > /dev/tty # blue in console
echo -e "\e[34m$message\e[0m" > /dev/tty # blue in console
echo "<p style="color:blue">$message</p>" >> "$UPDATE_HTML" # blue in html
}
log_error() {
local message="$1"
echo -e "\e[31m$message\e[0m" # > /dev/tty # red in console
echo -e "\e[31m$message\e[0m" > /dev/tty # red in console
echo "<span style="color:red">$message</span>" >> "$UPDATE_HTML" # red in html
}
log_warn() {
local message="$1"
echo -e "\e[33m$message\e[0m" # > /dev/tty # orange in console
echo -e "\e[33m$message\e[0m" > /dev/tty # orange in console
echo "<span style="color:orange">$message</span>" >> "$UPDATE_HTML" # orange in html
}
log_success() {
local message="$1"
echo -e "\e[32m$message\e[0m" # > /dev/tty # green in console
echo -e "\e[32m$message\e[0m" > /dev/tty # green in console
echo "<p style="color:green">$message</p>" >> "$UPDATE_HTML" # green in html
}
@ -256,6 +256,7 @@ MODULES=(
if [ "$FAST_MODE" = false ] ; then
log_step 'Clean tmp, bun and yarn cache'
# Clean tmp folder - remove yarn files
# ignore error/warnings, we want only to remove all yarn files
find /tmp -name "yarn--*" -exec rm -r {} \; || true
# Clean user cache folder
rm -Rf ~/.cache/yarn

View File

@ -1,8 +1,9 @@
##################################################################################
# BASE ###########################################################################
##################################################################################
FROM node:18.20.7-alpine3.21 as base
#FROM ubuntu:latest as base
FROM node:18.20.7-bookworm-slim as base
#FROM node:18.20.7-alpine3.21 as base
# change to alpine after sodium-native ship with native alpine build
# ENVs (available in production aswell, can be overwritten by commandline or env file)
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
@ -17,6 +18,9 @@ ENV BUILD_COMMIT="0000000"
ENV NODE_ENV="production"
## App relevant Envs
#ENV PORT="5000"
## Timezone
ENV TZ=UTC
ENV DB_HOST=mariadb
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
@ -46,64 +50,74 @@ LABEL maintainer="support@gradido.net"
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
##################################################################################
# Base with turbo ################################################################
##################################################################################
FROM base as turbo-base
RUN apk update && apk add --no-cache libc6-compat \
&& yarn global add turbo@^2 \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
##################################################################################
# BUN ############################################################################
##################################################################################
FROM base as bun-base
RUN apt update && apt install -y --no-install-recommends ca-certificates curl bash unzip
#RUN apk update && apk add --no-cache curl tar bash
RUN curl -fsSL https://bun.sh/install | bash
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
##################################################################################
# Development ####################################################################
##################################################################################
FROM bun-base AS development
# Run command
CMD /bin/sh -c "bun install --filter dht-node --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2 \
&& turbo dht-node#dev --env-mode=loose"
##################################################################################
# Basic Image with bun setup and project and source code #########################
##################################################################################
FROM bun-base as bun-base-src
COPY --chown=app:app . .
##################################################################################
# BUILDER (create partly monorepo only with data needed by dht-node) #############
##################################################################################
FROM turbo-base as builder
FROM bun-base-src as build
COPY --chown=app:app . .
RUN turbo prune dht-node --docker
RUN bun install --filter dht-node --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2
RUN turbo dht-node#build dht-node#typecheck --env-mode=loose
##################################################################################
# INSTALLER (create production image) ############################################
##################################################################################
FROM turbo-base AS installer
# First install the dependencies (as they change less often)
COPY --chown=app:app --from=builder /app/out/json/ .
RUN yarn install --frozen-lockfile --production=false \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# Build the project
COPY --chown=app:app --from=builder /app/out/full/ .
RUN turbo build
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM installer as test
FROM build as test
ENV DB_HOST=mariadb
# Run command
CMD /bin/sh -c "turbo dht-node#test --env-mode=loose"
##################################################################################
# install only node modules needed for running bundle ############################
##################################################################################
FROM bun-base-src as production-node-modules
# add node_modules from production_node_modules
RUN bun install --filter dht-node --production --frozen-lockfile --no-cache \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& ./scripts/clean-prebuilds.sh
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
##################################################################################
FROM base as production
# Copy "binary"-files from build image
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/dht-node/build/src/index.js ./index.js
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/dht-node/build/index.js ./index.js
# We also install the native node_modules which cannot be bundled
# TODO: find a elegant way to use the right versions from yarn.lock
RUN yarn add udx-native@1.5.3 sodium-native@4.0.0 \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# add node_modules from production_node_modules
COPY --chown=app:app --from=production-node-modules ${DOCKER_WORKDIR}/node_modules ./node_modules
# Copy log4js-config.json to provide log configuration
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/dht-node/log4js-config.json ./log4js-config.json
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/dht-node/log4js-config.json ./log4js-config.json
# Run command
CMD ["node", "index.js"]
CMD ["node", "index.js"]

17
dht-node/README.md Normal file
View File

@ -0,0 +1,17 @@
# DHT-Node
## Bun-Compatibility
### Crash on NAPI module using `uv_interface_addresses`
Bun crashes when a NAPI module tries to call `uv_interface_addresses`, a libuv function currently unsupported:
Bun is working hard to support all NAPI module calls
## Production Build
Package.json dependencies contain only node_modules which cannot be bundled because of native node modules or needed for run start script. They are manually picked from @hyperswarm/dht
dependencies. The versions should be updated, if @hyperswarm/dht is updated.
The goal is to get a really small footprint for the production image. It is also possible to use in bare_metal setup.
### Bare Metal minimal setup
For a minimal bare metal production setup, look into [Dockerfile](Dockerfile) in the production step.

View File

@ -0,0 +1,14 @@
import { build } from 'esbuild'
build({
entryPoints: ['src/index.ts'],
outdir: 'build',
platform: 'node',
target: 'node18.20.7',
bundle: true,
keepNames: true,
// legalComments: 'inline',
external: ['dht-rpc', 'sodium-universal'],
minify: true,
sourcemap: false,
})

View File

@ -1,5 +1,5 @@
{
"name": "dht-node",
"name": "dht-node",
"version": "2.5.2",
"description": "Gradido dht-node module",
"main": "src/index.ts",
@ -8,39 +8,40 @@
"license": "Apache-2.0",
"private": false,
"scripts": {
"build": "tsc --build",
"clean": "tsc --build --clean",
"start": "cross-env TZ=UTC TS_NODE_BASEURL=./build node -r tsconfig-paths/register build/src/index.js",
"dev": "cross-env TZ=UTC nodemon -w src --ext ts --exec ts-node -r dotenv/config -r tsconfig-paths/register src/index.ts",
"build": "tsx esbuild.config.ts",
"start": "cross-env TZ=UTC NODE_ENV=production node build/index.js",
"dev": "cross-env TZ=UTC tsx watch src/index.ts",
"typecheck": "tsc --noEmit",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write",
"test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_dht jest --runInBand --forceExit --detectOpenHandles"
},
"dependencies": {
"@hyperswarm/dht": "^6.4.4",
"config-schema": "*",
"cross-env": "^7.0.3",
"database": "*",
"dotenv": "10.0.0",
"joi": "^17.13.3",
"log4js": "^6.7.1",
"nodemon": "^2.0.20",
"tsconfig-paths": "^4.1.2",
"typeorm": "^0.3.16",
"uuid": "^8.3.2"
"dht-rpc": "6.18.1",
"sodium-universal": "4.0.1",
"cross-env": "^7.0.3"
},
"devDependencies": {
"@types/jest": "27.5.1",
"@types/node": "^17.0.21",
"@types/uuid": "^8.3.4",
"jest": "27.5.1",
"@biomejs/biome": "1.9.4",
"@types/dotenv": "^8.2.0",
"@hyperswarm/dht": "6.5.1",
"@types/dotenv": "^8.2.3",
"@types/jest": "27.5.1",
"@types/joi": "^17.2.3",
"prettier": "^2.8.7",
"@types/node": "^17.0.45",
"@types/uuid": "^8.3.4",
"config-schema": "*",
"database": "*",
"dotenv": "10.0.0",
"esbuild": "^0.25.3",
"jest": "27.5.1",
"joi": "^17.13.3",
"log4js": "^6.9.1",
"prettier": "^2.8.8",
"ts-jest": "27.1.4",
"ts-node": "^10.9.2",
"typescript": "^4.9.4"
"tsx": "^4.19.4",
"typeorm": "^0.3.22",
"typescript": "^4.9.5",
"uuid": "^8.3.2"
},
"engines": {
"node": ">=18"

View File

@ -18,6 +18,12 @@ const server = {
}
const database = {
DB_CONNECT_RETRY_COUNT: process.env.DB_CONNECT_RETRY_COUNT
? Number.parseInt(process.env.DB_CONNECT_RETRY_COUNT)
: 15,
DB_CONNECT_RETRY_DELAY_MS: process.env.DB_CONNECT_RETRY_DELAY_MS
? Number.parseInt(process.env.DB_CONNECT_RETRY_DELAY_MS)
: 500,
DB_HOST: process.env.DB_HOST ?? 'localhost',
DB_PORT: process.env.DB_PORT ? Number.parseInt(process.env.DB_PORT) : 3306,
DB_USER: process.env.DB_USER ?? 'root',

View File

@ -1,6 +1,8 @@
import {
COMMUNITY_DESCRIPTION,
COMMUNITY_NAME,
DB_CONNECT_RETRY_COUNT,
DB_CONNECT_RETRY_DELAY_MS,
DB_DATABASE,
DB_HOST,
DB_PASSWORD,
@ -19,6 +21,8 @@ export const schema = Joi.object({
COMMUNITY_NAME,
COMMUNITY_DESCRIPTION,
DB_DATABASE,
DB_CONNECT_RETRY_COUNT,
DB_CONNECT_RETRY_DELAY_MS,
DB_HOST,
DB_PASSWORD,
DB_PORT,

View File

@ -2,23 +2,11 @@ import { startDHT } from '@/dht_node/index'
import { CONFIG } from './config'
import { logger } from './server/logger'
import { checkDBVersion } from './typeorm/DBVersion'
import { connection } from './typeorm/connection'
import { checkDBVersionUntil } from './typeorm/DBVersion'
async function main() {
// open mysql connection
const con = await connection()
if (!con || !con.isConnected) {
logger.fatal(`Couldn't open connection to database!`)
throw new Error(`Fatal: Couldn't open connection to database`)
}
// check for correct database version
const dbVersion = await checkDBVersion(CONFIG.DB_VERSION)
if (!dbVersion) {
logger.fatal('Fatal: Database Version incorrect')
throw new Error('Fatal: Database Version incorrect')
}
await checkDBVersionUntil(CONFIG.DB_CONNECT_RETRY_COUNT, CONFIG.DB_CONNECT_RETRY_DELAY_MS)
logger.debug(`dhtseed set by CONFIG.FEDERATION_DHT_SEED=${CONFIG.FEDERATION_DHT_SEED}`)
logger.info(
`starting Federation on ${CONFIG.FEDERATION_DHT_TOPIC} ${

View File

@ -2,6 +2,33 @@ import { Migration } from 'database'
import { logger } from '@/server/logger'
import { CONFIG } from '@/config'
import { Connection as DbConnection } from 'typeorm'
import { connection as connectionFunc } from './connection'
async function checkDBVersionUntil(maxRetries: number, delayMs: number): Promise<DbConnection> {
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
const connection = await connectionFunc()
if (connection?.isInitialized) {
const dbVersion = await checkDBVersion(CONFIG.DB_VERSION)
if (dbVersion) {
logger.info('Database connection and version check succeeded.')
return connection
}
}
} catch (err) {
logger.warn(`Attempt ${attempt}: Waiting for DB...`, err)
}
await new Promise((resolve) => setTimeout(resolve, delayMs))
}
logger.fatal(
`Fatal: Could not connect to database or version check failed after ${maxRetries} attempts.`,
)
throw new Error('Fatal: Database not ready.')
}
const getDBVersion = async (): Promise<string | null> => {
try {
const [dbVersion] = await Migration.find({ order: { version: 'DESC' }, take: 1 })
@ -25,4 +52,4 @@ const checkDBVersion = async (DB_VERSION: string): Promise<boolean> => {
return true
}
export { checkDBVersion, getDBVersion }
export { checkDBVersion, getDBVersion, checkDBVersionUntil }

View File

@ -1,10 +1,7 @@
import { entities } from 'database'
import { checkDBVersionUntil } from '@/typeorm/DBVersion'
import { CONFIG } from '@/config'
import { connection } from '@/typeorm/connection'
import { checkDBVersion } from '@/typeorm/DBVersion'
import { logger } from './testSetup'
export const headerPushMock = jest.fn((t) => {
context.token = t.value
@ -27,20 +24,7 @@ export const cleanDB = async () => {
}
export const testEnvironment = async () => {
// open mysql connection
const con = await connection()
if (!con || !con.isConnected) {
logger.fatal(`Couldn't open connection to database!`)
throw new Error(`Fatal: Couldn't open connection to database`)
}
// check for correct database version
const dbVersion = await checkDBVersion(CONFIG.DB_VERSION)
if (!dbVersion) {
logger.fatal('Fatal: Database Version incorrect')
throw new Error('Fatal: Database Version incorrect')
}
return { con }
return { con: await checkDBVersionUntil(CONFIG.DB_CONNECT_RETRY_COUNT, CONFIG.DB_CONNECT_RETRY_DELAY_MS) }
}
export const resetEntity = async (entity: any) => {

View File

@ -1,43 +1,5 @@
services:
########################################################
# Gradido ##############################################
########################################################
gradido:
image: gradido/gradido:local-development
build:
context: ./
dockerfile: ./Dockerfile
target: development
args:
BUILD_COMMIT: ${BUILD_COMMIT}
BUILD_COMMIT_SHORT: ${BUILD_COMMIT_SHORT}
BUILD_VERSION: ${BUILD_VERSION}
depends_on:
- mariadb
networks:
- internal-net
- external-net
ports:
- ${BACKEND_PORT:-4000}:${BACKEND_PORT:-4000}
- ${FEDERATION_PORT:-5010}:${FEDERATION_PORT:-5010}
- ${FRONTEND_MODULE_PORT:-3000}:${FRONTEND_MODULE_PORT:-3000}
- ${ADMIN_MODULE_PORT:-8080}:${ADMIN_MODULE_PORT:-8080}
environment:
# Envs used in Dockerfile
# - DOCKER_WORKDIR="/app"
# - PORT=4000
- BUILD_DATE
- BUILD_VERSION
- BUILD_COMMIT
- NODE_ENV=production
- DB_HOST=mariadb
volumes:
- ./logs/backend:/logs/backend
- gradido_node_modules:/app/node_modules
- .:/app
########################################################
# FRONTEND #############################################
########################################################
@ -46,15 +8,19 @@ services:
image: gradido/frontend:local-development
build:
target: development
networks:
- external-net
- internal-net
environment:
- NODE_ENV=development
# - DEBUG=true
volumes:
# This makes sure the docker container has its own node modules.
# Therefore it is possible to have a different node version on the host machine
- frontend_node_modules:/app/node_modules
- node_modules_frontend:/app/node_modules
- turbo_cache:/tmp/turbo
# bind the local folder to the docker to allow live reload
- ./frontend:/app
- .:/app
########################################################
# ADMIN INTERFACE ######################################
@ -64,15 +30,18 @@ services:
image: gradido/admin:local-development
build:
target: development
networks:
- external-net
- internal-net
environment:
- NODE_ENV=development
# - DEBUG=true
volumes:
# This makes sure the docker container has its own node modules.
# Therefore it is possible to have a different node version on the host machine
- admin_node_modules:/app/node_modules
- node_modules_admin:/app/node_modules
- turbo_cache:/tmp/turbo
# bind the local folder to the docker to allow live reload
- ./admin:/app
- .:/app
########################################################
# BACKEND ##############################################
@ -86,19 +55,14 @@ services:
- external-net
- internal-net
environment:
- NODE_ENV="development"
- NODE_ENV=development
volumes:
# This makes sure the docker container has its own node modules.
# Therefore it is possible to have a different node version on the host machine
- backend_node_modules:/app/node_modules
- backend_database_node_modules:/database/node_modules
- backend_database_build:/database/build
- backend_config_node_modules:/config/node_modules
- backend_config_build:/config/build
- node_modules_backend:/app/node_modules
- turbo_cache:/tmp/turbo
# bind the local folder to the docker to allow live reload
- ./backend:/app
- ./database:/database
- ./config:/config
- .:/app
########################################################
# DHT-NODE #############################################
@ -112,19 +76,14 @@ services:
- external-net
- internal-net
environment:
- NODE_ENV="development"
- NODE_ENV=development
volumes:
# This makes sure the docker container has its own node modules.
# Therefore it is possible to have a different node version on the host machine
- dht_node_modules:/app/node_modules
- dht_database_node_modules:/database/node_modules
- dht_database_build:/database/build
- dht_config_node_modules:/config/node_modules
- dht_config_build:/config/build
- node_modules_dht:/app/node_modules
- turbo_cache:/tmp/turbo
# bind the local folder to the docker to allow live reload
- ./dht-node:/app
- ./database:/database
- ./config:/config
- .:/app
########################################################
# DLT-CONNECTOR ########################################
@ -159,19 +118,14 @@ services:
- external-net
- internal-net
environment:
- NODE_ENV="development"
- NODE_ENV=development
volumes:
# This makes sure the docker container has its own node modules.
# Therefore it is possible to have a different node version on the host machine
- federation_node_modules:/app/node_modules
- federation_database_node_modules:/database/node_modules
- federation_database_build:/database/build
- federation_config_node_modules:/config/node_modules
- federation_config_build:/config/build
- node_modules_federation:/app/node_modules
- turbo_cache:/tmp/turbo
# bind the local folder to the docker to allow live reload
- ./federation:/app
- ./database:/database
- ./config:/config
- .:/app
########################################################
# DATABASE ##############################################
@ -185,12 +139,15 @@ services:
image: gradido/database:local-up
build:
target: up
profiles:
- up
environment:
- NODE_ENV="development"
- NODE_ENV=development
volumes:
# This makes sure the docker container has its own node modules.
# Therefore it is possible to have a different node version on the host machine
- database_node_modules:/app/node_modules
- node_modules_database:/app/node_modules
- turbo_cache:/tmp/turbo
# bind the local folder to the docker to allow live reload
- ./database:/app
@ -227,7 +184,13 @@ services:
#########################################################
## NGINX ################################################
#########################################################
# nginx:
nginx:
build:
context: ./nginx/
depends_on:
- backend
- frontend
- admin
#########################################################
## PHPMYADMIN ###########################################
@ -261,25 +224,15 @@ services:
- external-net
volumes:
admin_node_modules:
frontend_node_modules:
gradido_node_modules:
backend_node_modules:
backend_database_node_modules:
backend_database_build:
backend_config_node_modules:
backend_config_build:
dht_node_modules:
dht_database_node_modules:
dht_database_build:
dht_config_node_modules:
dht_config_build:
node_modules:
node_modules_dht:
node_modules_admin:
node_modules_frontend:
node_modules_backend:
node_modules_federation:
node_modules_database:
dlt_connector_modules:
federation_node_modules:
federation_database_node_modules:
federation_database_build:
federation_config_node_modules:
federation_config_build:
database_node_modules:
dlt-database_node_modules:
dlt-database_build:
dlt-database_build:
turbo_cache:
turbo_cache_dht:

View File

@ -5,47 +5,13 @@
services:
########################################################
# Gradido ##############################################
########################################################
gradido:
image: gradido/gradido:local-production
build:
context: ./
dockerfile: ./Dockerfile
target: production
args:
BUILD_COMMIT: ${BUILD_COMMIT}
BUILD_COMMIT_SHORT: ${BUILD_COMMIT_SHORT}
BUILD_VERSION: ${BUILD_VERSION}
depends_on:
- mariadb
networks:
- internal-net
- external-net
ports:
- ${BACKEND_PORT:-4000}:${BACKEND_PORT:-4000}
- ${FEDERATION_PORT:-5010}:${FEDERATION_PORT:-5010}
- ${FRONTEND_MODULE_PORT:-3000}:${FRONTEND_MODULE_PORT:-3000}
- ${ADMIN_MODULE_PORT:-8080}:${ADMIN_MODULE_PORT:-8080}
environment:
# Envs used in Dockerfile
# - DOCKER_WORKDIR="/app"
# - PORT=4000
- BUILD_DATE
- BUILD_VERSION
- BUILD_COMMIT
- NODE_ENV=production
- DB_HOST=mariadb
volumes:
- ./logs:/logs
########################################################
# FRONTEND #############################################
# contain also admin in production #####################
########################################################
frontend:
# name the image so that it cannot be found in a Docker Hub repository; otherwise it would be pulled from there instead of being built locally from the 'dockerfile'
image: gradido/frontend:local-production
image: gradido/frontend:local-production
build:
context: ./
dockerfile: ./frontend/Dockerfile
@ -54,13 +20,10 @@ services:
BUILD_COMMIT: ${BUILD_COMMIT}
BUILD_COMMIT_SHORT: ${BUILD_COMMIT_SHORT}
BUILD_VERSION: ${BUILD_VERSION}
profiles:
- single
networks:
- external-net
- internal-net
ports:
- 3000:3000
- ${FRONTEND_MODULE_PORT:-3000}:3000
environment:
# Envs used in Dockerfile
# - DOCKER_WORKDIR="/app"
@ -72,6 +35,8 @@ services:
# env_file:
# - ./.env
# - ./frontend/.env
volumes:
- ./logs/nginx/frontend:/var/log/nginx
########################################################
# ADMIN INTERFACE ######################################
@ -83,28 +48,18 @@ services:
context: ./
dockerfile: ./admin/Dockerfile
target: production
args:
BUILD_COMMIT: ${BUILD_COMMIT}
BUILD_COMMIT_SHORT: ${BUILD_COMMIT_SHORT}
BUILD_VERSION: ${BUILD_VERSION}
profiles:
- single
networks:
- external-net
- internal-net
ports:
- 8080:80
- ${ADMIN_MODULE_PORT:-8080}:8080
environment:
# Envs used in Dockerfile
# - DOCKER_WORKDIR="/app"
# - PORT=8090
# - BUILD_DATE="1970-01-01T00:00:00.00Z"
# - BUILD_VERSION="0.0.0.0"
# - BUILD_COMMIT="0000000"
- NODE_ENV=production
# env_file:
# - ./.env
# - ./admin/.env
- BUILD_COMMIT=${BUILD_COMMIT}
- BUILD_COMMIT_SHORT=${BUILD_COMMIT_SHORT}
- BUILD_VERSION=${BUILD_VERSION}
# - DEBUG=true
volumes:
- ./logs/nginx/admin:/var/log/nginx
#########################################################
## MARIADB ##############################################
@ -133,12 +88,10 @@ services:
context: ./
dockerfile: ./backend/Dockerfile
target: production
profiles:
- single
networks:
- internal-net
ports:
- 4000:4000
- ${BACKEND_PORT:-4000}:4000
depends_on:
- mariadb
environment:
@ -148,7 +101,7 @@ services:
- BUILD_DATE
- BUILD_VERSION
- BUILD_COMMIT
- NODE_ENV="production"
- NODE_ENV=production
- DB_HOST=mariadb
# Application only envs
#env_file:
@ -169,8 +122,6 @@ services:
context: ./
dockerfile: ./dht-node/Dockerfile
target: production
profiles:
- single
networks:
- internal-net
- external-net
@ -185,7 +136,7 @@ services:
- BUILD_DATE
- BUILD_VERSION
- BUILD_COMMIT
- NODE_ENV="production"
- NODE_ENV=production
- DB_HOST=mariadb
# Application only envs
#env_file:
@ -239,23 +190,20 @@ services:
context: ./
dockerfile: ./federation/Dockerfile
target: production
profiles:
- single
networks:
- internal-net
- external-net
ports:
- 5010:5010
- ${FEDERATION_PORT:-5010}:5010
depends_on:
- mariadb
environment:
# Envs used in Dockerfile
# - DOCKER_WORKDIR="/app"
- PORT=5010
- BUILD_DATE
- BUILD_VERSION
- BUILD_COMMIT
- NODE_ENV="production"
- NODE_ENV=production
- DB_HOST=mariadb
# Application only envs
#env_file:
@ -274,8 +222,6 @@ services:
context: .
dockerfile: ./database/Dockerfile
target: up
profiles:
- database
depends_on:
- mariadb
networks:
@ -287,7 +233,7 @@ services:
- BUILD_DATE
- BUILD_VERSION
- BUILD_COMMIT
- NODE_ENV="production"
- NODE_ENV=production
- DB_HOST=mariadb
# Application only envs
#env_file:
@ -331,7 +277,8 @@ services:
- external-net
- internal-net
depends_on:
- gradido
- backend
- frontend
ports:
- 80:80
volumes:

View File

@ -29,9 +29,14 @@
"dependencies": {
"@badeball/cypress-cucumber-preprocessor": "^18.0.1",
"@cypress/browserify-preprocessor": "^3.0.2",
"cypress": "^12.16.0",
"jwt-decode": "^3.1.2",
"multiple-cucumber-html-reporter": "^3.4.0",
"typescript": "^4.7.4"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^5.38.0",
"@typescript-eslint/parser": "^5.38.0",
"cypress": "^12.16.0",
"eslint": "^8.23.1",
"eslint-config-prettier": "^8.3.0",
"eslint-config-standard": "^16.0.3",
@ -41,9 +46,6 @@
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^4.2.1",
"eslint-plugin-promise": "^6.1.1",
"jwt-decode": "^3.1.2",
"multiple-cucumber-html-reporter": "^3.4.0",
"prettier": "^2.7.1",
"typescript": "^4.7.4"
"prettier": "^2.7.1"
}
}

View File

@ -1,7 +1,9 @@
##################################################################################
# BASE ###########################################################################
##################################################################################
FROM node:18.20.7-alpine3.21 as base
FROM node:18.20.7-bookworm-slim as base
#FROM node:18.20.7-alpine3.21 as base
# change to alpine after sodium-native ships with a native alpine build
# ENVs (available in production as well, can be overwritten by command line or env file)
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
@ -13,10 +15,13 @@ ENV BUILD_VERSION="0.0.0.0"
## We cannot do `$(git rev-parse --short HEAD)` here so we default to 0000000
ENV BUILD_COMMIT="0000000"
## SET NODE_ENV
ENV NODE_ENV="production"
ENV NODE_ENV=production
## App relevant Envs
ENV PORT="5010"
# ENV PORT="${env.FEDERATION_PORT}"
## Timezone
ENV TZ=UTC
ENV DB_HOST=mariadb
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
@ -43,60 +48,75 @@ EXPOSE ${PORT}
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
##################################################################################
# Base with turbo ################################################################
##################################################################################
FROM base as turbo-base
RUN apk update && apk add --no-cache libc6-compat \
&& yarn global add turbo@^2 \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
##################################################################################
# BUILDER (create partly monorepo only with data needed by federation) ###########
# BUN ############################################################################
##################################################################################
FROM turbo-base as builder
FROM base as bun-base
RUN apt update && apt install -y --no-install-recommends ca-certificates curl bash unzip
#RUN apk update && apk add --no-cache curl tar bash
RUN curl -fsSL https://bun.sh/install | bash
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
##################################################################################
# Development ####################################################################
##################################################################################
FROM bun-base AS development
# Run command
CMD /bin/sh -c "bun install --filter federation --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2 \
&& turbo federation#dev --env-mode=loose"
##################################################################################
# Basic Image with bun setup and project and source code #########################
##################################################################################
FROM bun-base as bun-base-src
COPY --chown=app:app . .
RUN turbo prune federation --docker
##################################################################################
# INSTALLER (create production image) ############################################
# Build ##########################################################################
##################################################################################
FROM turbo-base AS installer
FROM bun-base-src as build
# First install the dependencies (as they change less often)
COPY --chown=app:app --from=builder /app/out/json/ .
RUN yarn install --frozen-lockfile --production=false \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# Build the project
COPY --chown=app:app --from=builder /app/out/full/ .
RUN turbo build
RUN bun install --filter federation --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2
RUN turbo federation#build federation#typecheck --env-mode=loose
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM installer as test
ENV DB_HOST=mariadb
FROM build as test
# Run command
CMD /bin/sh -c "turbo federation#test --env-mode=loose"
##################################################################################
# install only node modules needed for running bundle ############################
##################################################################################
FROM bun-base-src as production-node-modules
# install production-only dependencies for the federation workspace
RUN bun install --filter federation --production --frozen-lockfile --no-cache \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& ./scripts/clean-prebuilds.sh
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
##################################################################################
FROM base as production
# Copy "binary"-files from build image
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/federation/build/src/index.js ./index.js
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/federation/build/index.js ./index.js
# add node_modules from production_node_modules
COPY --chown=app:app --from=production-node-modules ${DOCKER_WORKDIR}/node_modules ./node_modules
# Copy log4js-config.json to provide log configuration
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/federation/log4js-config.json ./log4js-config.json
COPY --chown=app:app --from=build ${DOCKER_WORKDIR}/federation/log4js-config.json ./log4js-config.json
# Run command
CMD ["node", "index.js"]

13
federation/README.md Normal file
View File

@ -0,0 +1,13 @@
# Federation
## Bun-Compatibility
### Known Issue: Bun's --minify breaks mysql2 compatibility
```
error: Received packet in the wrong sequence.
fatal: true,
code: "PROTOCOL_INCORRECT_PACKET_SEQUENCE"
```
This issue seems to be caused by bun aggressively optimizing or minifying binary operations in the mysql2 authentication layer (Buffer, crypto, xor, etc.), resulting in corrupted packet handling.
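As a hedged workaround sketch (not part of this repository), the bundle can be produced with Bun's programmatic bundler while leaving minification off; the entry point, output path, and file name below are illustrative assumptions:

```ts
// build.bun.ts — illustrative only: bundle the federation entry point with Bun,
// keeping minification off so mysql2's authentication packet handling stays intact.
await Bun.build({
  entrypoints: ['./src/index.ts'], // assumed entry point
  outdir: './build',               // assumed output directory
  target: 'node',
  minify: false,                   // leave off until the mysql2 incompatibility is resolved
  external: ['sodium-native'],     // native addon stays unbundled
})
```

Run it with `bun run build.bun.ts`; the resulting bundle keeps the mysql2 code paths unminified.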

View File

@ -0,0 +1,15 @@
import { esbuildDecorators } from '@anatine/esbuild-decorators'
import { build } from 'esbuild'
build({
entryPoints: ['src/index.ts'],
outdir: 'build',
platform: 'node',
target: 'node18.20.7',
bundle: true,
keepNames: true,
// legalComments: 'inline',
external: ['sodium-native'],
plugins: [esbuildDecorators()],
minify: true,
})

View File

@ -6,23 +6,41 @@
"repository": "https://github.com/gradido/gradido/federation",
"author": "Gradido Academy - https://www.gradido.net",
"license": "Apache-2.0",
"private": false,
"scripts": {
"build": "tsc --build",
"clean": "tsc --build --clean",
"start": "cross-env TZ=UTC TS_NODE_BASEURL=./build node -r tsconfig-paths/register build/src/index.js",
"build": "ts-node ./esbuild.config.ts",
"start": "cross-env TZ=UTC NODE_ENV=production node build/index.js",
"start:bun": "cross-env TZ=UTC NODE_ENV=production bun build/index.js",
"dev": "cross-env TZ=UTC nodemon -w src --ext ts,json,css -r tsconfig-paths/register src/index.ts",
"dev:bun": "cross-env TZ=UTC bun --hot src/index.ts",
"typecheck": "tsc --noEmit",
"test": "cross-env TZ=UTC NODE_ENV=development DB_DATABASE=gradido_test_federation jest --runInBand --forceExit --detectOpenHandles",
"dev": "cross-env TZ=UTC nodemon -w src --ext ts --exec ts-node -r dotenv/config -r tsconfig-paths/register src/index.ts",
"lint": "biome check --error-on-warnings .",
"lint:fix": "biome check --error-on-warnings . --write"
},
"dependencies": {
"cross-env": "^7.0.3",
"sodium-native": "^3.4.1"
},
"devDependencies": {
"@anatine/esbuild-decorators": "^0.2.19",
"@biomejs/biome": "1.9.4",
"@swc/cli": "^0.7.3",
"@swc/core": "^1.11.24",
"@swc/helpers": "^0.5.17",
"@types/express": "4.17.21",
"@types/jest": "27.0.2",
"@types/lodash.clonedeep": "^4.5.6",
"@types/node": "^17.0.21",
"@types/sodium-native": "^2.3.5",
"@types/uuid": "^8.3.4",
"apollo-server-express": "^2.25.2",
"apollo-server-testing": "2.25.2",
"await-semaphore": "0.1.3",
"class-validator": "^0.13.2",
"config-schema": "*",
"cors": "2.8.5",
"cross-env": "^7.0.3",
"database": "*",
"decimal.js-light": "^2.5.1",
"dotenv": "10.0.0",
@ -31,38 +49,24 @@
"graphql": "15.10.1",
"graphql-request": "5.0.0",
"graphql-scalars": "^1.24.2",
"graphql-tag": "^2.12.6",
"helmet": "^7.1.0",
"jest": "27.2.4",
"joi": "^17.13.3",
"lodash.clonedeep": "^4.5.0",
"log4js": "^6.7.1",
"reflect-metadata": "^0.1.13",
"sodium-native": "^3.3.0",
"type-graphql": "^1.1.1",
"typeorm": "^0.3.16",
"uuid": "8.3.2"
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@types/express": "4.17.21",
"@types/jest": "27.0.2",
"@types/lodash.clonedeep": "^4.5.6",
"@types/node": "^17.0.21",
"@types/sodium-native": "^2.3.5",
"@types/uuid": "^8.3.4",
"apollo-server-testing": "2.25.2",
"graphql-tag": "^2.12.6",
"jest": "27.2.4",
"nodemon": "^2.0.7",
"prettier": "^3.5.3",
"reflect-metadata": "^0.1.13",
"ts-jest": "27.0.5",
"ts-node": "^10.9.2",
"tsconfig-paths": "^4.1.1",
"typescript": "^4.9.5"
"type-graphql": "^1.1.1",
"typeorm": "^0.3.16",
"typescript": "^4.9.5",
"uuid": "8.3.2"
},
"nodemonConfig": {
"ignore": [
"**/*.test.ts"
]
"ignore": ["**/*.test.ts"]
},
"engines": {
"node": ">=18"

View File

@ -30,6 +30,12 @@ const server = {
PRODUCTION: process.env.NODE_ENV === 'production',
}
const database = {
DB_CONNECT_RETRY_COUNT: process.env.DB_CONNECT_RETRY_COUNT
? Number.parseInt(process.env.DB_CONNECT_RETRY_COUNT)
: 15,
DB_CONNECT_RETRY_DELAY_MS: process.env.DB_CONNECT_RETRY_DELAY_MS
? Number.parseInt(process.env.DB_CONNECT_RETRY_DELAY_MS)
: 500,
DB_HOST: process.env.DB_HOST ?? 'localhost',
DB_PORT: process.env.DB_PORT ? Number.parseInt(process.env.DB_PORT) : 3306,
DB_USER: process.env.DB_USER ?? 'root',

View File

@ -1,4 +1,6 @@
import {
DB_CONNECT_RETRY_COUNT,
DB_CONNECT_RETRY_DELAY_MS,
DB_DATABASE,
DB_HOST,
DB_PASSWORD,
@ -17,6 +19,8 @@ import Joi from 'joi'
export const schema = Joi.object({
DB_DATABASE,
DB_CONNECT_RETRY_COUNT,
DB_CONNECT_RETRY_DELAY_MS,
DB_HOST,
DB_PASSWORD,
DB_PORT,

View File

@ -0,0 +1,9 @@
import { NonEmptyArray } from 'type-graphql'
import { AuthenticationResolver } from './resolver/AuthenticationResolver'
import { PublicCommunityInfoResolver } from './resolver/PublicCommunityInfoResolver'
import { PublicKeyResolver } from './resolver/PublicKeyResolver'
import { SendCoinsResolver } from './resolver/SendCoinsResolver'
export const getApiResolvers = (): NonEmptyArray<Function> => {
return [AuthenticationResolver, PublicCommunityInfoResolver, PublicKeyResolver, SendCoinsResolver]
}

View File

@ -35,7 +35,7 @@ describe('PublicKeyResolver', () => {
beforeEach(async () => {
const homeCom = new DbFederatedCommunity()
homeCom.foreign = false
homeCom.apiVersion = '1_0'
homeCom.apiVersion = '1_1'
homeCom.endPoint = 'endpoint-url'
homeCom.publicKey = Buffer.from(
'9f6dcd0d985cc7105cd71c3417d9c291b126c8ca90513197de02191f928ef713',

View File

@ -1,6 +1,21 @@
import { Resolver } from 'type-graphql'
import { PublicKeyResolver as PublicKeyResolver_1_0 } from '../../1_0/resolver/PublicKeyResolver'
import { federationLogger as logger } from '@/server/logger'
import { FederatedCommunity as DbFederatedCommunity } from 'database'
import { Query, Resolver } from 'type-graphql'
import { GetPublicKeyResult } from '../../1_0/model/GetPublicKeyResult'
@Resolver()
export class PublicKeyResolver extends PublicKeyResolver_1_0 {}
export class PublicKeyResolver {
@Query(() => GetPublicKeyResult)
async getPublicKey(): Promise<GetPublicKeyResult> {
logger.debug(`getPublicKey() via apiVersion=1_1 ...`)
const homeCom = await DbFederatedCommunity.findOneOrFail({
where: {
foreign: false,
apiVersion: '1_1',
},
})
const publicKeyHex = homeCom.publicKey.toString('hex')
logger.debug(`getPublicKey()-1_1... return publicKey=${publicKeyHex}`)
return new GetPublicKeyResult(publicKeyHex)
}
}

View File

@ -0,0 +1,9 @@
import { NonEmptyArray } from 'type-graphql'
import { AuthenticationResolver } from '../1_0/resolver/AuthenticationResolver'
import { PublicCommunityInfoResolver } from '../1_0/resolver/PublicCommunityInfoResolver'
import { SendCoinsResolver } from '../1_0/resolver/SendCoinsResolver'
import { PublicKeyResolver } from './resolver/PublicKeyResolver'
export const getApiResolvers = (): NonEmptyArray<Function> => {
return [AuthenticationResolver, PublicCommunityInfoResolver, PublicKeyResolver, SendCoinsResolver]
}

View File

@ -1,9 +1,20 @@
import path from 'node:path'
import { federationLogger as logger } from '@/server/logger'
import { NonEmptyArray } from 'type-graphql'
// config
import { CONFIG } from '../../config'
import { getApiResolvers as getApiResolvers_1_0 } from './1_0/schema'
import { getApiResolvers as getApiResolvers_1_1 } from './1_1/schema'
export const getApiResolvers = (): string => {
export const getApiResolvers = (): NonEmptyArray<Function> => {
logger.info(`getApiResolvers...${CONFIG.FEDERATION_API}`)
return path.join(__dirname, `./${CONFIG.FEDERATION_API}/resolver/*Resolver.{ts,js}`)
if (CONFIG.FEDERATION_API === '1_0') {
return getApiResolvers_1_0()
}
if (CONFIG.FEDERATION_API === '1_1') {
return getApiResolvers_1_1()
}
throw new Error(`Unknown API version: ${CONFIG.FEDERATION_API}`)
}

View File

@ -8,7 +8,7 @@ import { DecimalScalar } from './scalar/Decimal'
export const schema = async (): Promise<GraphQLSchema> => {
return await buildSchema({
resolvers: [getApiResolvers()],
resolvers: getApiResolvers(),
// authChecker: isAuthorized,
scalarsMap: [{ type: Decimal, scalar: DecimalScalar }],
/*

View File

@ -3,18 +3,13 @@ import 'reflect-metadata'
import { ApolloServer } from 'apollo-server-express'
import express, { Express, RequestHandler } from 'express'
import { checkDBVersion } from '@/typeorm/DBVersion'
// database
import { connection } from '@/typeorm/connection'
import { checkDBVersionUntil } from '@/typeorm/DBVersion'
// server
import cors from './cors'
// import serverContext from './context'
import { plugins } from './plugins'
// config
import { CONFIG } from '@/config'
// graphql
import { schema } from '@/graphql/schema'
@ -22,6 +17,7 @@ import { schema } from '@/graphql/schema'
// import { elopageWebhook } from '@/webhook/elopage'
import { Connection } from 'typeorm'
import { CONFIG } from '@/config'
import { slowDown } from 'express-slow-down'
import helmet from 'helmet'
import { Logger } from 'log4js'
@ -44,18 +40,10 @@ export const createServer = async (
logger.debug('createServer...')
// open mysql connection
const con = await connection()
if (!con || !con.isConnected) {
logger.fatal(`Couldn't open connection to database!`)
throw new Error(`Fatal: Couldn't open connection to database`)
}
// check for correct database version
const dbVersion = await checkDBVersion(CONFIG.DB_VERSION)
if (!dbVersion) {
logger.fatal('Fatal: Database Version incorrect')
throw new Error('Fatal: Database Version incorrect')
}
const con = await checkDBVersionUntil(
CONFIG.DB_CONNECT_RETRY_COUNT,
CONFIG.DB_CONNECT_RETRY_DELAY_MS,
)
// Express Server
const app = express()

View File

@ -1,5 +1,31 @@
import { CONFIG } from '@/config'
import { federationLogger as logger } from '@/server/logger'
import { Migration } from 'database'
import { Connection as DbConnection } from 'typeorm'
import { connection as connectionFunc } from './connection'
async function checkDBVersionUntil(maxRetries: number, delayMs: number): Promise<DbConnection> {
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
const connection = await connectionFunc()
if (connection?.isInitialized) {
const dbVersion = await checkDBVersion(CONFIG.DB_VERSION)
if (dbVersion) {
logger.info('Database connection and version check succeeded.')
return connection
}
}
} catch (err) {
logger.warn(`Attempt ${attempt}: Waiting for DB...`, err)
}
await new Promise((resolve) => setTimeout(resolve, delayMs))
}
logger.fatal(
`Fatal: Could not connect to database or version check failed after ${maxRetries} attempts.`,
)
throw new Error('Fatal: Database not ready.')
}
const getDBVersion = async (): Promise<string | null> => {
try {
@ -24,4 +50,4 @@ const checkDBVersion = async (DB_VERSION: string): Promise<boolean> => {
return true
}
export { checkDBVersion, getDBVersion }
export { checkDBVersion, getDBVersion, checkDBVersionUntil }

View File

@ -83,5 +83,8 @@
/* Advanced Options */
"skipLibCheck": true, /* Skip type checking of declaration files. */
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
},
"ts-node": {
"swc": true
}
}

View File

@ -9,16 +9,20 @@ ENV DOCKER_WORKDIR="/app"
## We Cannot do `$(date -u +'%Y-%m-%dT%H:%M:%SZ')` here so we use unix timestamp=0
ENV BUILD_DATE="1970-01-01T00:00:00.00Z"
## We cannot do $(npm run version).${BUILD_NUMBER} here so we default to 0.0.0.0
# TODO: get the actual git commit hash into docker
ARG BUILD_VERSION
ENV BUILD_VERSION=${BUILD_VERSION}
ENV BUILD_VERSION=${BUILD_VERSION:-'broken'}
ARG BUILD_COMMIT
ENV BUILD_COMMIT=${BUILD_COMMIT}
ENV BUILD_COMMIT=${BUILD_COMMIT:-'deadbeefdeadbeefdeadbeefdeadbeefdeadbeef'}
ARG BUILD_COMMIT_SHORT
ENV BUILD_COMMIT_SHORT=${BUILD_COMMIT_SHORT}
ENV BUILD_COMMIT_SHORT=${BUILD_COMMIT_SHORT:-'deadbeef'}
## SET NODE_ENV
ENV NODE_ENV="production"
ARG NODE_ENV=production
ENV NODE_ENV=${NODE_ENV}
## App relevant Envs
ENV PORT="3000"
## Timezone
ENV TZ=UTC
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
@ -35,7 +39,9 @@ LABEL maintainer="support@ogradido.net"
# Install Additional Software
## install: node-gyp dependencies
RUN apk --no-cache add g++ make python3
#RUN apk --no-cache add g++ make python3
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
# Settings
## Expose Container Port
@ -46,56 +52,57 @@ RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
##################################################################################
# Base with turbo ################################################################
# BUN ############################################################################
##################################################################################
FROM base as turbo-base
FROM base as bun-base
RUN apk update && apk add --no-cache libc6-compat \
&& yarn global add turbo@^2 \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
RUN apk update && apk add --no-cache curl tar bash
RUN curl -fsSL https://bun.sh/install | bash
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
##################################################################################
# BUILDER (create partly monorepo only with data needed by frontend) #############
# Development ####################################################################
##################################################################################
FROM turbo-base as builder
FROM bun-base AS development
COPY --chown=app:app . .
RUN turbo prune frontend --docker
# used for getting the git commit hash directly from .git
RUN apk update && apk add --no-cache git
# Run command
CMD /bin/sh -c "bun install --filter frontend --no-cache --frozen-lockfile \
&& bun install --global --no-cache --no-save turbo@^2 \
&& turbo frontend#dev --env-mode=loose"
##################################################################################
# INSTALLER (create production image) ############################################
##################################################################################
FROM turbo-base AS installer
FROM bun-base AS build
# First install the dependencies (as they change less often)
COPY --chown=app:app --from=builder /app/out/json/ .
RUN yarn install --frozen-lockfile --production=false \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# Build the project
COPY --chown=app:app --from=builder /app/out/full/ .
RUN turbo build --env-mode=loose
COPY --chown=app:app . .
RUN bun install --filter frontend --no-cache --frozen-lockfile \
&& bun install --global turbo@^2
RUN turbo frontend#build --env-mode=loose
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM installer as test
FROM build as test
# Run command
CMD /bin/sh -c "turbo frontend#test --env-mode=loose"
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
##################################################################################
FROM lipanski/docker-static-website:latest as production
FROM nginx:1.28.0-alpine3.21-slim as production
# tiny static webserver
# https://lipanski.com/posts/smallest-docker-image-static-website
COPY ./nginx/frontend.conf /etc/nginx/conf.d/default.conf
WORKDIR /app
# copy built frontend files
COPY --from=installer /app/frontend/build/ .
COPY --from=build /app/frontend/build/ .

View File

@ -4,7 +4,6 @@
"private": true,
"scripts": {
"dev": "concurrently \"yarn watch-scss\" \"vite\"",
"prebuild": "yarn compile-scss",
"build": "vite build",
"start": "vite preview",
"postbuild": "uname | grep -q Linux && find build -type f -regex '.*\\.\\(html\\|js\\|css\\|svg\\|json\\)' -exec gzip -9 -k {} + || echo 'Skip precompress on non-Linux'",

View File

@ -1,11 +1,11 @@
const {
import {
APP_VERSION,
BUILD_COMMIT,
BUILD_COMMIT_SHORT,
COMMUNITY_DESCRIPTION,
COMMUNITY_LOCATION,
COMMUNITY_NAME,
COMMUNITY_SUPPORT_MAIL,
COMMUNITY_LOCATION,
COMMUNITY_URL,
DEBUG,
DECAY_START_TIME,
@ -14,8 +14,8 @@ const {
HUMHUB_ACTIVE,
NODE_ENV,
PRODUCTION,
} = require('config-schema')
const Joi = require('joi')
} from 'config-schema'
import Joi from 'joi'
// console.log(commonSchema)

View File

@ -11,11 +11,10 @@
"cache": false,
"persistent": true
},
"dev": {
"dependsOn": ["compile-scss"]
},
"build": {
"dependsOn": ["compile-scss"]
"dependsOn": ["compile-scss", "^build"],
"outputs": ["build/**"],
"cache": true
}
}
}

Some files were not shown because too many files have changed in this diff.