Merge pull request #3478 from gradido/yarn_workspaces_turbo

refactor(other): update to yarn workspaces and turbo
einhornimmond 2025-05-15 18:48:53 +02:00 committed by GitHub
commit c12d635efa
291 changed files with 17968 additions and 1566 deletions

View File

@ -33,6 +33,12 @@ admin: &admin
backend: &backend
- 'backend/**/*'
config: &config
- 'config-schema/**/*'
database: &database
- 'database/**/*'
dht_node: &dht_node
- 'dht-node/**/*'

View File

@ -10,6 +10,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
admin: ${{ steps.changes.outputs.admin }}
config: ${{ steps.changes.outputs.config }}
steps:
- uses: actions/checkout@v3.3.0
@ -23,7 +24,7 @@ jobs:
build_test:
if: needs.files-changed.outputs.admin == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.admin == 'true'
name: Docker Build Test - Admin Interface
needs: files-changed
runs-on: ubuntu-latest
@ -33,52 +34,68 @@ jobs:
uses: actions/checkout@v3
- name: Admin Interface | Build 'test' image
run: docker build -f ./admin/Dockerfile --target test -t "gradido/admin:test" --build-arg NODE_ENV="test" .
run: docker build -f ./admin/Dockerfile --target production -t "gradido/admin:production" --build-arg NODE_ENV="production" --build-arg BUILD_COMMIT=$(git rev-parse HEAD) --build-arg BUILD_COMMIT_SHORT=$(git rev-parse --short HEAD) .
unit_test:
if: needs.files-changed.outputs.admin == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.admin == 'true'
name: Unit Tests - Admin Interface
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune admin with turbo's help
run: turbo prune admin
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Admin Interface | Unit tests
run: cd admin && yarn global add node-gyp && yarn && yarn run test
id: test
run: |
cd out && turbo admin#test admin#lint
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint:
if: needs.files-changed.outputs.admin == 'true'
name: Lint - Admin Interface
needs: files-changed
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Admin Interface | Lint
run: cd admin && yarn global add node-gyp && yarn && yarn run lint
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
stylelint:
if: needs.files-changed.outputs.admin == 'true'
name: Stylelint - Admin Interface
needs: files-changed
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Admin Interface | Stylelint
run: cd admin && yarn global add node-gyp && yarn && yarn run stylelint
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
locales:
if: needs.files-changed.outputs.admin == 'true'
name: Locales - Admin Interface
needs: files-changed
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Admin Interface | Locales
run: cd admin && yarn global add node-gyp && yarn && yarn run locales
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi

View File

@ -8,6 +8,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
backend: ${{ steps.changes.outputs.backend }}
config: ${{ steps.changes.outputs.config }}
database: ${{ steps.changes.outputs.database }}
docker-compose: ${{ steps.changes.outputs.docker-compose }}
mariadb: ${{ steps.changes.outputs.mariadb }}
@ -23,7 +24,7 @@ jobs:
list-files: shell
build_test:
if: needs.files-changed.outputs.backend == 'true'
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.database == 'true'
name: Docker Build Test - Backend
needs: files-changed
runs-on: ubuntu-latest
@ -35,49 +36,56 @@ jobs:
run: docker build -f ./backend/Dockerfile --target test -t "gradido/backend:test" .
unit_test:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true'
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true' || needs.files-changed.outputs.config == 'true'
name: Unit tests - Backend
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Backend | install and build
run: cd database && yarn && yarn build && cd ../config && yarn && cd ../backend && yarn && yarn build
- name: Install turbo
run: yarn global add turbo@^2
- name: wait for database to be ready
- name: Prune backend with turbo's help
run: turbo prune backend
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Wait for MariaDB to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Backend | prepare database
run: cd database && yarn up:backend_test
- name: Backend | Unit tests
run: cd backend && yarn test
id: test
run: |
cd out && turbo backend#lint backend#test
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint:
if: needs.files-changed.outputs.backend == 'true'
name: Lint - Backend
needs: files-changed
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Backend | Lint
run: cd database && yarn && cd ../backend && yarn && yarn run lint
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
locales:
if: needs.files-changed.outputs.backend == 'true'
name: Locales - Backend
needs: files-changed
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Backend | Locales
run: cd backend && yarn && yarn locales
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi

43
.github/workflows/test_config.yml vendored Normal file
View File

@ -0,0 +1,43 @@
name: Gradido Config Schema Test CI
on: push
jobs:
files-changed:
name: Detect File Changes - Config-Schema
runs-on: ubuntu-latest
outputs:
config: ${{ steps.changes.outputs.config }}
docker-compose: ${{ steps.changes.outputs.docker-compose }}
steps:
- uses: actions/checkout@v3.3.0
- name: Check for config-schema file changes
uses: dorny/paths-filter@v2.11.1
id: changes
with:
token: ${{ github.token }}
filters: .github/file-filters.yml
list-files: shell
build:
name: typecheck and lint - Config-Schema
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.docker-compose == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune config with turbo's help
run: turbo prune config-schema
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: typecheck and lint
run: cd out && turbo typecheck lint
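All the refactored workflows share this prune-and-install pattern; the config-schema job above can be reproduced on a local checkout like so (a sketch assuming yarn classic and a global turbo ≥ 2, as installed in the workflow itself):

```bash
# Reproduce the CI steps of the config-schema job locally
yarn global add turbo@^2
turbo prune config-schema                         # writes a pruned workspace into ./out
cd out
yarn install --frozen-lockfile --production=false # install dev dependencies for the pruned tree
turbo typecheck lint                              # same tasks the workflow runs
```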

View File

@ -30,8 +30,8 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Database | Build 'test_up' image
run: docker build --target test_up -t "gradido/database:test_up" database/
- name: Database | Build 'up' image
run: docker build --target up -t "gradido/database:up" -f database/Dockerfile .
database_migration_test:
if: needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true'
@ -42,15 +42,32 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: docker-compose mariadb
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Database | docker-compose
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach mariadb
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune database with turbo's help
run: turbo prune database
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Wait for MariaDB to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Database | up
run: docker compose -f docker-compose.yml up --no-deps database
run: cd out && turbo up
- name: Database | reset
run: docker compose -f docker-compose.yml -f docker-compose.reset.yml up --no-deps database
run: cd out && turbo reset
lint:
if: needs.files-changed.outputs.database == 'true'
name: Lint - Database Up
@ -60,5 +77,14 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune database with turbo's help
run: turbo prune database
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Database | Lint
run: cd database && yarn && yarn run lint
run: cd out && turbo lint

View File

@ -7,13 +7,14 @@ jobs:
name: Detect File Changes - DHT Node
runs-on: ubuntu-latest
outputs:
config: ${{ steps.changes.outputs.config }}
database: ${{ steps.changes.outputs.database }}
dht_node: ${{ steps.changes.outputs.dht_node }}
docker-compose: ${{ steps.changes.outputs.docker-compose }}
steps:
- uses: actions/checkout@v3.3.0
- name: Check for frontend file changes
- name: Check for dht-node, config-schema, database, docker-compose file changes
uses: dorny/paths-filter@v2.11.1
id: changes
with:
@ -23,49 +24,58 @@ jobs:
build:
name: Docker Build Test - DHT Node
if: needs.files-changed.outputs.dht_node == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.dht_node == 'true' || needs.files-changed.outputs.docker-compose == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Build 'test' image
- name: Build 'test' image (typecheck & build)
run: docker build --target test -t "gradido/dht-node:test" -f dht-node/Dockerfile .
lint:
name: Lint - DHT Node
if: needs.files-changed.outputs.dht_node == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: DHT-Node | Lint
run: cd database && yarn && cd ../dht-node && yarn && yarn run lint
unit_test:
name: Unit Tests - DHT Node
if: needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.dht_node == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.mariadb == 'true'
needs: [files-changed, build]
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.dht_node == 'true' || needs.files-changed.outputs.docker-compose == 'true'
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: DHT-Node | install and build
run: cd database && yarn && yarn build && cd ../config && yarn && cd ../dht-node && yarn && yarn build
- name: Install turbo
run: yarn global add turbo@^2
- name: wait for database to be ready
- name: Prune dht-node with turbo's help
run: turbo prune dht-node
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Wait for MariaDB to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: DHT-Node | prepare database
run: cd database && yarn up:dht_test
- name: DHT-Node | Unit tests
run: cd dht-node && yarn test
- name: run unit test & lint & build
id: test
run: |
cd out && turbo dht-node#lint dht-node#test
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint:
name: Lint - DHT Node
if: needs.files-changed.outputs.dht_node == 'true'
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi

View File

@ -8,40 +8,64 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Boot up test system | docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach mariadb
- name: install bun
uses: oven-sh/setup-bun@v2
- name: Boot up test system | docker-compose mariadb mailserver
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach mariadb mailserver
- name: Prepare test system
run: |
sudo chown runner:docker -R *
cd database && yarn && yarn build
cd ../config && yarn
cd ../backend && yarn
bun install
sudo cp ./nginx/e2e-test.conf /etc/nginx/sites-available/default
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Boot up test system | seed backend
run: bun turbo seed
- name: Moving logs after seeding
run: |
mkdir -p /home/runner/work/gradido/gradido/logs/backend/seed
mv /home/runner/work/gradido/gradido/logs/backend/*.log /home/runner/work/gradido/gradido/logs/backend/seed/
- name: Boot up test system | docker-compose backend, frontend
run: |
cd backend
cp .env.test_e2e .env
cd ..
bun turbo backend#build frontend#build
bun turbo backend#start frontend#start &
- name: End-to-end tests | prepare
run: |
wget --no-verbose -O /opt/cucumber-json-formatter "https://github.com/cucumber/json-formatter/releases/download/v19.0.0/cucumber-json-formatter-linux-386"
chmod +x /opt/cucumber-json-formatter
sudo ln -fs /opt/cucumber-json-formatter /usr/bin/cucumber-json-formatter
cd e2e-tests/
yarn
bun install
- name: wait for database to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Boot up test system | seed backend
- name: wait for frontend and backend to be ready
run: |
cd database && yarn dev_reset
cd ../backend && yarn seed
- name: Boot up test system | docker-compose backend, frontend, admin, nginx, mailserver
until nc -z 127.0.0.1 3000; do echo waiting for frontend; sleep 1; done;
until nc -z 127.0.0.1 4000; do echo waiting for backend; sleep 1; done;
- name: Start local nginx webserver
run: |
cd backend
cp .env.test_e2e .env
cd ..
docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps backend frontend admin nginx mailserver
sudo nginx -t
sudo systemctl start nginx
- name: wait for nginx to be ready
run: until nc -z 127.0.0.1 80; do echo waiting for nginx; sleep 1; done;
- name: End-to-end tests | run tests
id: e2e-tests
@ -67,3 +91,18 @@ jobs:
with:
name: cypress-report-pr-#${{ steps.pr.outputs.number }}
path: /home/runner/work/gradido/gradido/e2e-tests/cypress/reports/cucumber_html_report
- name: End-to-end tests | if tests failed, upload video
id: e2e-video
if: ${{ failure() && steps.e2e-tests.conclusion == 'failure' }}
uses: actions/upload-artifact@v4
with:
name: cypress-videos-pr-#${{ steps.pr.outputs.number }}
path: /home/runner/work/gradido/gradido/e2e-tests/cypress/videos
- name: End-to-end tests | if tests failed, upload logs
if: ${{ failure() && steps.e2e-tests.conclusion == 'failure' }}
uses: actions/upload-artifact@v4
with:
name: backend-logs-pr-#${{ steps.pr.outputs.number }}
path: /home/runner/work/gradido/gradido/logs/backend

View File

@ -7,6 +7,8 @@ jobs:
name: Detect File Changes - Federation
runs-on: ubuntu-latest
outputs:
config: ${{ steps.changes.outputs.config }}
database: ${{ steps.changes.outputs.database }}
docker-compose: ${{ steps.changes.outputs.docker-compose }}
federation: ${{ steps.changes.outputs.federation }}
steps:
@ -30,57 +32,50 @@ jobs:
uses: actions/checkout@v3
- name: Build `test` image
run: |
docker build --target test -t "gradido/federation:test" -f federation/Dockerfile .
docker save "gradido/federation:test" > /tmp/federation.tar
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: docker-federation-test
path: /tmp/federation.tar
lint:
name: Lint - Federation
if: needs.files-changed.outputs.federation == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Lint
run: cd federation && yarn && yarn run lint
run: docker build --target test -t "gradido/federation:test" -f federation/Dockerfile .
unit_test:
name: Unit Tests - Federation
if: needs.files-changed.outputs.database == 'true' || needs.files-changed.outputs.docker-compose == 'true' || needs.files-changed.outputs.federation == 'true' || needs.files-changed.outputs.mariadb == 'true'
needs: [files-changed, build]
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
- name: Download Docker Image
uses: actions/download-artifact@v4
with:
name: docker-federation-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/federation.tar
- name: docker-compose mariadb
run: docker compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Install turbo
run: yarn global add turbo@^2
- name: Federation | install and build
run: cd database && yarn && yarn build && cd ../config && yarn && cd ../federation && yarn && yarn build
- name: Prune federation with turbo's help
run: turbo prune federation
- name: wait for database to be ready
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Wait for MariaDB to be ready
run: docker run --rm --network gradido_internal-net busybox sh -c 'until nc -z mariadb 3306; do echo waiting for db; sleep 1; done;'
- name: Federation | prepare database
run: cd database && yarn up:federation_test
- name: Federation | Unit tests
run: docker run --env NODE_ENV=test --env DB_HOST=mariadb --network gradido_internal-net --rm gradido/federation:test yarn run test
id: test
run: |
cd out && turbo federation#lint federation#test
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint:
name: Lint - Federation
if: needs.files-changed.outputs.federation == 'true'
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi

View File

@ -9,6 +9,7 @@ jobs:
name: Detect File Changes - Frontend
runs-on: ubuntu-latest
outputs:
config: ${{ steps.changes.outputs.config }}
frontend: ${{ steps.changes.outputs.frontend }}
steps:
- uses: actions/checkout@v3.3.0
@ -23,7 +24,7 @@ jobs:
build_test:
if: needs.files-changed.outputs.frontend == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.frontend == 'true'
name: Docker Build Test - Frontend
needs: files-changed
runs-on: ubuntu-latest
@ -33,52 +34,62 @@ jobs:
uses: actions/checkout@v3
- name: Frontend | Build 'test' image
run: docker build -f ./frontend/Dockerfile --target test -t "gradido/frontend:test" --build-arg NODE_ENV="test" .
run: docker build -f ./frontend/Dockerfile --target production -t "gradido/frontend:production" --build-arg NODE_ENV="production" --build-arg BUILD_COMMIT=$(git rev-parse HEAD) --build-arg BUILD_COMMIT_SHORT=$(git rev-parse --short HEAD) .
unit_test:
if: needs.files-changed.outputs.frontend == 'true'
if: needs.files-changed.outputs.config == 'true' || needs.files-changed.outputs.frontend == 'true'
name: Unit Tests - Frontend
needs: files-changed
runs-on: ubuntu-latest
outputs:
test-success: ${{ steps.test.outputs.success }}
steps:
- name: Set Node.js version
uses: actions/setup-node@v4
with:
node-version: '18.20.7'
- name: Checkout code
uses: actions/checkout@v3
- name: Install turbo
run: yarn global add turbo@^2
- name: Prune frontend with turbo's help
run: turbo prune frontend
- name: install dependencies
run: cd out && yarn install --frozen-lockfile --production=false
- name: Frontend | Unit tests
run: cd frontend && yarn global add node-gyp && yarn && yarn run test
id: test
run: |
cd out && turbo frontend#test frontend#lint
echo "success=$([ $? -eq 0 ] && echo true || echo false)" >> $GITHUB_OUTPUT
lint:
if: needs.files-changed.outputs.frontend == 'true'
name: Lint - Frontend
needs: files-changed
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Frontend | Lint
run: cd frontend && yarn global add node-gyp && yarn && yarn run lint
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
stylelint:
if: needs.files-changed.outputs.frontend == 'true'
name: Stylelint - Frontend
needs: files-changed
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Frontend | Stylelint
run: cd frontend && yarn global add node-gyp && yarn && yarn run stylelint
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi
locales:
if: needs.files-changed.outputs.frontend == 'true'
name: Locales - Frontend
needs: files-changed
needs: [files-changed, unit_test]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Frontend | Locales
run: cd frontend && yarn global add node-gyp && yarn && yarn run locales
- name: Check result from previous step
run: if [ "${{ needs.unit_test.outputs.test-success }}" != "true" ]; then exit 1; fi

2
.gitignore vendored
View File

@ -2,10 +2,12 @@
.project
*.log
*.bak
.turbo
/node_modules/*
messages.pot
nbproject
.metadata
/out/*
/.env
package-lock.json
/deployment/bare_metal/.env

161
Dockerfile Normal file
View File

@ -0,0 +1,161 @@
##################################################################################
# BASE ###########################################################################
##################################################################################
FROM node:18.20.7-bookworm as base
# ENVs (available in production as well, can be overwritten by command line or env file)
ENV TURBO_CACHE_DIR=/tmp/turbo
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
ENV DOCKER_WORKDIR="/app"
## We Cannot do `$(date -u +'%Y-%m-%dT%H:%M:%SZ')` here so we use unix timestamp=0
ENV BUILD_DATE="1970-01-01T00:00:00.00Z"
## We cannot do $(npm run version).${BUILD_NUMBER} here so we default to 0.0.0.0
ENV BUILD_VERSION="0.0.0.0"
## We cannot do `$(git rev-parse --short HEAD)` here so we default to 0000000
ARG BUILD_COMMIT
ENV BUILD_COMMIT=${BUILD_COMMIT}
## SET NODE_ENV
ENV NODE_ENV="production"
## App relevant Envs
ENV BACKEND_PORT="4000"
ENV FEDERATION_PORT="5010"
ENV FRONTEND_MODULE_PORT="3000"
ENV ADMIN_MODULE_PORT="8080"
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
LABEL org.label-schema.name="gradido:backend"
LABEL org.label-schema.description="Gradido GraphQL Backend"
LABEL org.label-schema.usage="https://github.com/gradido/gradido/blob/master/README.md"
LABEL org.label-schema.url="https://gradido.net"
LABEL org.label-schema.vcs-url="https://github.com/gradido/gradido/tree/master/backend"
LABEL org.label-schema.vcs-ref="${BUILD_COMMIT}"
LABEL org.label-schema.vendor="Gradido Community"
LABEL org.label-schema.version="${BUILD_VERSION}"
LABEL org.label-schema.schema-version="1.0"
LABEL maintainer="support@gradido.net"
# Install Additional Software
## install: git
#apk add --no-cache libc6-compat
#RUN apk --no-cache add git
# Install bun
# RUN apt-get update && apt-get install -y curl unzip
RUN curl -fsSL https://bun.sh/install | BUN_INSTALL=/usr/local bash
# Add bun to PATH
# Install turbo globally
RUN bun install --global turbo
# Add bun's global bin directory to PATH
ENV PATH="/root/.bun/bin:${PATH}"
#RUN yarn global add turbo
# Settings
## Expose Container Port
EXPOSE ${BACKEND_PORT}
EXPOSE ${FEDERATION_PORT}
EXPOSE ${FRONTEND_MODULE_PORT}
EXPOSE ${ADMIN_MODULE_PORT}
## Workdir
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
##################################################################################
# DEVELOPMENT (Connected to the local environment, to reload on demand) ##########
##################################################################################
FROM base as development
# We don't need to copy or build anything since we are going to bind to the
# local filesystem, which will need a rebuild anyway
# Run command
# (for development we need to execute yarn install since the
# node_modules are on another volume and need updating)
CMD /bin/sh -c "bun install && turbo dev --env-mode=loose"
##################################################################################
# INSTALL (Does contain all node_modules) ########################################
##################################################################################
FROM base as install
# Copy everything
COPY --chown=app:app ./ ./
# Install dependencies with bun
RUN bun install --frozen-lockfile --non-interactive
# Try with bun first; fall back to yarn if problems occur.
# For the admin folder, use yarn to install local dependencies that rely on nohoist for @vee-validate/i18n, which bun does not support.
#RUN bun install --frozen-lockfile
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM install as test
# Run command
CMD /bin/sh -c "turbo test --env-mode=loose"
##################################################################################
# RESET DB #######################################################################
##################################################################################
FROM install as reset
# Run command
CMD /bin/sh -c "cd database && bun run reset"
##################################################################################
# BUILD (Does contain all files and is therefore bloated) ########################
##################################################################################
FROM install as build
# turbo build
RUN turbo build --env-mode=loose
##################################################################################
# PRODUCTION #####################################################################
##################################################################################
FROM build as production
# Run command
CMD /bin/sh -c "turbo start --env-mode=loose"
##################################################################################
# FINAL PRODUCTION IMAGE #########################################################
##################################################################################
FROM node:18.20.7-bookworm-slim as production2
ENV TURBO_CACHE_DIR=/tmp/turbo
ENV DOCKER_WORKDIR="/app"
ENV NODE_ENV="production"
ENV DB_HOST=mariadb
WORKDIR ${DOCKER_WORKDIR}
# Copy only the build artifacts from the previous build stage
COPY --chown=app:app --from=build /app/node_modules ./node_modules
COPY --chown=app:app --from=build /app/package.json ./package.json
COPY --chown=app:app --from=build /app/yarn.lock ./yarn.lock
COPY --chown=app:app --from=build /app/turbo.json ./turbo.json
# and Turbo cache to prevent rebuilding
COPY --chown=app:app --from=build /tmp/turbo ./tmp/turbo
RUN yarn global add turbo
COPY --chown=app:app --from=build /app/backend ./backend
COPY --chown=app:app --from=build /app/frontend ./frontend
COPY --chown=app:app --from=build /app/admin ./admin
COPY --chown=app:app --from=build /app/database ./database
COPY --chown=app:app --from=build /app/config ./config
COPY --chown=app:app --from=build /app/federation ./federation
COPY --chown=app:app --from=build /app/dht-node ./dht-node
# Expose ports
EXPOSE ${BACKEND_PORT}
EXPOSE ${FEDERATION_PORT}
EXPOSE ${FRONTEND_MODULE_PORT}
EXPOSE ${ADMIN_MODULE_PORT}
# Command to start
CMD ["turbo", "start", "--env-mode=loose"]

159
README.md
View File

@ -10,37 +10,109 @@ The dominant financial system threatens to fail around the globe, followed by ma
Find out more about the Project on its [Website](https://gradido.net/). It is offering vast resources about the idea. The remaining document will discuss the gradido software only.
## Software requirements
## Getting Started
Currently we only support `docker` install instructions to run all services, since many different programming languages and frameworks are used.
We are still in active development, so some things might not work as expected. If you encounter any issues, please feel free to report them via the [Issue Tracker](https://github.com/gradido/gradido/issues). Your feedback is valuable as we continue to build a more sustainable financial system!
- [docker](https://www.docker.com/)
- [docker-compose]
- [yarn](https://phoenixnap.com/kb/yarn-windows)
### For Arch Linux
Install the required packages:
### Get Gradido to your local machine
Clone the Gradido repository to your local machine.
```bash
sudo pacman -S docker
sudo pacman -S docker-compose
git clone https://github.com/gradido/gradido.git
cd gradido
```
Add group `docker` and then your user to it in order to allow you to run docker without sudo
For local development, you can run Gradido with **Docker** or **natively**, depending on your preferences and system setup. If you don't have a native MariaDB or MySQL installation, Docker can be used to handle the database as well.
### Docker Setup
You can also run Gradido using Docker.
- **Development Mode (Hot-Reload)**:
```bash
sudo groupadd docker # may already exist `groupadd: group 'docker' already exists`
sudo usermod -aG docker $USER
groups # verify you have the group (requires relog)
docker compose up
```
Start the docker service:
- **Production Build**:
```bash
sudo systemctl start docker
docker compose -f docker-compose.yml up
```
This will launch the following services as containers:
| Service | Description |
| --- | --- |
| gradido | Backend & Frontend (All Modules) |
| mariadb | MariaDB Database Server |
| nginx | Webserver acting as a reverse proxy |
#### Nginx Routing Overview
```mermaid
graph TD;
A[localhost nginx] -->|/| B[frontend port 3000]
A -->|/admin| C[Admin UI port 8080]
A -->|/graphql| D[backend port 4000]
classDef default fill:#ffdf97,stroke:#333,stroke-width:2px;
class A,B,C,D default;
```
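Once the compose stack is up, the routing can be verified from the host (a quick smoke test; the `{ __typename }` query is only an illustrative ping, not part of the repository):

```bash
# Check the nginx routes from the diagram above (assumes `docker compose up` is running)
curl -I http://localhost/          # wallet frontend (port 3000 behind nginx)
curl -I http://localhost/admin     # admin interface (port 8080 behind nginx)
curl -s -X POST http://localhost/graphql \
  -H 'Content-Type: application/json' \
  -d '{"query":"{ __typename }"}'  # backend GraphQL endpoint (port 4000 behind nginx)
```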
### Database Setup
Gradido requires a running **MariaDB** or **MySQL** database instance.
By default, the application expects the following credentials:
- Database name: gradido_community (will be automatically created on startup)
- User: root
- Password: (empty)
You can either run the database **natively** on your system, or use **Docker** to spin up the database along with an optional phpMyAdmin interface:
- Run database using Docker:
```bash
docker compose up mariadb
```
- To launch phpMyAdmin along with the database:
```bash
docker compose up mariadb phpmyadmin
```
Once started, phpMyAdmin will be available at:
http://localhost:8074
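If you prefer a plain `docker run` over compose for the database, an equivalent container might look like this (a sketch; the image tag, container name, and port mapping are assumptions — only the empty root password and the `gradido_community` database name come from the defaults above):

```bash
# Stand-alone MariaDB matching the expected defaults (user root, empty password, gradido_community)
docker run -d --name gradido-mariadb \
  -e MARIADB_ALLOW_EMPTY_ROOT_PASSWORD=1 \
  -e MARIADB_DATABASE=gradido_community \
  -p 3306:3306 \
  mariadb:latest
```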
### Native Setup
Install all node modules with [Bun](https://bun.sh/) and [Turborepo](https://turborepo.com/docs/getting-started/installation) (globally, for convenience):
```bash
bun install
bun install --global turbo@^2
```
If this does not work, try using [yarn](https://classic.yarnpkg.com/en/docs/install) instead:
```bash
yarn install
yarn global add turbo@^2
```
- **Development Mode (Hot-Reload)**:
Launches Gradido with hot-reloading for fast iteration.
```bash
turbo dev
```
- **Production Build**:
Builds and runs Gradido optimized for production.
A deployment script for Hetzner Cloud is available [here](./deployment/hetzner_cloud/README.md).
```bash
turbo start
```
[More info on using turbo](./working-native.md)
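Turbo can also scope tasks to a single workspace via `--filter`, which is handy when you only work on one module (the workspace names correspond to the module folders listed further below):

```bash
# Run only one module in dev mode, or only one module's tests
turbo dev --filter=backend
turbo test --filter=frontend
```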
### For Windows
#### docker
@ -52,61 +124,46 @@ The installation of dockers depends on your selected product package from the [d
* In case Docker Desktop does not start correctly because of previous Docker installations, please clean the directories used by the previous installation - `C:\Users` - before you retry starting Docker Desktop. For further problems running Docker Desktop, please take a look at "[logs and trouble shooting](https://docs.docker.com/desktop/windows/troubleshoot/)"
* In case your Docker Desktop installation causes high memory consumption by the vmmem process, please take a look at "[vmmen process consuming too much memory (Docker Desktop)](https://dev.to/tallesl/vmmen-process-consuming-too-much-memory-docker-desktop-273p)"
#### yarn
### yarn
The Gradido build process uses the yarn package manager. Please download and install [yarn for windows](https://phoenixnap.com/kb/yarn-windows) by following the instructions there.
## How to run?
As soon as the software requirements are fulfilled and a docker installation is up and running, open a PowerShell on Windows or another command-line prompt on Linux.
### ⚡ Workspaces and Bun Compatibility
The project now uses **Workspaces**, and work is ongoing to make all modules **Bun-compatible**. You can already use `bun install`, but not every module works fully with Bun yet.
Create and navigate to the directory where you want to create the Gradido runtime environment.
```
mkdir \Gradido
cd \Gradido
```
### 1. Clone Sources
Clone the repo and pull all submodules
To install bun, run:
```bash
git clone git@github.com:gradido/gradido.git
git submodule update --recursive --init
curl -fsSL https://bun.sh/install | bash
```
### 2. Install modules
You can go into each subfolder (admin, frontend, database, backend, ...) and call ``yarn`` there, or you can call ``yarn installAll``.
### 3. Run docker-compose
Run docker-compose to bring up the development environment
To install dependencies with Bun:
```bash
docker-compose up
bun install
```
### Additional Build options
If you want to build for production, you can do this as well:
```bash
docker-compose -f docker-compose.yml up
```
Note that some modules are still not fully compatible with Bun. Therefore, continue using **Yarn** for development if you run into any issues.
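Since turbo is available in the workspace after `bun install`, tasks can also be driven through bun, mirroring what the end-to-end workflow does (a sketch using the `package#task` syntax from the CI jobs):

```bash
# Install with bun, then run package-scoped tasks through the workspace-local turbo
bun install
bun turbo admin#test                      # same package#task syntax as in CI
bun turbo backend#build frontend#build    # build several packages in one go
```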
## Services defined in this package
- [frontend](./frontend) Wallet frontend
- [admin](./admin) Admin interface
- [backend](./backend) GraphQL & Business logic backend
- [mariadb](./mariadb) Database backend
- [dht-node](./dht-node) DHT Node, discovers other Gradido communities
- [dlt-connector](./dlt-connector) DLT Connector (WIP), connects to the blockchain
- [federation](./federation) Federation, processes incoming requests from other Gradido communities
- [database](./database) Contains TypeORM entities and migration code for the database
- [mariadb](./mariadb) Database server
- [config-schema](./config-schema) Contains common configuration schemas
- [e2e-tests](./e2e-tests) End-to-end tests
We are currently restructuring the services to reduce dependencies and unify business logic in one place. Furthermore, the databases defined for each service will be unified into one.
### Open the wallet
Once you have `docker-compose` up and running, you can open [http://localhost/](http://localhost/) and create yourself a new wallet account.
Once you have Gradido up and running, you can open [http://localhost/](http://localhost/) and create a new wallet account.
## How to release

View File

@ -1,17 +1,8 @@
'use strict'
'use strict';
module.exports = {
extends: ['stylelint-config-standard-scss', 'stylelint-config-recommended-vue'],
overrides: [
{
files: '**/*.{scss}',
customSyntax: 'postcss-scss',
extends: ['stylelint-config-standard-scss'],
},
{
files: '**/*.vue',
customSyntax: 'postcss-html',
extends: ['stylelint-config-recommended-vue'],
},
],
}
"extends": [
"stylelint-config-standard-scss",
"stylelint-config-recommended-vue/scss"
]
};

View File

@ -1,7 +1,7 @@
##################################################################################
# BASE ###########################################################################
##################################################################################
FROM node:18.20-alpine3.20 as base
FROM node:18.20.7-alpine3.21 as base
# ENVs (available in production as well, can be overwritten by command line or env file)
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
@ -9,9 +9,12 @@ ENV DOCKER_WORKDIR="/app"
## We Cannot do `$(date -u +'%Y-%m-%dT%H:%M:%SZ')` here so we use unix timestamp=0
ENV BUILD_DATE="1970-01-01T00:00:00.00Z"
## We cannot do $(npm run version).${BUILD_NUMBER} here so we default to 0.0.0.0
ENV BUILD_VERSION="0.0.0.0"
## We cannot do `$(git rev-parse --short HEAD)` here so we default to 0000000
ENV BUILD_COMMIT_SHORT="0000000"
ARG BUILD_VERSION
ENV BUILD_VERSION=${BUILD_VERSION}
ARG BUILD_COMMIT
ENV BUILD_COMMIT=${BUILD_COMMIT}
ARG BUILD_COMMIT_SHORT
ENV BUILD_COMMIT_SHORT=${BUILD_COMMIT_SHORT}
## SET NODE_ENV
ARG NODE_ENV="production"
## App relevant Envs
@ -42,67 +45,55 @@ EXPOSE ${PORT}
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
RUN mkdir -p /config
##################################################################################
# DEVELOPMENT (Connected to the local environment, to reload on demand) ##########
# Base with turbo ################################################################
##################################################################################
FROM base as development
FROM base as turbo-base
# We don't need to copy or build anything since we gonna bind to the
# local filesystem which will need a rebuild anyway
# Run command
# (for development we need to execute yarn install since the
# node_modules are on another volume and need updating)
CMD /bin/sh -c "cd /config && yarn install && cd /app && yarn && yarn run dev"
RUN apk update && apk add --no-cache libc6-compat \
&& yarn global add turbo@^2 \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
##################################################################################
# BUILD (Does contain all files and is therefore bloated) ########################
# BUILDER (create partial monorepo with only the data needed by admin) ###########
##################################################################################
FROM base as build
FROM turbo-base as builder
# Copy everything
COPY ./admin/ .
# Copy everything from config
COPY ./config/ ../config/
COPY --chown=app:app . .
RUN turbo prune admin --docker
# yarn install and build config
RUN cd ../config && yarn install --production=false --frozen-lockfile --non-interactive && yarn build
# yarn install admin
RUN yarn install --production=false --frozen-lockfile --non-interactive
# yarn build
RUN yarn run build
##################################################################################
# INSTALLER (create production image) ############################################
##################################################################################
FROM turbo-base AS installer
# First install the dependencies (as they change less often)
COPY --chown=app:app --from=builder /app/out/json/ .
RUN yarn install --frozen-lockfile --production=false \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# Build the project
COPY --chown=app:app --from=builder /app/out/full/ .
RUN turbo build --env-mode=loose
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM build as test
# Install Additional Software
RUN apk add --no-cache bash jq
FROM installer as test
# Run command
CMD /bin/sh -c "yarn run dev"
CMD /bin/sh -c "turbo admin#test --env-mode=loose"
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
##################################################################################
FROM base as production
FROM lipanski/docker-static-website:latest as production
# Copy "binary"-files from build image
COPY --from=build ${DOCKER_WORKDIR}/build ./build
COPY --from=build ${DOCKER_WORKDIR}/../config/build ../config/build
# We also copy the node_modules express and serve-static for the run script
COPY --from=build ${DOCKER_WORKDIR}/node_modules ./node_modules
# Copy static files
COPY --from=build ${DOCKER_WORKDIR}/public ./public
# Copy package.json for script definitions (lock file should not be needed)
COPY --from=build ${DOCKER_WORKDIR}/package.json ./package.json
# Copy run scripts run/
COPY --from=build ${DOCKER_WORKDIR}/run ./run
# tiny static webserver
# https://lipanski.com/posts/smallest-docker-image-static-website
# Run command
CMD /bin/sh -c "yarn run start"
# copy built admin files
COPY --from=installer /app/admin/build/ ./admin/

View File

@ -2,15 +2,14 @@
"name": "admin",
"description": "Administration Interface for Gradido",
"main": "index.js",
"author": "Moriz Wahl",
"author": "Gradido Academy - https://www.gradido.net",
"version": "2.5.2",
"license": "Apache-2.0",
"scripts": {
"start": "node run/server.js",
"dev": "vite",
"build": "vite build",
"serve": "vite preview",
"postbuild": "find build -type f -regex '.*\\.\\(html\\|js\\|css\\|svg\\|json\\)' -exec gzip -9 -k {} +",
"start": "vite preview",
"postbuild": "uname | grep -q Linux && find build -type f -regex '.*\\.\\(html\\|js\\|css\\|svg\\|json\\)' -exec gzip -9 -k {} + || echo 'Skip precompress on non-Linux'",
"lint": "eslint --max-warnings=0 --ext .js,.vue,.json .",
"stylelint": "stylelint --max-warnings=0 '**/*.{scss,vue}'",
"test": "cross-env TZ=UTC vitest run",
@ -20,34 +19,25 @@
"locales": "scripts/sort.sh"
},
"dependencies": {
"@babel/core": "^7.15.8",
"@babel/eslint-parser": "^7.24.8",
"@babel/node": "^7.15.8",
"@babel/preset-env": "^7.15.8",
"@iconify/json": "^2.2.228",
"@vitejs/plugin-vue": "3.2.0",
"@popperjs/core": "^2.11.8",
"@vitejs/plugin-vue": "^5.2.3",
"@vue/apollo-composable": "^4.0.2",
"@vue/apollo-option": "^4.0.0",
"@vue/compat": "3.4.31",
"@vue/eslint-config-prettier": "^6.0.0",
"@vue/compat": "3.5.13",
"apollo-boost": "^0.4.9",
"babel-core": "7.0.0-bridge.0",
"babel-plugin-component": "^1.1.1",
"babel-preset-env": "^1.7.0",
"babel-preset-vue": "^2.0.2",
"bootstrap": "^5.3.3",
"bootstrap-vue-next": "0.26.8",
"date-fns": "^2.29.3",
"dotenv-webpack": "^7.0.3",
"express": "^4.17.1",
"graphql": "^16.9.0",
"graphql": "^15.10.1",
"graphql-tag": "^2.12.6",
"identity-obj-proxy": "^3.0.0",
"portal-vue": "3.0.0",
"qrcanvas-vue": "3.0.0",
"regenerator-runtime": "^0.13.9",
"sass": "^1.77.8",
"vite": "3.2.10",
"unplugin-icons": "^0.19.0",
"unplugin-vue-components": "^0.27.3",
"vite": "^5.4.14",
"vite-plugin-commonjs": "^0.10.1",
"vue": "3.5.13",
"vue-apollo": "3.1.2",
@ -63,49 +53,46 @@
"@intlify/eslint-plugin-vue-i18n": "^1.4.0",
"@vitest/coverage-v8": "^2.0.5",
"@vue/compiler-sfc": "^3.4.32",
"@vue/eslint-config-prettier": "^10.2.0",
"@vue/test-utils": "^2.4.6",
"babel-plugin-transform-require-context": "^0.1.1",
"config-schema": "*",
"cross-env": "^7.0.3",
"eslint": "8.57.0",
"eslint-config-prettier": "8.10.0",
"eslint-config-standard": "^16.0.3",
"eslint-loader": "^4.0.2",
"dotenv-webpack": "^7.0.3",
"eslint": "8.57.1",
"eslint-config-prettier": "^10.1.1",
"eslint-config-standard": "^17.0.0",
"eslint-plugin-import": "^2.25.2",
"eslint-plugin-n": "^16",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "5.2.1",
"eslint-plugin-promise": "^5.1.1",
"eslint-plugin-prettier": "^5.2.3",
"eslint-plugin-promise": "^6.1.1",
"eslint-plugin-vue": "8.7.1",
"gradido-config": "../config",
"joi": "^17.13.3",
"jsdom": "^25.0.0",
"mock-apollo-client": "^1.2.1",
"postcss": "^8.4.8",
"postcss-html": "^1.3.0",
"postcss-scss": "^4.0.3",
"prettier": "^3.3.3",
"stylelint": "16.7.0",
"stylelint-config-recommended-vue": "1.5.0",
"stylelint-config-standard-scss": "13.1.0",
"unplugin-icons": "^0.19.0",
"unplugin-vue-components": "^0.27.3",
"postcss-html": "^1.8.0",
"prettier": "^3.5.3",
"sass": "^1.77.8",
"stylelint": "^16.19.1",
"stylelint-config-recommended-vue": "^1.6.0",
"stylelint-config-standard-scss": "^14.0.0",
"vite-plugin-environment": "^1.1.3",
"vite-plugin-graphql-loader": "^4.0.4",
"vitest": "^2.0.5",
"vitest-canvas-mock": "^0.3.3"
"vitest-canvas-mock": "^0.3.3",
"webpack": "^5"
},
"browserslist": [
"> 1%",
"last 2 versions",
"not ie <= 10"
],
"nodemonConfig": {
"ignore": [
"**/*.spec.js"
]
},
"resolutions": {
"strip-ansi": "6.0.1",
"string-width": "4.2.2",
"wrap-ansi": "7.0.0"
},
"engines": {
"node": ">=18"
}
}

View File

@ -469,7 +469,7 @@ describe('CommunityVisualizeItem', () => {
mocks: {
$t: (key) => key,
$i18n: {
locale: locale,
locale,
},
},
stubs: {

View File

@ -30,8 +30,8 @@ if (process.env.ADMIN_HOSTING === 'nodejs') {
const environment = {
NODE_ENV: process.env.NODE_ENV,
DEBUG: process.env.NODE_ENV !== 'production' ?? false,
PRODUCTION: process.env.NODE_ENV === 'production' ?? false,
DEBUG: process.env.NODE_ENV !== 'production',
PRODUCTION: process.env.NODE_ENV === 'production',
}
// const COMMUNITY_HOST = process.env.COMMUNITY_HOST ?? undefined
@ -48,14 +48,14 @@ const endpoints = {
}
const debug = {
DEBUG_DISABLE_AUTH: process.env.DEBUG_DISABLE_AUTH === 'true' ?? false,
DEBUG_DISABLE_AUTH: process.env.DEBUG_DISABLE_AUTH === 'true',
}
const humhub = {
HUMHUB_ACTIVE: process.env.HUMHUB_ACTIVE === 'true' || false,
HUMHUB_ACTIVE: process.env.HUMHUB_ACTIVE === 'true',
HUMHUB_API_URL: process.env.HUMHUB_API_URL ?? COMMUNITY_URL + '/community/',
}
const OPENAI_ACTIVE = process.env.OPENAI_ACTIVE === 'true' ?? false
const OPENAI_ACTIVE = process.env.OPENAI_ACTIVE === 'true'
const CONFIG = {
...version,

View File

@ -10,7 +10,7 @@ const {
NODE_ENV,
OPENAI_ACTIVE,
PRODUCTION,
} = require('gradido-config/build/src/commonSchema.js')
} = require('config-schema')
const Joi = require('joi')
module.exports = Joi.object({

View File

@ -73,7 +73,7 @@ const route = useRoute()
const { result, refetch } = useQuery(searchUsers, {
query: criteria.value,
filters: filters,
filters,
currentPage: currentPage.value,
pageSize: perPage.value,
order: 'DESC',
@ -138,7 +138,7 @@ watch(
if (newValue !== oldValue) {
await refetch({
query: criteria.value,
filters: filters,
filters,
currentPage: newValue,
pageSize: perPage.value,
order: 'DESC',

10
admin/turbo.json Normal file
View File

@ -0,0 +1,10 @@
{
"extends": ["//"],
"tasks": {
"stylelint": {},
"locales": {},
"lint": {
"dependsOn": ["stylelint", "locales"]
}
}
}
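Because `lint` declares `dependsOn: ["stylelint", "locales"]`, running lint for the admin workspace executes stylelint and locales first; with a global turbo this can be exercised as follows (a sketch assuming turbo ≥ 2):

```bash
# Runs stylelint and locales before lint for the admin workspace
turbo lint --filter=admin
```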

View File

@ -7,7 +7,11 @@ import IconsResolve from 'unplugin-icons/resolver'
import { BootstrapVueNextResolver } from 'bootstrap-vue-next'
import EnvironmentPlugin from 'vite-plugin-environment'
import schema from './src/config/schema'
import { validate, browserUrls } from 'gradido-config/build/src/index.js'
import { execSync } from 'node:child_process'
import { existsSync, constants } from 'node:fs'
import { validate, browserUrls } from 'config-schema'
import path from 'node:path'
import dotenv from 'dotenv'
@ -15,8 +19,6 @@ dotenv.config() // load env vars from .env
const CONFIG = require('./src/config')
const path = require('path')
export default defineConfig(async ({ command }) => {
const { vitePluginGraphqlLoader } = await import('vite-plugin-graphql-loader')
if (command === 'serve') {
@ -24,6 +26,10 @@ export default defineConfig(async ({ command }) => {
} else {
CONFIG.ADMIN_HOSTING = 'nginx'
}
if (existsSync('../.git', constants.F_OK)) {
CONFIG.BUILD_COMMIT = execSync('git rev-parse HEAD').toString().trim()
CONFIG.BUILD_COMMIT_SHORT = (CONFIG.BUILD_COMMIT ?? '0000000').slice(0, 7)
}
validate(schema, CONFIG)
// make sure that all urls used in browser have the same protocol to prevent mixed content errors
validate(browserUrls, [
@ -70,7 +76,7 @@ export default defineConfig(async ({ command }) => {
compiler: 'vue3',
}),
EnvironmentPlugin({
BUILD_COMMIT: null,
BUILD_COMMIT: CONFIG.BUILD_COMMIT ?? undefined,
PORT: CONFIG.ADMIN_MODULE_PORT ?? null, // null,
COMMUNITY_HOST: CONFIG.ADMIN_MODULE_HOST ?? null, // null,
COMMUNITY_URL: CONFIG.COMMUNITY_URL ?? null,

View File

@ -11,3 +11,4 @@ EMAIL_TEST_MODUS=false
EMAIL_TLS=false
# for testing password reset
EMAIL_CODE_REQUEST_TIME=1
EMAIL_SMTP_HOST=127.0.0.1

View File

@ -1,4 +1,4 @@
/* eslint-disable @typescript-eslint/ban-types */
declare module 'random-bigint' {
function random(bits: number, cb?: (err: Error, num: BigInt) => void): BigInt
export = random

View File

@ -1,4 +1,4 @@
// eslint-disable-next-line import/no-unresolved
export * from '@/node_modules/@types/sodium-native'
declare module 'sodium-native' {

View File

@ -1,7 +1,7 @@
##################################################################################
# BASE ###########################################################################
##################################################################################
FROM node:18.20.7-bookworm-slim as base
FROM node:18.20.7-alpine3.21 as base
# ENVs (available in production as well, can be overwritten by command line or env file)
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
@ -42,56 +42,51 @@ EXPOSE ${PORT}
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
RUN mkdir -p /database
RUN mkdir -p /config
##################################################################################
# DEVELOPMENT (Connected to the local environment, to reload on demand) ##########
# Base with turbo ################################################################
##################################################################################
FROM base as development
FROM base as turbo-base
# We don't need to copy or build anything since we gonna bind to the
# local filesystem which will need a rebuild anyway
# Run command
# (for development we need to execute yarn install since the
# node_modules are on another volume and need updating)
CMD /bin/sh -c "cd /database && yarn install && yarn build && cd /config && yarn install && cd /app && yarn install && yarn run dev"
RUN apk update && apk add --no-cache libc6-compat \
&& yarn global add turbo@^2 \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
##################################################################################
# BUILD (Does contain all files and is therefore bloated) ########################
# BUILDER (create partial monorepo with only the data needed by backend) #########
##################################################################################
FROM base as build
FROM turbo-base as builder
# Copy everything from backend
COPY ./backend/ ./
# Copy everything from database
COPY ./database/ ../database/
# Copy everything from config
COPY ./config/ ../config/
COPY --chown=app:app . .
RUN turbo prune backend --docker
# yarn install and build config
RUN cd ../config && yarn install --production=false --frozen-lockfile --non-interactive && yarn build
# yarn install backend
RUN yarn install --production=false --frozen-lockfile --non-interactive
##################################################################################
# INSTALLER (create production image) ############################################
##################################################################################
FROM turbo-base AS installer
# yarn install database
RUN cd ../database && yarn install --production=false --frozen-lockfile --non-interactive
# yarn build
RUN yarn build
# yarn build database
RUN cd ../database && yarn build
# First install the dependencies (as they change less often)
COPY --chown=app:app --from=builder /app/out/json/ .
RUN yarn install --frozen-lockfile --production=false \
&& rm -rf /tmp/* ~/.cache node_modules/.cache \
&& yarn cache clean
# Build the project
COPY --chown=app:app --from=builder /app/out/full/ .
RUN turbo build
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM build as test
FROM installer as test
ENV DB_HOST=mariadb
# Run command
CMD /bin/sh -c "yarn run start"
CMD /bin/sh -c "turbo backend#test --env-mode=loose"
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
@ -99,25 +94,10 @@ CMD /bin/sh -c "yarn run start"
FROM base as production
# Copy "binary"-files from build image
COPY --from=build ${DOCKER_WORKDIR}/build ./build
COPY --from=build ${DOCKER_WORKDIR}/../database/build ../database/build
COPY --from=build ${DOCKER_WORKDIR}/../config/build ../config/build
# We also copy the node_modules express and serve-static for the run script
COPY --from=build ${DOCKER_WORKDIR}/node_modules ./node_modules
COPY --from=build ${DOCKER_WORKDIR}/../database/node_modules ../database/node_modules
COPY --from=build ${DOCKER_WORKDIR}/../config/node_modules ../config/node_modules
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/backend/build/src/index.js ./index.js
# Copy static files
# COPY --from=build ${DOCKER_WORKDIR}/public ./public
# Copy package.json for script definitions (lock file should not be needed)
COPY --from=build ${DOCKER_WORKDIR}/package.json ./package.json
# Copy tsconfig.json to provide alias path definitions
COPY --from=build ${DOCKER_WORKDIR}/tsconfig.json ./tsconfig.json
# Copy log4js-config.json to provide log configuration
COPY --from=build ${DOCKER_WORKDIR}/log4js-config.json ./log4js-config.json
# Copy run scripts run/
# COPY --from=build ${DOCKER_WORKDIR}/run ./run
COPY --chown=app:app --from=installer ${DOCKER_WORKDIR}/backend/log4js-config.json ./log4js-config.json
# Run command
CMD /bin/sh -c "yarn run start"
CMD ["node", "index.js"]

View File

@ -1,5 +1,4 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
// eslint-disable-next-line import/no-commonjs, import/unambiguous
module.exports = {
verbose: true,
preset: 'ts-jest',
@ -7,7 +6,7 @@ module.exports = {
collectCoverageFrom: ['src/**/*.ts', '!**/node_modules/**', '!src/seeds/**', '!build/**'],
coverageThreshold: {
global: {
lines: 76,
lines: 75,
},
},
setupFiles: ['<rootDir>/test/testSetup.ts'],
@ -25,22 +24,18 @@ module.exports = {
'@typeorm/(.*)': '<rootDir>/src/typeorm/$1',
'@test/(.*)': '<rootDir>/test/$1',
'@entity/(.*)':
// eslint-disable-next-line n/no-process-env
process.env.NODE_ENV === 'development'
? '<rootDir>/../database/entity/$1'
: '<rootDir>/../database/build/entity/$1',
'@logging/(.*)':
// eslint-disable-next-line n/no-process-env
process.env.NODE_ENV === 'development'
? '<rootDir>/../database/logging/$1'
: '<rootDir>/../database/build/logging/$1',
'@dbTools/(.*)':
// eslint-disable-next-line n/no-process-env
process.env.NODE_ENV === 'development'
? '<rootDir>/../database/src/$1'
: '<rootDir>/../database/build/src/$1',
'@config/(.*)':
// eslint-disable-next-line n/no-process-env
process.env.NODE_ENV === 'development'
? '<rootDir>/../config/src/$1'
: '<rootDir>/../config/build/$1',

View File

@ -1,10 +1,10 @@
{
"name": "gradido-backend",
"name": "backend",
"version": "2.5.2",
"description": "Gradido unified backend providing an API-Service for Gradido Transactions",
"main": "src/index.ts",
"repository": "https://github.com/gradido/gradido/backend",
"author": "Ulf Gebhardt",
"author": "Gradido Academy - https://www.gradido.net",
"license": "Apache-2.0",
"private": false,
"scripts": {
@ -27,17 +27,17 @@
"await-semaphore": "^0.1.3",
"axios": "^0.21.1",
"class-validator": "^0.13.1",
"config-schema": "*",
"cors": "^2.8.5",
"cross-env": "^7.0.3",
"database": "*",
"decimal.js-light": "^2.5.1",
"dotenv": "^10.0.0",
"email-templates": "^10.0.1",
"express": "^4.17.1",
"express": "^4.17.21",
"express-slow-down": "^2.0.1",
"gradido-config": "file:../config",
"gradido-database": "file:../database",
"graphql": "^15.5.1",
"graphql-parse-resolve-info": "^4.13.0",
"graphql": "^15.10.1",
"graphql-parse-resolve-info": "^4.13.1",
"graphql-request": "5.0.0",
"graphql-type-json": "0.3.2",
"helmet": "^5.1.1",
@ -45,7 +45,7 @@
"joi": "^17.13.3",
"jose": "^4.14.4",
"lodash.clonedeep": "^4.5.0",
"log4js": "^6.4.6",
"log4js": "^6.7.1",
"mysql2": "^2.3.0",
"nodemailer": "^6.6.5",
"openai": "^4.87.3",
@ -55,18 +55,18 @@
"sodium-native": "^3.4.1",
"type-graphql": "^1.1.1",
"typed-rest-client": "^1.8.11",
"typeorm": "^0.3.16",
"uuid": "^8.3.2",
"workerpool": "^9.2.0",
"xregexp": "^5.1.1"
},
"devDependencies": {
"@types/email-templates": "^10.0.4",
"@types/express": "^4.17.21",
"@biomejs/biome": "1.9.4",
"@types/email-templates": "^10.0.1",
"@types/express": "^4.17.12",
"@types/faker": "^5.5.9",
"@types/i18n": "^0.13.4",
"@types/jest": "^27.0.2",
"@types/joi": "^17.2.3",
"@types/jest": "27.0.2",
"@types/lodash.clonedeep": "^4.5.6",
"@types/node": "^17.0.21",
"@types/nodemailer": "^6.4.4",
@ -75,15 +75,15 @@
"apollo-server-testing": "^2.25.2",
"faker": "^5.5.3",
"graphql-tag": "^2.12.6",
"jest": "^27.2.4",
"jest": "27.2.4",
"klicktipp-api": "^1.0.2",
"mkdirp": "^3.0.1",
"ncp": "^2.0.0",
"nodemon": "^2.0.7",
"prettier": "^2.8.7",
"ts-jest": "^27.0.5",
"prettier": "^3.5.3",
"ts-jest": "27.0.5",
"ts-node": "^10.9.2",
"tsconfig-paths": "^3.14.0",
"tsconfig-paths": "^4.1.1",
"typescript": "^4.9.5"
},
"nodemonConfig": {
@ -92,6 +92,6 @@
]
},
"engines": {
"node": ">=14"
"node": ">=18"
}
}

View File

@ -1,6 +1,6 @@
import { Connection } from '@dbTools/typeorm'
import { Transaction as DbTransaction } from '@entity/Transaction'
import { Transaction as DbTransaction } from 'database'
import { Decimal } from 'decimal.js-light'
import { Connection } from 'typeorm'
import { cleanDB, testEnvironment } from '@test/helpers'
@ -111,7 +111,7 @@ describe('transmitTransaction', () => {
await DltConnectorClient.getInstance()?.transmitTransaction(localTransaction)
} catch (e) {
expect(e).toMatchObject(
new LogError('invalid transaction type id: ' + localTransaction.typeId.toString()),
new LogError(`invalid transaction type id: ${localTransaction.typeId.toString()}`),
)
}
})
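
The same rewrite repeats across the source files below: deep imports through the '@entity/*', '@logging/*' and '@dbTools/*' aliases are replaced by named imports from the 'database' workspace package, and TypeORM helpers are imported straight from 'typeorm'. A minimal before/after sketch, with entity names taken from the hunks and the surrounding type kept deliberately artificial:

// before: one aliased deep import per entity, wired up via tsconfig/jest mappings
// import { Transaction as DbTransaction } from '@entity/Transaction'
// import { Connection } from '@dbTools/typeorm'

// after: plain workspace and package imports
import { Transaction as DbTransaction, User as DbUser } from 'database'
import { Connection } from 'typeorm'

// type-only usage so the sketch makes no runtime assumptions about the entities
export type TransactionContext = {
  connection: Connection
  transaction: DbTransaction
  actingUser: DbUser
}
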

View File

@ -1,4 +1,4 @@
import { Transaction as DbTransaction } from '@entity/Transaction'
import { Transaction as DbTransaction } from 'database'
import { GraphQLClient, gql } from 'graphql-request'
import { CONFIG } from '@/config'

View File

@ -1,4 +1,4 @@
import { User as DbUser } from '@entity/User'
import { User as DbUser } from 'database'
// import { createTestClient } from 'apollo-server-testing'
// import { createGmsUser } from '@/apis/gms/GmsClient'

View File

@ -1,4 +1,4 @@
import { User as dbUser } from '@entity/User'
import { User as dbUser } from 'database'
import { PublishNameLogic } from '@/data/PublishName.logic'
// import { GmsPublishLocationType } from '@/graphql/enum/GmsPublishLocationType'

View File

@ -1,5 +1,5 @@
import { IsNull, Not } from '@dbTools/typeorm'
import { User } from '@entity/User'
import { User } from 'database'
import { IsNull, Not } from 'typeorm'
import { CONFIG } from '@/config'
import { LogError } from '@/server/LogError'

View File

@ -1,4 +1,4 @@
import { ProjectBranding } from '@entity/ProjectBranding'
import { ProjectBranding } from 'database'
import { SignJWT } from 'jose'
import { IRequestOptions, IRestResponse, RestClient } from 'typed-rest-client'

View File

@ -1,5 +1,4 @@
import { User } from '@entity/User'
import { UserContact } from '@entity/UserContact'
import { User, UserContact } from 'database'
import { IRestResponse } from 'typed-rest-client'
import { GetUser } from '@/apis/humhub/model/GetUser'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { isHumhubUserIdenticalToDbUser } from '@/apis/humhub/compareHumhubUserDbUser'
import { GetUser } from '@/apis/humhub/model/GetUser'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { Account } from './model/Account'
import { GetUser } from './model/GetUser'

View File

@ -1,4 +1,4 @@
import { AbstractLoggingView } from '@logging/AbstractLogging.view'
import { AbstractLoggingView } from 'database'
import { Account } from '@/apis/humhub/model/Account'

View File

@ -1,4 +1,4 @@
import { AbstractLoggingView } from '@logging/AbstractLogging.view'
import { AbstractLoggingView } from 'database'
import { PostUser } from '@/apis/humhub/model/PostUser'

View File

@ -1,4 +1,4 @@
import { AbstractLoggingView } from '@logging/AbstractLogging.view'
import { AbstractLoggingView } from 'database'
import { Profile } from '@/apis/humhub/model/Profile'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { Account } from './Account'
import { Profile } from './Profile'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { convertGradidoLanguageToHumhub } from '@/apis/humhub/convertLanguage'
import { PublishNameLogic } from '@/data/PublishName.logic'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { AbstractUser } from './AbstractUser'

View File

@ -1,5 +1,4 @@
import { User } from '@entity/User'
import { UserContact } from '@entity/UserContact'
import { User, UserContact } from 'database'
import { v4 as uuidv4 } from 'uuid'
import { PublishNameType } from '@/graphql/enum/PublishNameType'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { CONFIG } from '@/config'
import { PublishNameLogic } from '@/data/PublishName.logic'

View File

@ -1,5 +1,4 @@
import { User } from '@entity/User'
import { UserContact } from '@entity/UserContact'
import { User, UserContact } from 'database'
import { GetUser } from './model/GetUser'
import { ExecutedHumhubAction, syncUser } from './syncUser'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { LogError } from '@/server/LogError'
import { backendLogger as logger } from '@/server/logger'

View File

@ -1,5 +1,4 @@
import { OpenaiThreads } from '@entity/OpenaiThreads'
import { User } from '@entity/User'
import { OpenaiThreads, User } from 'database'
import { OpenAI } from 'openai'
import { Message } from 'openai/resources/beta/threads/messages'

View File

@ -1,7 +1,7 @@
// ATTENTION: DO NOT PUT ANY SECRETS IN HERE (or the .env)
import { validate } from '@config/index'
import { latestDbVersion } from '@dbTools/config/detectLastDBVersion'
import { validate } from 'config-schema'
import { latestDbVersion } from 'database'
import { Decimal } from 'decimal.js-light'
import dotenv from 'dotenv'
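
The config module follows the same pattern: schema validation and the latest-DB-version probe now come from the 'config-schema' and 'database' workspace packages instead of the old '@config/*' and '@dbTools/*' aliases. A rough sketch of the bootstrap order this implies; the imports are taken from the hunk above, while the call shapes in the comments are assumptions, not part of this diff:

import { validate } from 'config-schema'
import { latestDbVersion } from 'database'
import dotenv from 'dotenv'

// load .env before anything reads process.env
dotenv.config()

// assumed usage, shown for orientation only:
// validate(schema)                  // schema being the Joi object assembled from 'config-schema' constants
// const expectedDbVersion = latestDbVersion  // used to check code and migrations match
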

View File

@ -24,7 +24,7 @@ import {
OPENAI_ACTIVE,
PRODUCTION,
TYPEORM_LOGGING_RELATIVE_PATH,
} from '@config/commonSchema'
} from 'config-schema'
import Joi from 'joi'
export const schema = Joi.object({

View File

@ -1,4 +1,4 @@
import { Contribution } from '@entity/Contribution'
import { Contribution } from 'database'
import { Decimal } from 'decimal.js-light'
import {

View File

@ -1,6 +1,4 @@
import { Contribution } from '@entity/Contribution'
import { ContributionMessage } from '@entity/ContributionMessage'
import { User } from '@entity/User'
import { Contribution, ContributionMessage, User } from 'database'
import { ContributionMessageType } from '@/graphql/enum/ContributionMessageType'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import { v4 as uuidv4 } from 'uuid'
import { PublishNameType } from '@/graphql/enum/PublishNameType'

View File

@ -1,4 +1,4 @@
import { User } from '@entity/User'
import { User } from 'database'
import XRegExp from 'xregexp'
import { PublishNameType } from '@/graphql/enum/PublishNameType'
@ -92,7 +92,7 @@ export class PublishNameLogic {
? this.getUsernameFromAlias()
: this.isUsernameFromInitials(publishNameType)
? this.getUsernameFromInitials()
: (this.getFirstName(publishNameType) + ' ' + this.getLastName(publishNameType)).trim()
: `${this.getFirstName(publishNameType)} ${this.getLastName(publishNameType)}`.trim()
}
public getUsernameFromInitials(): string {

View File

@ -1,5 +1,4 @@
import { User } from '@entity/User'
import { UserRole } from '@entity/UserRole'
import { User, UserRole } from 'database'
import { RoleNames } from '@enum/RoleNames'

View File

@ -1,6 +1,6 @@
import { Connection } from '@dbTools/typeorm'
import { ApolloServerTestClient } from 'apollo-server-testing'
import { Decimal } from 'decimal.js-light'
import { Connection } from 'typeorm'
import { testEnvironment } from '@test/helpers'
import { i18n as localization, logger } from '@test/testSetup'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { ContributionLink as DbContributionLink } from '@entity/ContributionLink'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { ContributionLink as DbContributionLink, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { ContributionLink as DbContributionLink } from '@entity/ContributionLink'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { ContributionLink as DbContributionLink, Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { ContributionLink as DbContributionLink } from '@entity/ContributionLink'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { ContributionLink as DbContributionLink, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,7 +1,9 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { ContributionMessage as DbContributionMessage } from '@entity/ContributionMessage'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import {
Contribution as DbContribution,
ContributionMessage as DbContributionMessage,
Event as DbEvent,
User as DbUser,
} from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,8 +1,10 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { ContributionLink as DbContributionLink } from '@entity/ContributionLink'
import { Event as DbEvent } from '@entity/Event'
import { Transaction as DbTransaction } from '@entity/Transaction'
import { User as DbUser } from '@entity/User'
import {
Contribution as DbContribution,
ContributionLink as DbContributionLink,
Event as DbEvent,
Transaction as DbTransaction,
User as DbUser,
} from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,7 +1,9 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { ContributionMessage as DbContributionMessage } from '@entity/ContributionMessage'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import {
Contribution as DbContribution,
ContributionMessage as DbContributionMessage,
Event as DbEvent,
User as DbUser,
} from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Contribution as DbContribution, Event as DbEvent, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { TransactionLink as DbTransactionLink } from '@entity/TransactionLink'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, TransactionLink as DbTransactionLink, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { TransactionLink as DbTransactionLink } from '@entity/TransactionLink'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, TransactionLink as DbTransactionLink, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { TransactionLink as DbTransactionLink } from '@entity/TransactionLink'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, TransactionLink as DbTransactionLink, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { Transaction as DbTransaction } from '@entity/Transaction'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, Transaction as DbTransaction, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,6 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { Transaction as DbTransaction } from '@entity/Transaction'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, Transaction as DbTransaction, User as DbUser } from 'database'
import { Decimal } from 'decimal.js-light'
import { Event } from './Event'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Event as DbEvent } from '@entity/Event'
import { User as DbUser } from '@entity/User'
import { Event as DbEvent, User as DbUser } from 'database'
import { Event } from './Event'
import { EventType } from './EventType'

View File

@ -1,10 +1,12 @@
import { Contribution as DbContribution } from '@entity/Contribution'
import { ContributionLink as DbContributionLink } from '@entity/ContributionLink'
import { ContributionMessage as DbContributionMessage } from '@entity/ContributionMessage'
import { Event as DbEvent } from '@entity/Event'
import { Transaction as DbTransaction } from '@entity/Transaction'
import { TransactionLink as DbTransactionLink } from '@entity/TransactionLink'
import { User as DbUser } from '@entity/User'
import {
Contribution as DbContribution,
ContributionLink as DbContributionLink,
ContributionMessage as DbContributionMessage,
Event as DbEvent,
Transaction as DbTransaction,
TransactionLink as DbTransactionLink,
User as DbUser,
} from 'database'
import { Decimal } from 'decimal.js-light'
import { EventType } from './EventType'

View File

@ -1,5 +1,4 @@
import { Community as DbCommunity } from '@entity/Community'
import { FederatedCommunity as DbFederatedCommunity } from '@entity/FederatedCommunity'
import { Community as DbCommunity, FederatedCommunity as DbFederatedCommunity } from 'database'
import { validate as validateUUID, version as versionUUID } from 'uuid'
import { CONFIG } from '@/config'

View File

@ -1,4 +1,4 @@
import { FederatedCommunity as DbFederatedCommunity } from '@entity/FederatedCommunity'
import { FederatedCommunity as DbFederatedCommunity } from 'database'
import { GraphQLClient } from 'graphql-request'
import { backendLogger as logger } from '@/server/logger'

View File

@ -1,4 +1,4 @@
import { FederatedCommunity as DbFederatedCommunity } from '@entity/FederatedCommunity'
import { FederatedCommunity as DbFederatedCommunity } from 'database'
import { GraphQLClient } from 'graphql-request'
import { getPublicCommunityInfo } from '@/federation/client/1_0/query/getPublicCommunityInfo'

View File

@ -1,4 +1,4 @@
import { FederatedCommunity as DbFederatedCommunity } from '@entity/FederatedCommunity'
import { FederatedCommunity as DbFederatedCommunity } from 'database'
import { GraphQLClient } from 'graphql-request'
import { LogError } from '@/server/LogError'

View File

@ -1,4 +1,4 @@
import { AbstractLoggingView } from '@logging/AbstractLogging.view'
import { AbstractLoggingView } from 'database'
import { PublicCommunityInfo } from '@/federation/client/1_0/model/PublicCommunityInfo'

View File

@ -1,4 +1,4 @@
import { AbstractLoggingView } from '@logging/AbstractLogging.view'
import { AbstractLoggingView } from 'database'
import { SendCoinsArgs } from '@/federation/client/1_0/model/SendCoinsArgs'

View File

@ -1,4 +1,4 @@
import { AbstractLoggingView } from '@logging/AbstractLogging.view'
import { AbstractLoggingView } from 'database'
import { SendCoinsResult } from '@/federation/client/1_0/model/SendCoinsResult'

View File

@ -1,4 +1,4 @@
import { FederatedCommunity as DbFederatedCommunity } from '@entity/FederatedCommunity'
import { FederatedCommunity as DbFederatedCommunity } from 'database'
import { AuthenticationClient as V1_0_AuthenticationClient } from '@/federation/client/1_0/AuthenticationClient'

View File

@ -1,4 +1,4 @@
import { FederatedCommunity as DbFederatedCommunity } from '@entity/FederatedCommunity'
import { FederatedCommunity as DbFederatedCommunity } from 'database'
import { FederationClient as V1_0_FederationClient } from '@/federation/client/1_0/FederationClient'

View File

@ -1,4 +1,4 @@
import { FederatedCommunity as DbFederatedCommunity } from '@entity/FederatedCommunity'
import { FederatedCommunity as DbFederatedCommunity } from 'database'
import { SendCoinsClient as V1_0_SendCoinsClient } from '@/federation/client/1_0/SendCoinsClient'

View File

@ -1,8 +1,8 @@
import { Connection } from '@dbTools/typeorm'
import { FederatedCommunity as DbFederatedCommunity } from '@entity/FederatedCommunity'
import { ApolloServerTestClient } from 'apollo-server-testing'
import { FederatedCommunity as DbFederatedCommunity } from 'database'
import { GraphQLClient } from 'graphql-request'
import { Response } from 'graphql-request/dist/types'
import { Connection } from 'typeorm'
import { cleanDB, testEnvironment } from '@test/helpers'
import { logger } from '@test/testSetup'

Some files were not shown because too many files have changed in this diff.