Mirror of https://github.com/IT4Change/gradido.git (synced 2025-12-13)

Merge branch 'master' into ADMINBEREICH-first-step
Commit 3184639292
.github/workflows/test.yml (vendored) — 45 changed lines

@@ -173,7 +173,7 @@ jobs:
   ##########################################################################
   - name: mariadb | Build `test` image
     run: |
-      docker build --target mariadb_server_test -t "gradido/mariadb:test" -f ./mariadb/Dockerfile ./
+      docker build --target mariadb_server -t "gradido/mariadb:test" -f ./mariadb/Dockerfile ./
       docker save "gradido/mariadb:test" > /tmp/mariadb.tar
   - name: Upload Artifact
     uses: actions/upload-artifact@v2

@@ -399,7 +399,7 @@ jobs:
     report_name: Coverage Frontend
     type: lcov
     result_path: ./coverage/lcov.info
-    min_coverage: 83
+    min_coverage: 85
    token: ${{ github.token }}

 ##############################################################################

@@ -450,7 +450,7 @@ jobs:
 unit_test_backend:
   name: Unit tests - Backend
   runs-on: ubuntu-latest
-  needs: [build_test_backend]
+  needs: [build_test_backend,build_test_mariadb]
   steps:
     ##########################################################################
     # CHECKOUT CODE ##########################################################

@@ -460,6 +460,13 @@ jobs:
     ##########################################################################
     # DOWNLOAD DOCKER IMAGES #################################################
     ##########################################################################
+    - name: Download Docker Image (Mariadb)
+      uses: actions/download-artifact@v2
+      with:
+        name: docker-mariadb-test
+        path: /tmp
+    - name: Load Docker Image
+      run: docker load < /tmp/mariadb.tar
     - name: Download Docker Image (Backend)
       uses: actions/download-artifact@v2
       with:

@@ -470,10 +477,11 @@ jobs:
     ##########################################################################
     # UNIT TESTS BACKEND #####################################################
     ##########################################################################
-    - name: backend | Unit tests
-      run: |
-        docker run -v ~/coverage:/app/coverage --rm gradido/backend:test yarn run test
-        cp -r ~/coverage ./coverage
+    - name: backend | docker-compose
+      run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb database
+    - name: backend Unit tests | test
+      run: cd database && yarn && cd ../backend && yarn && yarn test
+      # run: docker-compose -f docker-compose.yml -f docker-compose.test.yml exec -T backend yarn test
     ##########################################################################
     # COVERAGE CHECK BACKEND #################################################
     ##########################################################################

@@ -482,8 +490,8 @@ jobs:
   with:
     report_name: Coverage Backend
     type: lcov
-    result_path: ./coverage/lcov.info
-    min_coverage: 1
+    result_path: ./backend/coverage/lcov.info
+    min_coverage: 41
     token: ${{ github.token }}

 ##############################################################################

@@ -506,10 +514,16 @@ jobs:
       --health-timeout=3s
       --health-retries=4
   steps:
-    - name: Debug service
-      run: echo "$(docker ps)"
-    - name: Debug container choosing script
-      run: echo "$(docker container ls | grep mariadb | awk '{ print $1 }')"
+    # - name: Debug service
+    #   run: echo "$(docker ps)"
+    #- name: Debug container choosing script
+    #  run: echo "$(docker container ls | grep mariadb | awk '{ print $1 }')"
+    - name: get mariadb container id
+      run: echo "::set-output name=id::$(docker container ls | grep mariadb | awk '{ print $1 }')"
+      id: mariadb_container
+    - name: get automatic created network
+      run: echo "::set-output name=id::$(docker network ls | grep github_network | awk '{ print $1 }')"
+      id: network
     ##########################################################################
     # CHECKOUT CODE ##########################################################
     ##########################################################################

@@ -517,6 +531,11 @@ jobs:
     uses: actions/checkout@v2
     with:
       submodules: true
+    # Database migration
+    - name: Start database migration
+      run: |
+        docker build --target production_up -t "gradido/database:production_up" database/
+        docker run --network ${{ steps.network.outputs.id }} --name=database --env NODE_ENV=production --env DB_HOST=mariadb --env DB_DATABASE=gradido_community_test -d gradido/database:production_up
     ##########################################################################
     # Build Login-Server Test Docker image ###################################
     ##########################################################################
.gitignore (vendored) — 1 changed line

@@ -2,7 +2,6 @@
 /node_modules/*
 .vscode
 messages.pot
-.skeema
 nbproject
 .metadata
 /.env
.vscode/extensions.json (vendored, new file) — 7 lines

@@ -0,0 +1,7 @@
+{
+  "recommendations": [
+    "streetsidesoftware.code-spell-checker",
+    "dbaeumer.vscode-eslint",
+    "esbenp.prettier-vscode"
+  ]
+}
README.md — 66 changed lines

@@ -8,33 +8,73 @@ The Gradido model can create global prosperity and peace
 The Corona crisis has fundamentally changed our world within a very short time.
 The dominant financial system threatens to fail around the globe, followed by mass insolvencies, record unemployment and abject poverty. Only with a sustainable new monetary system can humanity master these challenges of the 21st century. The Gradido Academy for Bionic Economy has developed such a system.

+Find out more about the Project on its [Website](https://gradido.net/). It offers vast resources about the idea. The remainder of this document will discuss the Gradido software only.
 ## Software requirements

-Currently we only support `docker` as environment to run all services, since many different programming languages and frameworks are used.
+Currently we only provide `docker` install instructions to run all services, since many different programming languages and frameworks are used.

 - [docker](https://www.docker.com/)
+- [docker-compose]
+
+### For Arch Linux
+Install the required packages:
+```bash
+sudo pacman -S docker
+sudo pacman -S docker-compose
+```
+
+Add group `docker` and then your user to it in order to allow you to run docker without sudo
+```bash
+sudo groupadd docker # may already exist `groupadd: group 'docker' already exists`
+sudo usermod -aG docker $USER
+groups # verify you have the group (requires relog)
+```
+
+Start the docker service:
+```bash
+sudo systemctl start docker
+```
+
 ## How to run?

-1. Clone the repo and pull all submodules
+### 1. Clone Sources
+Clone the repo and pull all submodules
 ```bash
 git clone git@github.com:gradido/gradido.git
 git submodule update --recursive --init
 ```

-2. Run docker compose
-1. Run docker compose for the debug build
+### 2. Run docker-compose
+Run docker-compose to bring up the development environment
+```bash
+docker-compose up
+```
+### Additional Build options
+If you want to build for production you can do this as well:
+```bash
+docker-compose -f docker-compose.yml up
+```

-```bash
-docker-compose up
-```
-
-2. Or run docker compose in production build
-
-```bash
-docker-compose -f docker-compose.yml up
-```
+## Services defined in this package
+
+- [frontend](./frontend) Wallet frontend
+- [backend](./backend) GraphQL & Business logic backend
+- [mariadb](./mariadb) Database backend
+- [login_server](./login_server) User credential storage & business logic backend
+- [community_server](./community_server/) Business logic backend
+
+We are currently restructuring the service to reduce dependencies and unify business logic into one place. Furthermore the databases defined for each service will be unified into one.
+
+### Open the wallet
+
+Once you have `docker-compose` up and running, you can open [http://localhost/vue](http://localhost/vue) and create yourself a new wallet account.
+
+## Troubleshooting
+
+| Problem | Issue | Solution | Description |
+| ------- | ----- | -------- | ----------- |
+| docker-compose raises database connection errors | [#1062](https://github.com/gradido/gradido/issues/1062) | End `ctrl+c` and restart the `docker-compose up` after a successful build | Several Database connection related errors occur in the docker-compose log. |
+| Wallet page is empty | [#1063](https://github.com/gradido/gradido/issues/1063) | Accept Cookies and Local Storage in your Browser | The page stays empty when navigating to [http://localhost/vue](http://localhost/vue) |

 ## Useful Links

@@ -10,6 +10,14 @@ DB_PORT=3306
 DB_USER=root
 DB_PASSWORD=
 DB_DATABASE=gradido_community

+#EMAIL=true
+#EMAIL_USERNAME=
+#EMAIL_SENDER=
+#EMAIL_PASSWORD=
+#EMAIL_SMTP_URL=
+#EMAIL_SMTP_PORT=587
+
 #KLICKTIPP_USER=
 #KLICKTIPP_PASSWORD=
 #KLICKTIPP_APIKEY_DE=
@@ -85,7 +85,7 @@ RUN cd ../database && yarn run build
 FROM build as test

 # Run command
-CMD /bin/sh -c "yarn run dev"
+CMD /bin/sh -c "yarn run start"

 ##################################################################################
 # PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
backend/package-lock.json (generated) — 7120 changed lines; file diff suppressed because it is too large.
@@ -13,11 +13,12 @@
   "start": "node build/index.js",
   "dev": "nodemon -w src --ext ts --exec ts-node src/index.ts",
   "lint": "eslint . --ext .js,.ts",
-  "test": "jest --coverage"
+  "test": "jest --runInBand --coverage "
 },
 "dependencies": {
   "@types/jest": "^27.0.2",
   "apollo-server-express": "^2.25.2",
+  "apollo-server-testing": "^2.25.2",
   "axios": "^0.21.1",
   "class-validator": "^0.13.1",
   "cors": "^2.8.5",

@@ -29,6 +30,7 @@
   "libsodium-wrappers": "^0.7.9",
   "module-alias": "^2.2.2",
   "mysql2": "^2.3.0",
+  "nodemailer": "^6.6.5",
   "reflect-metadata": "^0.1.13",
   "ts-jest": "^27.0.5",
   "type-graphql": "^1.1.1",

@@ -38,6 +40,8 @@
   "@types/express": "^4.17.12",
   "@types/jsonwebtoken": "^8.5.2",
   "@types/libsodium-wrappers": "^0.7.9",
+  "@types/node": "^16.10.3",
+  "@types/nodemailer": "^6.4.4",
   "@typescript-eslint/eslint-plugin": "^4.28.0",
   "@typescript-eslint/parser": "^4.28.0",
   "eslint": "^7.29.0",

@@ -53,6 +57,6 @@
   "typescript": "^4.3.4"
 },
 "_moduleAliases": {
-  "@entity" : "../database/build/entity"
+  "@entity": "../database/build/entity"
 }
 }
@@ -39,9 +39,18 @@ const community = {
     process.env.COMMUNITY_DESCRIPTION || 'Die lokale Entwicklungsumgebung von Gradido.',
 }

+const email = {
+  EMAIL: process.env.EMAIL === 'true' || false,
+  EMAIL_USERNAME: process.env.EMAIL_USERNAME || 'gradido_email',
+  EMAIL_SENDER: process.env.EMAIL_SENDER || 'info@gradido.net',
+  EMAIL_PASSWORD: process.env.EMAIL_PASSWORD || 'xxx',
+  EMAIL_SMTP_URL: process.env.EMAIL_SMTP_URL || 'gmail.com',
+  EMAIL_SMTP_PORT: process.env.EMAIL_SMTP_PORT || '587',
+}
+
 // This is needed by graphql-directive-auth
 process.env.APP_SECRET = server.JWT_SECRET

-const CONFIG = { ...server, ...database, ...klicktipp, ...community }
+const CONFIG = { ...server, ...database, ...klicktipp, ...community, ...email }

 export default CONFIG
@@ -4,7 +4,4 @@ import { ArgsType, Field } from 'type-graphql'
 export default class CheckUsernameArgs {
   @Field(() => String)
   username: string
-
-  @Field(() => Number, { nullable: true })
-  groupId?: number
 }
@@ -1,21 +0,0 @@
-/* eslint-disable @typescript-eslint/no-explicit-any */
-/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
-import { ObjectType, Field } from 'type-graphql'
-
-@ObjectType()
-export class CheckUsernameResponse {
-  constructor(json: any) {
-    this.state = json.state
-    this.msg = json.msg
-    this.groupId = json.group_id
-  }
-
-  @Field(() => String)
-  state: string
-
-  @Field(() => String)
-  msg?: string
-
-  @Field(() => Number)
-  groupId?: number
-}
backend/src/graphql/resolver/CommunityResolver.test.ts (new file) — 123 lines

@@ -0,0 +1,123 @@
+/* eslint-disable @typescript-eslint/no-explicit-any */
+/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
+
+import { createTestClient } from 'apollo-server-testing'
+import createServer from '../../server/createServer'
+import CONFIG from '../../config'
+
+jest.mock('../../config')
+
+let query: any
+
+// to do: We need a setup for the tests that closes the connection
+let con: any
+
+beforeAll(async () => {
+  const server = await createServer({})
+  con = server.con
+  query = createTestClient(server.apollo).query
+})
+
+afterAll(async () => {
+  await con.close()
+})
+
+describe('CommunityResolver', () => {
+  const getCommunityInfoQuery = `
+    query {
+      getCommunityInfo {
+        name
+        description
+        url
+        registerUrl
+      }
+    }
+  `
+
+  const communities = `
+    query {
+      communities {
+        id
+        name
+        url
+        description
+        registerUrl
+      }
+    }
+  `
+
+  describe('getCommunityInfo', () => {
+    it('returns the default values', async () => {
+      expect(query({ query: getCommunityInfoQuery })).resolves.toMatchObject({
+        data: {
+          getCommunityInfo: {
+            name: 'Gradido Entwicklung',
+            description: 'Die lokale Entwicklungsumgebung von Gradido.',
+            url: 'http://localhost/vue/',
+            registerUrl: 'http://localhost/vue/register',
+          },
+        },
+      })
+    })
+  })
+
+  describe('communities', () => {
+    describe('PRODUCTION = false', () => {
+      beforeEach(() => {
+        CONFIG.PRODUCTION = false
+      })
+
+      it('returns three communities', async () => {
+        expect(query({ query: communities })).resolves.toMatchObject({
+          data: {
+            communities: [
+              {
+                id: 1,
+                name: 'Gradido Entwicklung',
+                description: 'Die lokale Entwicklungsumgebung von Gradido.',
+                url: 'http://localhost/vue/',
+                registerUrl: 'http://localhost/vue/register-community',
+              },
+              {
+                id: 2,
+                name: 'Gradido Staging',
+                description: 'Der Testserver der Gradido-Akademie.',
+                url: 'https://stage1.gradido.net/vue/',
+                registerUrl: 'https://stage1.gradido.net/vue/register-community',
+              },
+              {
+                id: 3,
+                name: 'Gradido-Akademie',
+                description: 'Freies Institut für Wirtschaftsbionik.',
+                url: 'https://gradido.net',
+                registerUrl: 'https://gdd1.gradido.com/vue/register-community',
+              },
+            ],
+          },
+        })
+      })
+    })
+
+    describe('PRODUCTION = true', () => {
+      beforeEach(() => {
+        CONFIG.PRODUCTION = true
+      })
+
+      it('returns one community', async () => {
+        expect(query({ query: communities })).resolves.toMatchObject({
+          data: {
+            communities: [
+              {
+                id: 3,
+                name: 'Gradido-Akademie',
+                description: 'Freies Institut für Wirtschaftsbionik.',
+                url: 'https://gradido.net',
+                registerUrl: 'https://gdd1.gradido.com/vue/register-community',
+              },
+            ],
+          },
+        })
+      })
+    })
+  })
+})
@@ -1,8 +1,10 @@
+/* eslint-disable new-cap */
 /* eslint-disable @typescript-eslint/no-explicit-any */
 /* eslint-disable @typescript-eslint/explicit-module-boundary-types */

 import { Resolver, Query, Args, Authorized, Ctx, Mutation } from 'type-graphql'
-import { getCustomRepository } from 'typeorm'
+import { getCustomRepository, getConnection, QueryRunner } from 'typeorm'
+import { createTransport } from 'nodemailer'

 import CONFIG from '../../config'

@@ -22,12 +24,205 @@ import { TransactionRepository } from '../../typeorm/repository/Transaction'
 import { User as dbUser } from '@entity/User'
 import { UserTransaction as dbUserTransaction } from '@entity/UserTransaction'
 import { Transaction as dbTransaction } from '@entity/Transaction'
+import { TransactionSendCoin as dbTransactionSendCoin } from '@entity/TransactionSendCoin'
+import { Balance as dbBalance } from '@entity/Balance'

 import { apiPost } from '../../apis/HttpRequest'
 import { roundFloorFrom4, roundCeilFrom4 } from '../../util/round'
 import { calculateDecay, calculateDecayWithInterval } from '../../util/decay'
 import { TransactionTypeId } from '../enum/TransactionTypeId'
 import { TransactionType } from '../enum/TransactionType'
+import { hasUserAmount, isHexPublicKey } from '../../util/validate'
+import { from_hex as fromHex } from 'libsodium-wrappers'
+
+/*
+# Test
+
+## Prepare
+> sudo systemctl start docker
+> docker-compose up mariadb
+> DROP all databases
+> docker-compose down
+> docker compose up mariadb database
+> verify there is exactly one database `gradido_community`
+
+TODO:
+INSERT INTO `login_groups` (`id`, `alias`, `name`, `url`, `host`, `home`, `description`) VALUES
+(1, 'docker', 'docker gradido group', 'localhost', 'nginx', '/', 'gradido test group for docker and stage2 with blockchain db');
+
+>> Database is cool
+
+### Start login server
+> docker-compose up login-server community-server nginx
+>> Login & community servers and nginx proxy are up and running
+
+## Build database
+> cd database
+> yarn
+> yarn build
+> cd ..
+>> Database has been built successfully
+
+### Start backend (no docker for debugging)
+> cd backend
+> yarn
+> yarn dev
+>> Backend is up and running
+
+### Create users
+> chromium http://localhost:4000/graphql
+> mutation{createUser(email: "receiver@user.net", firstName: "Receiver", lastName: "user", password: "123!AAAb", language: "de")}
+> mutation{createUser(email: "sender@user.net", firstName: "Sender", lastName: "user", password: "123!AAAb", language: "de")}
+> mutation{createUser(email: "creator@user.net", firstName: "Creator", lastName: "user", password: "123!AAAb", language: "de")}
+>> Verify you have 3 entries in `login_users`, `login_user_backups` and `state_users`
+
+### make creator an admin
+> INSERT INTO login_user_roles (id, user_id, role_id) VALUES (NULL, '3', '1');
+> UPDATE login_users SET email_checked = 1 WHERE id = 3;
+> uncomment line: 19 in community_server/src/Controller/ServerUsersController.php
+> chromium http://localhost/server-users/add
+> create user `creator` `123` `creator@different.net`
+>> verify you have 1 entry in `server_users`
+> login with user on http://localhost/server-users
+> activate server user by changing the corresponding flag in the interface
+> navigate to http://localhost/transaction-creations/create-multi
+> create 1000GDD for user sender@user.net
+> navigate to http://localhost
+> login with `creator@user.net` `123!AAAb`
+> confirm transaction (top right corner - click the thingy, click the green button `Transaktion abschließen`)
+
+### the test:
+> chromium http://localhost:4000/graphql
+> query{login(email: "sender@user.net", password: "123!AAAb"){pubkey}}
+>> copy token from network tab (inspect)
+> mutation{sendCoins(email: "receiver@user.net", amount: 10.0, memo: "Hier!")}
+> mutation{sendCoins(email: "receiver@user.net", amount: 10.0, memo: "Hier!")}
+> Headers: {"Authorization": "Bearer ${token}"}
+>> Verify via Database that stuff is as it should see `state_balance` & `transaction_send_coins`
+
+### create decay block
+> chromium http://localhost/transactions/add
+> login with `creator` `123`
+> select `decay start`
+> press submit
+> wait for at least 0.02 display of decay on user sender@user.net on old frontend, this should be approx. 10min
+> chromium http://localhost:4000/graphql
+> query{login(email: "sender@user.net", password: "123!AAAb"){pubkey}}
+>> copy token from network tab (inspect)
+> mutation{sendCoins(email: "receiver@user.net", amount: 10.0, memo: "Hier!")}
+>> verify in `transaction_send_coins` that a decay was taken into account
+>> same in `state_balances`
+>> now check the old frontend
+>>> sender@user.net should have a decay of 0.02
+>>> while receiver@user.net should have zero decay on anything (old frontend)
+
+### Export data
+> docker-compose up phpmyadmin
+> chromium http://localhost:8074/
+> select gradido_community
+> export
+> select custom
+> untick structure
+> ok
+
+## Results
+NOTE: We decided not to write the `transaction_signatures` since it's unused. This is the main difference.
+NOTE: We fixed a bug in the `state_user_transactions code` with the new implementation of apollo
+
+Master:
+
+--
+-- Dumping data for table `state_user_transactions`
+--
+
+INSERT INTO `state_user_transactions` (`id`, `state_user_id`, `transaction_id`, `transaction_type_id`, `balance`, `balance_date`) VALUES
+(1, 2, 1, 1, 10000000, '2021-11-05 12:45:18'),
+(2, 2, 2, 2, 9900000, '2021-11-05 12:48:35'),
+(3, 1, 2, 2, 100000, '2021-11-05 12:48:35'),
+(4, 2, 3, 2, 9800000, '2021-11-05 12:49:07'),
+(5, 1, 3, 2, 200000, '2021-11-05 12:49:07'),
+(6, 2, 5, 2, 9699845, '2021-11-05 13:03:50'),
+(7, 1, 5, 2, 99996, '2021-11-05 13:03:50');
+
+--
+-- Dumping data for table `transactions`
+--
+
+INSERT INTO `transactions` (`id`, `state_group_id`, `transaction_type_id`, `tx_hash`, `memo`, `received`, `blockchain_type_id`) VALUES
+(1, NULL, 1, 0x9ccdcd01ccb6320c09c2d1da2f0bf735a95ece0e7c1df6bbff51918fbaec061700000000000000000000000000000000, '', '2021-11-05 12:45:18', 1),
+(2, NULL, 2, 0x58d7706a67fa4ff4b8038168c6be39a2963d7e28e9d3872759ad09c519fe093700000000000000000000000000000000, 'Hier!', '2021-11-05 12:48:35', 1),
+(3, NULL, 2, 0x427cd214f92ef35af671129d50edc5a478c53d1e464f285b7615d9794a69f69b00000000000000000000000000000000, 'Hier!', '2021-11-05 12:49:07', 1),
+(4, NULL, 9, 0x32807368f0906a21b94c072599795bc9eeab88fb565df82e85cc62a4fdcde48500000000000000000000000000000000, '', '2021-11-05 12:51:51', 1),
+(5, NULL, 2, 0x75eb729e0f60a1c8cead1342955853d2440d7a2ea57dfef6d4a18bff0d94491e00000000000000000000000000000000, 'Hier!', '2021-11-05 13:03:50', 1);
+
+--
+-- Dumping data for table `transaction_signatures`
+--
+
+INSERT INTO `transaction_signatures` (`id`, `transaction_id`, `signature`, `pubkey`) VALUES
+(1, 1, 0x5888edcdcf77aaadad6d321882903bc831d7416f17213fd5020a764365b5fcb336e4c7917385a1278ea44ccdb31eac4a09e448053b5e3f8f1fe5da3baf53c008, 0xd5b20f8dee415038bfa2b6b0e1b40ff54850351109444863b04d6d28825b7b7d),
+(2, 2, 0xf6fef428f8f22faf7090f7d740e6088d1d90c58ae92d757117d7d91d799e659f3a3a0c65a3fd97cbde798e761f9d23eff13e8810779a184c97c411f28e7c4608, 0xdc74a589004377ab14836dce68ce2ca34e5b17147cd78ad4b3afe8137524ae8a),
+(3, 3, 0x8ebe9730c6cf61f56ef401d6f2bd229f3c298ca3c2791ee9137e4827b7af6c6d6566fca616eb1fe7adc2e4d56b5c7350ae3990c9905580630fa75ecffca8e001, 0xdc74a589004377ab14836dce68ce2ca34e5b17147cd78ad4b3afe8137524ae8a),
+(4, 5, 0x50cf418f7e217391e89ab9c2879ae68d7c7c597d846b4fe1c082b5b16e5d0c85c328fbf48ad3490bcfe94f446700ae0a4b0190e76d26cc752abced58f480c80f, 0xdc74a589004377ab14836dce68ce2ca34e5b17147cd78ad4b3afe8137524ae8a);
+
+This Feature Branch:
+
+--
+-- Dumping data for table `state_user_transactions`
+--
+
+INSERT INTO `state_user_transactions` (`id`, `state_user_id`, `transaction_id`, `transaction_type_id`, `balance`, `balance_date`) VALUES
+(1, 2, 1, 1, 10000000, '2021-11-05 00:25:46'),
+(12, 2, 7, 2, 9900000, '2021-11-05 00:55:37'),
+(13, 1, 7, 2, 100000, '2021-11-05 00:55:37'),
+(14, 2, 8, 2, 9800000, '2021-11-05 01:00:04'),
+(15, 1, 8, 2, 200000, '2021-11-05 01:00:04'),
+(16, 2, 10, 2, 9699772, '2021-11-05 01:17:41'),
+(17, 1, 10, 2, 299995, '2021-11-05 01:17:41');
+
+--
+-- Dumping data for table `transactions`
+--
+
+INSERT INTO `transactions` (`id`, `state_group_id`, `transaction_type_id`, `tx_hash`, `memo`, `received`, `blockchain_type_id`) VALUES
+(1, NULL, 1, 0xdd030d475479877587d927ed9024784ba62266cf1f3d87862fc98ad68f7b26e400000000000000000000000000000000, '', '2021-11-05 00:25:46', 1),
+(7, NULL, 2, NULL, 'Hier!', '2021-11-05 00:55:37', 1),
+(8, NULL, 2, NULL, 'Hier!', '2021-11-05 01:00:04', 1),
+(9, NULL, 9, 0xb1cbedbf126aa35f5edbf06e181c415361d05228ab4da9d19a4595285a673dfa00000000000000000000000000000000, '', '2021-11-05 01:05:34', 1),
+(10, NULL, 2, NULL, 'Hier!', '2021-11-05 01:17:41', 1);
+
+--
+-- Dumping data for table `transaction_signatures`
+--
+
+INSERT INTO `transaction_signatures` (`id`, `transaction_id`, `signature`, `pubkey`) VALUES
+(1, 1, 0x60d632479707e5d01cdc32c3326b5a5bae11173a0c06b719ee7b552f9fd644de1a0cd4afc207253329081d39dac1a63421f51571d836995c649fc39afac7480a, 0x48c45cb4fea925e83850f68f2fa8f27a1a4ed1bcba68cdb59fcd86adef3f52ee);
+*/
+
+const sendEMail = async (emailDef: any): Promise<boolean> => {
+  if (!CONFIG.EMAIL) {
+    // eslint-disable-next-line no-console
+    console.log('Emails are disabled via config')
+    return false
+  }
+  const transporter = createTransport({
+    host: CONFIG.EMAIL_SMTP_URL,
+    port: Number(CONFIG.EMAIL_SMTP_PORT),
+    secure: false, // true for 465, false for other ports
+    requireTLS: true,
+    auth: {
+      user: CONFIG.EMAIL_USERNAME,
+      pass: CONFIG.EMAIL_PASSWORD,
+    },
+  })
+  const info = await transporter.sendMail(emailDef)
+  if (!info.messageId) {
+    throw new Error('error sending notification email, but transaction succeed')
+  }
+  return true
+}
+
 // Helper function
 async function calculateAndAddDecayTransactions(
@@ -210,6 +405,87 @@ async function listTransactions(
   return transactionList
 }
+
+// helper helper function
+async function updateStateBalance(
+  user: dbUser,
+  centAmount: number,
+  received: Date,
+  queryRunner: QueryRunner,
+): Promise<dbBalance> {
+  const balanceRepository = getCustomRepository(BalanceRepository)
+  let balance = await balanceRepository.findByUser(user.id)
+  if (!balance) {
+    balance = new dbBalance()
+    balance.userId = user.id
+    balance.amount = centAmount
+    balance.modified = received
+  } else {
+    const decaiedBalance = await calculateDecay(balance.amount, balance.recordDate, received).catch(
+      () => {
+        throw new Error('error by calculating decay')
+      },
+    )
+    balance.amount = Number(decaiedBalance) + centAmount
+    balance.modified = new Date()
+  }
+  if (balance.amount <= 0) {
+    throw new Error('error new balance <= 0')
+  }
+  balance.recordDate = received
+  return queryRunner.manager.save(balance).catch((error) => {
+    throw new Error('error saving balance:' + error)
+  })
+}
+
+// helper helper function
+async function addUserTransaction(
+  user: dbUser,
+  transaction: dbTransaction,
+  centAmount: number,
+  queryRunner: QueryRunner,
+): Promise<dbUserTransaction> {
+  let newBalance = centAmount
+  const userTransactionRepository = getCustomRepository(UserTransactionRepository)
+  const lastUserTransaction = await userTransactionRepository.findLastForUser(user.id)
+  if (lastUserTransaction) {
+    newBalance += Number(
+      await calculateDecay(
+        Number(lastUserTransaction.balance),
+        lastUserTransaction.balanceDate,
+        transaction.received,
+      ).catch(() => {
+        throw new Error('error by calculating decay')
+      }),
+    )
+  }
+
+  if (newBalance <= 0) {
+    throw new Error('error new balance <= 0')
+  }
+
+  const newUserTransaction = new dbUserTransaction()
+  newUserTransaction.userId = user.id
+  newUserTransaction.transactionId = transaction.id
+  newUserTransaction.transactionTypeId = transaction.transactionTypeId
+  newUserTransaction.balance = newBalance
+  newUserTransaction.balanceDate = transaction.received
+
+  return queryRunner.manager.save(newUserTransaction).catch((error) => {
+    throw new Error('Error saving user transaction: ' + error)
+  })
+}
+
+async function getPublicKey(email: string, sessionId: number): Promise<string | undefined> {
+  const result = await apiPost(CONFIG.LOGIN_API_URL + 'getUserInfos', {
+    session_id: sessionId,
+    email,
+    ask: ['user.pubkeyhex'],
+  })
+  if (result.success) {
+    return result.data.userData.pubkeyhex
+  }
+}
+
 @Resolver()
 export class TransactionResolver {
   @Authorized()
@@ -252,19 +528,147 @@ export class TransactionResolver {
     @Args() { email, amount, memo }: TransactionSendArgs,
     @Ctx() context: any,
   ): Promise<string> {
-    const payload = {
-      session_id: context.sessionId,
-      target_email: email,
-      amount: amount * 10000,
-      memo,
-      auto_sign: true,
-      transaction_type: 'transfer',
-      blockchain_type: 'mysql',
+    // TODO this is subject to replay attacks
+    // validate sender user (logged in)
+    const userRepository = getCustomRepository(UserRepository)
+    const senderUser = await userRepository.findByPubkeyHex(context.pubKey)
+    if (senderUser.pubkey.length !== 32) {
+      throw new Error('invalid sender public key')
     }
-    const result = await apiPost(CONFIG.LOGIN_API_URL + 'createTransaction', payload)
-    if (!result.success) {
-      throw new Error(result.data)
+    if (!hasUserAmount(senderUser, amount)) {
+      throw new Error("user hasn't enough GDD")
     }
+
+    // validate recipient user
+    // TODO: the detour over the public key is unnecessary
+    const recipiantPublicKey = await getPublicKey(email, context.sessionId)
+    if (!recipiantPublicKey) {
+      throw new Error('recipiant not known')
+    }
+    if (!isHexPublicKey(recipiantPublicKey)) {
+      throw new Error('invalid recipiant public key')
+    }
+    const recipiantUser = await userRepository.findByPubkeyHex(recipiantPublicKey)
+    if (!recipiantUser) {
+      throw new Error('Cannot find recipiant user by local send coins transaction')
+    } else if (recipiantUser.disabled) {
+      throw new Error('recipiant user account is disabled')
+    }
+
+    // validate amount
+    if (amount <= 0) {
+      throw new Error('invalid amount')
+    }
+
+    const centAmount = Math.trunc(amount * 10000)
+
+    const queryRunner = getConnection().createQueryRunner()
+    await queryRunner.connect()
+    await queryRunner.startTransaction('READ UNCOMMITTED')
+    try {
+      // transaction
+      let transaction = new dbTransaction()
+      transaction.transactionTypeId = TransactionTypeId.SEND
+      transaction.memo = memo
+
+      // TODO: NO! this is problematic in its construction
+      const insertResult = await queryRunner.manager.insert(dbTransaction, transaction)
+      transaction = await queryRunner.manager
+        .findOneOrFail(dbTransaction, insertResult.generatedMaps[0].id)
+        .catch((error) => {
+          throw new Error('error loading saved transaction: ' + error)
+        })
+
+      // Insert Transaction: sender - amount
+      const senderUserTransactionBalance = await addUserTransaction(
+        senderUser,
+        transaction,
+        -centAmount,
+        queryRunner,
+      )
+      // Insert Transaction: recipient + amount
+      const recipiantUserTransactionBalance = await addUserTransaction(
+        recipiantUser,
+        transaction,
+        centAmount,
+        queryRunner,
+      )
+
+      // Update Balance: sender - amount
+      const senderStateBalance = await updateStateBalance(
+        senderUser,
+        -centAmount,
+        transaction.received,
+        queryRunner,
+      )
+      // Update Balance: recipiant + amount
+      const recipiantStateBalance = await updateStateBalance(
+        recipiantUser,
+        centAmount,
+        transaction.received,
+        queryRunner,
+      )
+
+      if (senderStateBalance.amount !== senderUserTransactionBalance.balance) {
+        throw new Error('db data corrupted, sender')
+      }
+      if (recipiantStateBalance.amount !== recipiantUserTransactionBalance.balance) {
+        throw new Error('db data corrupted, recipiant')
+      }
+
+      // transactionSendCoin
+      const transactionSendCoin = new dbTransactionSendCoin()
+      transactionSendCoin.transactionId = transaction.id
+      transactionSendCoin.userId = senderUser.id
+      transactionSendCoin.senderPublic = senderUser.pubkey
+      transactionSendCoin.recipiantUserId = recipiantUser.id
+      transactionSendCoin.recipiantPublic = Buffer.from(fromHex(recipiantPublicKey))
+      transactionSendCoin.amount = centAmount
+      transactionSendCoin.senderFinalBalance = senderStateBalance.amount
+      await queryRunner.manager.save(transactionSendCoin).catch((error) => {
+        throw new Error('error saving transaction send coin: ' + error)
+      })
+
+      await queryRunner.manager.save(transaction).catch((error) => {
+        throw new Error('error saving transaction with tx hash: ' + error)
+      })
+
+      await queryRunner.commitTransaction()
+    } catch (e) {
+      await queryRunner.rollbackTransaction()
+      throw e
+    } finally {
+      await queryRunner.release()
+      // TODO: This is broken code - we should never correct an autoincrement index in production
+      // according to dario it is required tho to properly work. The index of the table is used as
+      // index for the transaction which requires a chain without gaps
+      const count = await queryRunner.manager.count(dbTransaction)
+      // fix autoincrement value which seems not effected from rollback
+      await queryRunner
+        .query('ALTER TABLE `transactions` auto_increment = ?', [count])
+        .catch((error) => {
+          // eslint-disable-next-line no-console
+          console.log('problems with reset auto increment: %o', error)
+        })
+    }
+    // send notification email
+    // TODO: translate
+    await sendEMail({
+      from: 'Gradido (nicht antworten) <' + CONFIG.EMAIL_SENDER + '>',
+      to: recipiantUser.firstName + ' ' + recipiantUser.lastName + ' <' + recipiantUser.email + '>',
+      subject: 'Gradido Überweisung',
+      text: `Hallo ${recipiantUser.firstName} ${recipiantUser.lastName}

+Du hast soeben ${amount} GDD von ${senderUser.firstName} ${senderUser.lastName} erhalten.
+${senderUser.firstName} ${senderUser.lastName} schreibt:

+${memo}

+Bitte antworte nicht auf diese E-Mail!

+Mit freundlichen Grüßen Gradido Community Server`,
+    })
+
     return 'success'
   }
 }
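A note on units, since the new `sendCoins` implementation relies on it throughout: GDD amounts are persisted as fixed-point integers in units of 1/10000 GDD (`Math.trunc(amount * 10000)` above). That is consistent with the SQL dumps in the test notes, where the 1000 GDD creation appears as 10000000 and each 10 GDD transfer as 100000. A minimal illustration follows; the helper name is ours, not from the codebase:

```ts
// Illustrative sketch only: GDD amounts are stored as integers in units of 1/10000 GDD.
const toCentAmount = (gdd: number): number => Math.trunc(gdd * 10000)

toCentAmount(1000) // 10000000 — matches the creation row in the dumps above
toCentAmount(10)   //   100000 — matches the 10 GDD transfers ('Hier!')
toCentAmount(0.5)  //     5000
```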
@@ -3,8 +3,8 @@

 import { Resolver, Query, Args, Arg, Authorized, Ctx, UseMiddleware, Mutation } from 'type-graphql'
 import { from_hex as fromHex } from 'libsodium-wrappers'
+import { getCustomRepository } from 'typeorm'
 import CONFIG from '../../config'
-import { CheckUsernameResponse } from '../model/CheckUsernameResponse'
 import { LoginViaVerificationCode } from '../model/LoginViaVerificationCode'
 import { SendPasswordResetEmailResponse } from '../model/SendPasswordResetEmailResponse'
 import { UpdateUserInfosResponse } from '../model/UpdateUserInfosResponse'

@@ -22,10 +22,10 @@ import {
   klicktippNewsletterStateMiddleware,
 } from '../../middleware/klicktippMiddleware'
 import { CheckEmailResponse } from '../model/CheckEmailResponse'
-import { getCustomRepository } from 'typeorm'
 import { UserSettingRepository } from '../../typeorm/repository/UserSettingRepository'
 import { Setting } from '../enum/Setting'
 import { UserRepository } from '../../typeorm/repository/User'
+import { LoginUser } from '@entity/LoginUser'

 @Resolver()
 export class UserResolver {

@@ -64,7 +64,7 @@ export class UserResolver {
       userEntity.email = user.email
       userEntity.pubkey = Buffer.from(fromHex(user.pubkey))

-      userEntity.save().catch(() => {
+      userRepository.save(userEntity).catch(() => {
         throw new Error('error by save userEntity')
       })
     })

@@ -144,7 +144,8 @@ export class UserResolver {
     dbuser.lastName = user.lastName
     dbuser.username = user.username

-    dbuser.save().catch(() => {
+    const userRepository = getCustomRepository(UserRepository)
+    userRepository.save(dbuser).catch(() => {
       throw new Error('error saving user')
     })

@@ -246,7 +247,7 @@ export class UserResolver {
       userEntityChanged = true
     }
     if (userEntityChanged) {
-      userEntity.save().catch((error) => {
+      userRepository.save(userEntity).catch((error) => {
         throw new Error(error)
       })
     }

@@ -275,15 +276,27 @@ export class UserResolver {
     return response
   }

-  @Query(() => CheckUsernameResponse)
-  async checkUsername(
-    @Args() { username, groupId = 1 }: CheckUsernameArgs,
-  ): Promise<CheckUsernameResponse> {
-    const response = await apiGet(
-      CONFIG.LOGIN_API_URL + `checkUsername?username=${username}&group_id=${groupId}`,
-    )
-    if (!response.success) throw new Error(response.data)
-    return new CheckUsernameResponse(response.data)
+  @Query(() => Boolean)
+  async checkUsername(@Args() { username }: CheckUsernameArgs): Promise<boolean> {
+    // Username empty?
+    if (username === '') {
+      throw new Error('Username must be set.')
+    }
+
+    // Do we fulfill the minimum character length?
+    const MIN_CHARACTERS_USERNAME = 2
+    if (username.length < MIN_CHARACTERS_USERNAME) {
+      throw new Error(`Username must be at minimum ${MIN_CHARACTERS_USERNAME} characters long.`)
+    }
+
+    const usersFound = await LoginUser.count({ username })
+
+    // Username already present?
+    if (usersFound !== 0) {
+      throw new Error(`Username "${username}" already taken.`)
+    }
+
+    return true
   }

   @Query(() => CheckEmailResponse)
@@ -6,7 +6,7 @@ import isAuthorized from './directive/isAuthorized'

 const schema = async (): Promise<GraphQLSchema> => {
   return buildSchema({
-    resolvers: [path.join(__dirname, 'resolver', `*.{js,ts}`)],
+    resolvers: [path.join(__dirname, 'resolver', `!(*.test).{js,ts}`)],
     authChecker: isAuthorized,
   })
 }
@@ -1,64 +1,14 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */

-import 'reflect-metadata'
-import 'module-alias/register'
-import express from 'express'
-import { ApolloServer } from 'apollo-server-express'
+import createServer from './server/createServer'

 // config
 import CONFIG from './config'

-// database
-import connection from './typeorm/connection'
-import getDBVersion from './typeorm/getDBVersion'
-
-// server
-import cors from './server/cors'
-import context from './server/context'
-import plugins from './server/plugins'
-
-// graphql
-import schema from './graphql/schema'
-
-// TODO implement
-// import queryComplexity, { simpleEstimator, fieldConfigEstimator } from "graphql-query-complexity";
-
-const DB_VERSION = '0002-add_settings'
-
 async function main() {
-  // open mysql connection
-  const con = await connection()
-  if (!con || !con.isConnected) {
-    throw new Error(`Couldn't open connection to database`)
-  }
-
-  // check for correct database version
-  const dbVersion = await getDBVersion()
-  if (!dbVersion || dbVersion.indexOf(DB_VERSION) === -1) {
-    throw new Error(
-      `Wrong database version - the backend requires '${DB_VERSION}' but found '${
-        dbVersion || 'None'
-      }'`,
-    )
-  }
-
-  // Express Server
-  const server = express()
-
-  // cors
-  server.use(cors)
-
-  // Apollo Server
-  const apollo = new ApolloServer({
-    schema: await schema(),
-    playground: CONFIG.GRAPHIQL,
-    context,
-    plugins,
-  })
-  apollo.applyMiddleware({ app: server })
-
-  // Start Server
-  server.listen(CONFIG.PORT, () => {
+  const { app } = await createServer()
+
+  app.listen(CONFIG.PORT, () => {
     // eslint-disable-next-line no-console
     console.log(`Server is running at http://localhost:${CONFIG.PORT}`)
     if (CONFIG.GRAPHIQL) {
backend/src/server/createServer.ts (new file) — 64 lines

@@ -0,0 +1,64 @@
+/* eslint-disable @typescript-eslint/no-explicit-any */
+/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
+
+import 'reflect-metadata'
+import 'module-alias/register'
+
+import { ApolloServer } from 'apollo-server-express'
+import express from 'express'
+
+// database
+import connection from '../typeorm/connection'
+import getDBVersion from '../typeorm/getDBVersion'
+
+// server
+import cors from './cors'
+import serverContext from './context'
+import plugins from './plugins'
+
+// config
+import CONFIG from '../config'
+
+// graphql
+import schema from '../graphql/schema'
+
+// TODO implement
+// import queryComplexity, { simpleEstimator, fieldConfigEstimator } from "graphql-query-complexity";
+
+const DB_VERSION = '0004-login_server_data'
+
+const createServer = async (context: any = serverContext): Promise<any> => {
+  // open mysql connection
+  const con = await connection()
+  if (!con || !con.isConnected) {
+    throw new Error(`Couldn't open connection to database`)
+  }
+
+  // check for correct database version
+  const dbVersion = await getDBVersion()
+  if (!dbVersion || dbVersion.indexOf(DB_VERSION) === -1) {
+    throw new Error(
+      `Wrong database version - the backend requires '${DB_VERSION}' but found '${
+        dbVersion || 'None'
+      }'`,
+    )
+  }
+
+  // Express Server
+  const app = express()
+
+  // cors
+  app.use(cors)
+
+  // Apollo Server
+  const apollo = new ApolloServer({
+    schema: await schema(),
+    playground: CONFIG.GRAPHIQL,
+    context,
+    plugins,
+  })
+  apollo.applyMiddleware({ app })
+  return { apollo, app, con }
+}
+
+export default createServer
@@ -17,4 +17,11 @@ export class UserTransactionRepository extends Repository<UserTransaction> {
       .offset(offset)
       .getManyAndCount()
   }
+
+  findLastForUser(userId: number): Promise<UserTransaction | undefined> {
+    return this.createQueryBuilder('userTransaction')
+      .where('userTransaction.userId = :userId', { userId })
+      .orderBy('userTransaction.transactionId', 'DESC')
+      .getOne()
+  }
 }
@@ -2,24 +2,25 @@ import { decayFormula, calculateDecay } from './decay'

 describe('utils/decay', () => {
   describe('decayFormula', () => {
-    it('has base 0.99999997802044727', async () => {
+    it('has base 0.99999997802044727', () => {
       const amount = 1.0
       const seconds = 1
-      expect(await decayFormula(amount, seconds)).toBe(0.99999997802044727)
+      expect(decayFormula(amount, seconds)).toBe(0.99999997802044727)
     })
     // Not sure if the following skipped tests make sense!?
-    it.skip('has negative decay?', async () => {
+    it('has negative decay?', async () => {
       const amount = -1.0
       const seconds = 1
       expect(await decayFormula(amount, seconds)).toBe(-0.99999997802044727)
     })
-    it.skip('has correct backward calculation', async () => {
+    it('has correct backward calculation', async () => {
       const amount = 1.0
       const seconds = -1
       expect(await decayFormula(amount, seconds)).toBe(1.0000000219795533)
     })
-    it.skip('has correct forward calculation', async () => {
-      const amount = 1.000000219795533
+    // not possible, nodejs hasn't enough accuracy
+    it('has correct forward calculation', async () => {
+      const amount = 1.0 / 0.99999997802044727
       const seconds = 1
       expect(await decayFormula(amount, seconds)).toBe(1.0)
     })

@@ -32,7 +33,7 @@ describe('utils/decay', () => {
     expect(await calculateDecay(1.0, oneSecondAgo, now)).toBe(0.99999997802044727)
   })

-  it.skip('returns input amount when from and to is the same', async () => {
+  it('returns input amount when from and to is the same', async () => {
     const now = new Date()
     expect(await calculateDecay(100.0, now, now)).toBe(100.0)
   })
@@ -7,6 +7,15 @@ function decayFormula(amount: number, seconds: number): number {
 }
 
 async function calculateDecay(amount: number, from: Date, to: Date): Promise<number> {
+  if (amount === undefined || !from || !to) {
+    throw new Error('at least one parameter is undefined')
+  }
+  if (from === to) {
+    return amount
+  }
+  if (to < from) {
+    throw new Error('to < from, so the target date is in the past?')
+  }
   // load decay start block
   const transactionRepository = getCustomRepository(TransactionRepository)
   const decayStartBlock = await transactionRepository.findDecayStartBlock()
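The guard clauses added above only cover the argument edge cases; the decay curve itself is pinned by the unit tests, which expect 1.0 GDD to become 0.99999997802044727 GDD after one second. A minimal sketch of a formula consistent with those tests — an assumption, not necessarily the repository's exact `decayFormula` implementation:

```ts
// Sketch only — base value taken from decay.test.ts, everything else is assumed.
const DECAY_BASE_PER_SECOND = 0.99999997802044727

function decayFormulaSketch(amount: number, seconds: number): number {
  // exponential decay: after one second, 1.0 GDD becomes 0.99999997802044727 GDD
  return amount * Math.pow(DECAY_BASE_PER_SECOND, seconds)
}
```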
backend/src/util/round.test.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import { roundCeilFrom4, roundFloorFrom4, roundCeilFrom2, roundFloorFrom2 } from './round'

describe('utils/round', () => {
  it('roundCeilFrom4', () => {
    const amount = 11617
    expect(roundCeilFrom4(amount)).toBe(1.17)
  })
  // Not sure if the following skiped tests make sence!?
  it('roundFloorFrom4', () => {
    const amount = 11617
    expect(roundFloorFrom4(amount)).toBe(1.16)
  })
  it('roundCeilFrom2', () => {
    const amount = 1216
    expect(roundCeilFrom2(amount)).toBe(13)
  })
  // not possible, nodejs hasn't enough accuracy
  it('roundFloorFrom2', () => {
    const amount = 1216
    expect(roundFloorFrom2(amount)).toBe(12)
  })
})
@@ -1,3 +1,8 @@
+import { User as dbUser } from '@entity/User'
+import { Balance as dbBalance } from '@entity/Balance'
+import { getRepository } from 'typeorm'
+import { calculateDecay } from './decay'
+
 function isStringBoolean(value: string): boolean {
   const lowerValue = value.toLowerCase()
   if (lowerValue === 'true' || lowerValue === 'false') {
@@ -6,4 +11,18 @@ function isStringBoolean(value: string): boolean {
   return false
 }
 
-export { isStringBoolean }
+function isHexPublicKey(publicKey: string): boolean {
+  return /^[0-9A-Fa-f]{64}$/i.test(publicKey)
+}
+
+async function hasUserAmount(user: dbUser, amount: number): Promise<boolean> {
+  if (amount < 0) return false
+  const balanceRepository = getRepository(dbBalance)
+  const balance = await balanceRepository.findOne({ userId: user.id })
+  if (!balance) return false
+
+  const decay = await calculateDecay(balance.amount, balance.recordDate, new Date())
+  return decay > amount
+}
+
+export { isHexPublicKey, hasUserAmount, isStringBoolean }
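A short sketch of how the new helpers could be combined before accepting a coin transfer (recipient key format plus decayed sender balance). The surrounding resolver is not part of this diff, so the function name and flow are illustrative:

```ts
// Illustrative only — combines the helpers added in validate.ts above.
import { User as dbUser } from '@entity/User'
import { isHexPublicKey, hasUserAmount } from './validate'

async function canSendCoins(sender: dbUser, recipientPubKey: string, amount: number): Promise<boolean> {
  if (!isHexPublicKey(recipientPubKey)) return false
  // hasUserAmount already applies the decay to the stored balance before comparing
  return hasUserAmount(sender, amount)
}
```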
backend/yarn.lock (2644 lines changed)
File diff suppressed because it is too large
@@ -15,7 +15,8 @@ class ServerUsersController extends AppController
     public function initialize()
     {
         parent::initialize();
-        $this->Auth->allow(['add', 'edit']);
+        // uncomment in devmode to add new community server admin user, but don't!!! commit it
+        //$this->Auth->allow(['add', 'edit']);
         $this->Auth->deny('index');
     }
 
@@ -17,7 +17,7 @@ phpServer.host = nginx
 loginServer.path = http://localhost/account
 loginServer.default_locale = de
 loginServer.db.host = mariadb
-loginServer.db.name = gradido_login
+loginServer.db.name = gradido_community
 loginServer.db.user = root
 loginServer.db.password =
 loginServer.db.port = 3306
@@ -17,7 +17,7 @@ phpServer.host = nginx
 loginServer.path = http://localhost/account
 loginServer.default_locale = de
 loginServer.db.host = mariadb
-loginServer.db.name = gradido_login_test
+loginServer.db.name = gradido_community_test
 loginServer.db.user = root
 loginServer.db.password =
 loginServer.db.port = 3306
@@ -1,6 +0,0 @@
-INSERT INTO `groups` (`id`, `alias`, `name`, `url`, `host`, `home`, `description`) VALUES
-(1, 'docker', 'docker gradido group', 'localhost', 'nginx', '/', 'gradido test group for docker and stage2 with blockchain db');
-
-
-
-
@@ -24,6 +24,9 @@ export class TransactionSendCoin extends BaseEntity {
   @Column()
   amount: number
 
+  @Column({ name: 'sender_final_balance' })
+  senderFinalBalance: number
+
   @OneToOne(() => Transaction)
   @JoinColumn({ name: 'transaction_id' })
   transaction: Transaction
database/entity/0003-login_server_tables/LoginUser.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import { BaseEntity, Entity, PrimaryGeneratedColumn, Column } from 'typeorm'

// Moriz: I do not like the idea of having two user tables
@Entity('login_users')
export class LoginUser extends BaseEntity {
  @PrimaryGeneratedColumn('increment', { unsigned: true })
  id: number

  @Column({ length: 191, unique: true })
  email: string

  @Column({ name: 'first_name', length: 150 })
  firstName: string

  @Column({ name: 'last_name', length: 255, default: '' })
  lastName: string

  @Column({ length: 255, default: '' })
  username: string

  @Column({ default: '' })
  description: string

  @Column({ type: 'bigint', default: 0, unsigned: true })
  password: string

  @Column({ name: 'pubkey', type: 'binary', length: 32, default: null, nullable: true })
  pubKey: Buffer

  @Column({ name: 'privkey', type: 'binary', length: 80, default: null, nullable: true })
  privKey: Buffer

  @Column({ name: 'email_hash', type: 'binary', length: 32, default: null, nullable: true })
  emailHash: Buffer

  @Column({ name: 'created', default: () => 'CURRENT_TIMESTAMP' })
  createdAt: Date

  @Column({ name: 'email_checked', default: 0 })
  emailChecked: boolean

  @Column({ name: 'passphrase_shown', default: 0 })
  passphraseShown: boolean

  @Column({ length: 4, default: 'de' })
  language: string

  @Column({ default: 0 })
  disabled: boolean

  @Column({ name: 'group_id', default: 0, unsigned: true })
  groupId: number

  @Column({ name: 'publisher_id', default: 0 })
  publisherId: number
}
database/entity/LoginUser.ts (new file, 1 line)
@@ -0,0 +1 @@
export { LoginUser } from './0003-login_server_tables/LoginUser'
@@ -1,4 +1,5 @@
 import { Balance } from './Balance'
+import { LoginUser } from './LoginUser'
 import { Migration } from './Migration'
 import { Transaction } from './Transaction'
 import { TransactionCreation } from './TransactionCreation'
@@ -9,6 +10,7 @@ import { UserTransaction } from './UserTransaction'
 
 export const entities = [
   Balance,
+  LoginUser,
   Migration,
   Transaction,
   TransactionCreation,
@@ -11,7 +11,7 @@
 
 export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
   await queryFn(`
-    CREATE TABLE IF NOT EXISTS \`user_setting\` (
+    CREATE TABLE \`user_setting\` (
       \`id\` int(10) unsigned NOT NULL AUTO_INCREMENT,
       \`userId\` int(11) NOT NULL,
       \`key\` varchar(255) NOT NULL,
@@ -22,5 +22,5 @@ export async function upgrade(queryFn: (query: string, values?: any[]) => Promis
 
 export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
   // write downgrade logic as parameter of queryFn
-  await queryFn(`DROP TABLE IF EXISTS \`user_setting\`;`)
+  await queryFn(`DROP TABLE \`user_setting\`;`)
 }
database/migrations/0003-login_server_tables.ts (new file, 153 lines)
@@ -0,0 +1,153 @@
/* FIRST MIGRATION
 *
 * This migration is special since it takes into account that
 * the database can be setup already but also may not be.
 * Therefore you will find all `CREATE TABLE` statements with
 * a `IF NOT EXISTS`, all `INSERT` with an `IGNORE` and in the
 * downgrade function all `DROP TABLE` with a `IF EXISTS`.
 * This ensures compatibility for existing or non-existing
 * databases.
 */

export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
  await queryFn(`
    CREATE TABLE \`login_app_access_tokens\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      \`user_id\` int NOT NULL,
      \`access_code\` bigint unsigned NOT NULL,
      \`created\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
      \`updated\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
      PRIMARY KEY (\`id\`),
      UNIQUE KEY \`access_code\` (\`access_code\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
  await queryFn(`
    CREATE TABLE \`login_elopage_buys\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      \`elopage_user_id\` int DEFAULT NULL,
      \`affiliate_program_id\` int NOT NULL,
      \`publisher_id\` int NOT NULL,
      \`order_id\` int NOT NULL,
      \`product_id\` int NOT NULL,
      \`product_price\` int NOT NULL,
      \`payer_email\` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
      \`publisher_email\` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
      \`payed\` tinyint NOT NULL,
      \`success_date\` datetime NOT NULL,
      \`event\` varchar(255) NOT NULL,
      PRIMARY KEY (\`id\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
  await queryFn(`
    CREATE TABLE \`login_email_opt_in_types\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      \`name\` varchar(255) NOT NULL,
      \`description\` varchar(255) NOT NULL,
      PRIMARY KEY (\`id\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
  await queryFn(`
    CREATE TABLE \`login_email_opt_in\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      \`user_id\` int NOT NULL,
      \`verification_code\` bigint unsigned NOT NULL,
      \`email_opt_in_type_id\` int NOT NULL,
      \`created\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
      \`resend_count\` int DEFAULT '0',
      \`updated\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
      PRIMARY KEY (\`id\`),
      UNIQUE KEY \`verification_code\` (\`verification_code\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
  await queryFn(`
    CREATE TABLE \`login_groups\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      \`alias\` varchar(190) NOT NULL,
      \`name\` varchar(255) NOT NULL,
      \`url\` varchar(255) NOT NULL,
      \`host\` varchar(255) DEFAULT "/",
      \`home\` varchar(255) DEFAULT "/",
      \`description\` text,
      PRIMARY KEY (\`id\`),
      UNIQUE KEY \`alias\` (\`alias\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
  await queryFn(`
    CREATE TABLE \`login_pending_tasks\` (
      \`id\` int UNSIGNED NOT NULL AUTO_INCREMENT,
      \`user_id\` int UNSIGNED DEFAULT 0,
      \`request\` varbinary(2048) NOT NULL,
      \`created\` datetime NOT NULL,
      \`finished\` datetime DEFAULT '2000-01-01 000000',
      \`result_json\` text DEFAULT NULL,
      \`param_json\` text DEFAULT NULL,
      \`task_type_id\` int UNSIGNED NOT NULL,
      \`child_pending_task_id\` int UNSIGNED DEFAULT 0,
      \`parent_pending_task_id\` int UNSIGNED DEFAULT 0,
      PRIMARY KEY (\`id\`)
    ) ENGINE = InnoDB DEFAULT CHARSET=utf8mb4;
  `)
  await queryFn(`
    CREATE TABLE \`login_roles\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      \`name\` varchar(255) NOT NULL,
      \`description\` varchar(255) NOT NULL,
      \`flags\` bigint NOT NULL DEFAULT '0',
      PRIMARY KEY (\`id\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
  await queryFn(`
    CREATE TABLE \`login_user_backups\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      \`user_id\` int NOT NULL,
      \`passphrase\` text NOT NULL,
      \`mnemonic_type\` int DEFAULT '-1',
      PRIMARY KEY (\`id\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
  await queryFn(`
    CREATE TABLE \`login_user_roles\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      \`user_id\` int NOT NULL,
      \`role_id\` int NOT NULL,
      PRIMARY KEY (\`id\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
  await queryFn(`
    CREATE TABLE \`login_users\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      \`email\` varchar(191) NOT NULL,
      \`first_name\` varchar(150) NOT NULL,
      \`last_name\` varchar(255) DEFAULT '',
      \`username\` varchar(255) DEFAULT '',
      \`description\` text DEFAULT '',
      \`password\` bigint unsigned DEFAULT '0',
      \`pubkey\` binary(32) DEFAULT NULL,
      \`privkey\` binary(80) DEFAULT NULL,
      \`email_hash\` binary(32) DEFAULT NULL,
      \`created\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
      \`email_checked\` tinyint NOT NULL DEFAULT '0',
      \`passphrase_shown\` tinyint NOT NULL DEFAULT '0',
      \`language\` varchar(4) NOT NULL DEFAULT 'de',
      \`disabled\` tinyint DEFAULT '0',
      \`group_id\` int unsigned DEFAULT 0,
      \`publisher_id\` int DEFAULT 0,
      PRIMARY KEY (\`id\`),
      UNIQUE KEY \`email\` (\`email\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
}

export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
  // write downgrade logic as parameter of queryFn
  await queryFn(`DROP TABLE \`login_app_access_tokens\`;`)
  await queryFn(`DROP TABLE \`login_elopage_buys\`;`)
  await queryFn(`DROP TABLE \`login_email_opt_in_types\`;`)
  await queryFn(`DROP TABLE \`login_email_opt_in\`;`)
  await queryFn(`DROP TABLE \`login_groups\`;`)
  await queryFn(`DROP TABLE \`login_pending_tasks\`;`)
  await queryFn(`DROP TABLE \`login_roles\`;`)
  await queryFn(`DROP TABLE \`login_user_backups\`;`)
  await queryFn(`DROP TABLE \`login_user_roles\`;`)
  await queryFn(`DROP TABLE \`login_users\`;`)
}
database/migrations/0004-login_server_data.ts (new file, 74 lines)
@@ -0,0 +1,74 @@
/* FIRST MIGRATION
 *
 * This migration is special since it takes into account that
 * the database can be setup already but also may not be.
 * Therefore you will find all `CREATE TABLE` statements with
 * a `IF NOT EXISTS`, all `INSERT` with an `IGNORE` and in the
 * downgrade function all `DROP TABLE` with a `IF EXISTS`.
 * This ensures compatibility for existing or non-existing
 * databases.
 */

const LOGIN_SERVER_DB = 'gradido_login'

export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
  const loginDatabaseExists = await queryFn(`
    SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${LOGIN_SERVER_DB}'
  `)
  if (loginDatabaseExists.length === 0) {
    // eslint-disable-next-line no-console
    console.log(`Skipping Login Server Database migration - Database ${LOGIN_SERVER_DB} not found`)
    return
  }

  await queryFn(`
    INSERT INTO \`login_app_access_tokens\` SELECT * FROM ${LOGIN_SERVER_DB}.\`app_access_tokens\`;
  `)
  await queryFn(`
    INSERT INTO \`login_elopage_buys\` SELECT * FROM ${LOGIN_SERVER_DB}.\`elopage_buys\`;
  `)
  await queryFn(`
    INSERT INTO \`login_email_opt_in_types\` SELECT * FROM ${LOGIN_SERVER_DB}.\`email_opt_in_types\`;
  `)
  await queryFn(`
    INSERT INTO \`login_email_opt_in\` SELECT * FROM ${LOGIN_SERVER_DB}.\`email_opt_in\`;
  `)
  await queryFn(`
    INSERT INTO \`login_groups\` SELECT * FROM ${LOGIN_SERVER_DB}.\`groups\`;
  `)
  await queryFn(`
    INSERT INTO \`login_pending_tasks\` SELECT * FROM ${LOGIN_SERVER_DB}.\`pending_tasks\`;
  `)
  await queryFn(`
    INSERT INTO \`login_roles\` SELECT * FROM ${LOGIN_SERVER_DB}.\`roles\`;
  `)
  await queryFn(`
    INSERT INTO \`login_user_backups\` SELECT * FROM ${LOGIN_SERVER_DB}.\`user_backups\`;
  `)
  await queryFn(`
    INSERT INTO \`login_user_roles\` SELECT * FROM ${LOGIN_SERVER_DB}.\`user_roles\`;
  `)
  await queryFn(`
    INSERT INTO \`login_users\` SELECT * FROM ${LOGIN_SERVER_DB}.\`users\`;
  `)

  // TODO clarify if we need this on non docker environment?
  await queryFn(`
    INSERT IGNORE INTO \`login_groups\` (\`id\`, \`alias\`, \`name\`, \`url\`, \`host\`, \`home\`, \`description\`) VALUES
    (1, 'docker', 'docker gradido group', 'localhost', 'nginx', '/', 'gradido test group for docker and stage2 with blockchain db');
  `)
}

export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
  // write downgrade logic as parameter of queryFn
  await queryFn(`DELETE FROM \`login_app_access_tokens\`;`)
  await queryFn(`DELETE FROM \`login_elopage_buys\`;`)
  await queryFn(`DELETE FROM \`login_email_opt_in_types\`;`)
  await queryFn(`DELETE FROM \`login_email_opt_in\`;`)
  await queryFn(`DELETE FROM \`login_groups\`;`)
  await queryFn(`DELETE FROM \`login_pending_tasks\`;`)
  await queryFn(`DELETE FROM \`login_roles\`;`)
  await queryFn(`DELETE FROM \`login_user_backups\`;`)
  await queryFn(`DELETE FROM \`login_user_roles\`;`)
  await queryFn(`DELETE FROM \`login_users\`;`)
}
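The migration files only export `upgrade`/`downgrade` functions that take a `queryFn`. A minimal sketch of how such a function could be driven in isolation (for example in a smoke test), assuming a mysql2-style connection; the project's actual migration runner is not part of this diff, so driver, connection options, and function names are assumptions:

```ts
// Sketch under assumptions: the real migration runner is not shown in this diff,
// and the mysql2 driver is only used here to illustrate the queryFn contract.
import { createConnection } from 'mysql2/promise'
import * as migration from './0004-login_server_data'

async function runUpgrade(): Promise<void> {
  const con = await createConnection({ host: 'mariadb', user: 'root', database: 'gradido_community' })
  // adapt mysql2's [rows, fields] result tuple to the queryFn shape the migrations expect
  const queryFn = async (query: string, values?: any[]): Promise<Array<any>> => {
    const [rows] = await con.query(query, values)
    return rows as Array<any>
  }
  await migration.upgrade(queryFn)
  await con.end()
}

runUpgrade()
```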
@@ -36,8 +36,6 @@ cd $PROJECT_PATH
 # git checkout -f master
 git pull
 cd deployment/bare_metal
-echo 'update schemas' >> $UPDATE_HTML
-./update_db_schemas.sh
 echo 'starting with rebuilding login-server<br>' >> $UPDATE_HTML
 ./build_and_start_login_server.sh
 echo 'starting with rebuilding frontend<br>' >> $UPDATE_HTML
@@ -1,7 +0,0 @@
-#!/bin/bash
-# For that to work skeema needed to be installed on system
-# in login_server/skeema skeema configuration files need to be there also in the subfolders
-# Update DB Schemas (only the schemas, not the data)
-
-cd ../../login_server/skeema
-skeema push --allow-unsafe
@@ -91,6 +91,9 @@ services:
   login-server:
     build:
       dockerfile: Dockerfiles/ubuntu/Dockerfile.debug
+    networks:
+      - external-net
+      - internal-net
     security_opt:
       - seccomp:unconfined
     cap_add:
@@ -155,22 +158,7 @@ services:
       - external-net
     volumes:
       - /sessions
 
-  #########################################################
-  ## skeema for updating dbs if changes happend ###########
-  #########################################################
-  skeema:
-    build:
-      context: .
-      dockerfile: ./skeema/Dockerfile
-      target: skeema_dev_run
-    depends_on:
-      - mariadb
-    networks:
-      - internal-net
-    volumes:
-      - ./login_server/skeema/gradido_login:/skeema/gradido_login
-
 volumes:
   frontend_node_modules:
   admin_node_modules:
@@ -2,6 +2,26 @@ version: "3.4"
 
 services:
 
+  ########################################################
+  # BACKEND ##############################################
+  ########################################################
+  backend:
+    image: gradido/backend:test
+    build:
+      target: test
+    networks:
+      - external-net
+      - internal-net
+    environment:
+      - NODE_ENV="test"
+      - DB_HOST=mariadb
+
+  ########################################################
+  # DATABASE #############################################
+  ########################################################
+  database:
+    restart: always # this is very dangerous, but worth a test for the delayed mariadb startup at first run
+
   #########################################################
   ## MARIADB ##############################################
   #########################################################
@@ -9,16 +29,17 @@ services:
     build:
       context: .
       dockerfile: ./mariadb/Dockerfile
-      target: mariadb_server_test
+      target: mariadb_server
     environment:
       - MARIADB_ALLOW_EMPTY_PASSWORD=1
       - MARIADB_USER=root
     networks:
       - internal-net
+      - external-net
     ports:
       - 3306:3306
     volumes:
       - db_test_vol:/var/lib/mysql
 
   #########################################################
   ## LOGIN SERVER #########################################
@@ -75,19 +96,6 @@ services:
       - external-net
     volumes:
       - /sessions
 
-  #########################################################
-  ## skeema for updating dbs if changes happend ###########
-  #########################################################
-  skeema:
-    build:
-      context: .
-      dockerfile: ./skeema/Dockerfile
-      target: skeema_run
-    depends_on:
-      - mariadb
-    networks:
-      - internal-net
-
 networks:
   external-net:
@@ -59,9 +59,8 @@ services:
   #########################################################
   mariadb:
     build:
-      context: .
-      dockerfile: ./mariadb/Dockerfile
-      target: mariadb_server_test
+      context: ./mariadb
+      target: mariadb_server
     environment:
       - MARIADB_ALLOW_EMPTY_PASSWORD=1
       - MARIADB_USER=root
@@ -182,41 +181,6 @@ services:
       - internal-net
     volumes:
       - ./community_server/config/php-fpm/php-ini-overrides.ini:/etc/php/7.4/fpm/conf.d/99-overrides.ini
 
-  #########################################################
-  ## skeema for updating dbs if changes happend ###########
-  #########################################################
-  skeema:
-    build:
-      context: .
-      dockerfile: ./skeema/Dockerfile
-      target: skeema_run
-    depends_on:
-      - mariadb
-    networks:
-      - internal-net
-
-  #########################################################
-  ## GRADIDO NODE v1 ######################################
-  #########################################################
-  # gradido-node:
-  #   build:
-  #     context: .
-  #     dockerfile: ./gn/docker/deprecated-hedera-node/Dockerfile
-  #   volumes:
-  #     - ${GN_INSTANCE_FOLDER}:/opt/instance
-  #   container_name: ${GN_CONTAINER_NAME}
-
-  #########################################################
-  ## GRADIDO NODE test ###################################
-  #########################################################
-  # gradido-node-test:
-  #   build:
-  #     context: .
-  #     dockerfile: ./gn/docker/deprecated-hedera-node/Dockerfile
-  #   container_name: gn-test
-  #   working_dir: /opt/gn/build
-  #   command: ["./unit_tests"]
-
 networks:
   external-net:
@@ -15,7 +15,7 @@ This document describes the technical overview for the Gradido infrastructur. Be
 
-
+
 
-### Database Skeema
+### Database Skeema (outdated)
 
 
 
docu/Gradido-Admin.epgz (new binary file)
Binary file not shown
docu/create-coins-as-admin.md (new file, 27 lines)
@@ -0,0 +1,27 @@
### User creation
A user needs to be created on the login_server; we do this when we create a user in the client at https://$community_domain/vue/register.

### Admin user
To make a user an admin we need the following SQL query on the gradido_login database:
```
INSERT INTO user_roles (id, user_id, role_id) VALUES (NULL, '1', '1');
```
The user_id has to be looked up in the users table.
Now when we log in at https://$community_domain/account/ we can create coins, but we will be restricted because we can't sign the creations.

### Signation account
First we need to enable server user account creation by uncommenting line 19 in
community_server/src/Controller/ServerUsersController.php:
```php
$this->Auth->allow(['add', 'edit']);
```
This enables us to use this action without being logged in.
To add a signation account we go to the following URL: http://$community_domain/server-users/add

### Coin creation process
The coin creation for work is done at the following URL: http://$community_domain/transaction-creations/create-multi
There we can create coins for as many users as we want, except for ourselves.
Furthermore we must sign the transactions we created. Normally, after clicking the left button (Transaktion abschließen) we should be automatically forwarded to http://$community_domain/account/checkTransactions, where we can do this.
If not, this page can also be reached by clicking the shield icon with the check mark on the dashboard, which is only shown if at least one transaction is waiting to be signed.

For debug purposes you can check the `pending_tasks` table, which stores the transactions which are not signed yet or had errors.
docu/graphics/gradido_admin.png (new binary file, 104 KiB)
Binary file not shown
docu/graphics/userdetails.png (new binary file, 80 KiB)
Binary file not shown
docu/presentation/adminarea-old-new.pdf (new binary file)
Binary file not shown
@@ -9,8 +9,9 @@ module.exports = {
   ],
   // coverageReporters: ['lcov', 'text'],
   moduleNameMapper: {
-    '^@/(.*)$': '<rootDir>/src/$1',
     '\\.(css|less)$': 'identity-obj-proxy',
+    '\\.(scss)$': '<rootDir>/src/assets/mocks/styleMock.js',
+    '^@/(.*)$': '<rootDir>/src/$1',
   },
   transform: {
     '^.+\\.vue$': 'vue-jest',
frontend/src/assets/mocks/styleMock.js (new file, 1 line)
@@ -0,0 +1 @@
module.exports = {}
@@ -75,9 +75,7 @@ export const sendResetPasswordEmail = gql`
 
 export const checkUsername = gql`
   query($username: String!) {
-    checkUsername(username: $username) {
-      state
-    }
+    checkUsername(username: $username)
   }
 `
 
@@ -96,7 +96,7 @@
     "conversion-gdt-euro": "Umrechnung Euro / Gradido Transform (GDT)",
     "credit": "Gutschrift",
     "factor": "Faktor",
-    "formula": "Berechungsformel",
+    "formula": "Berechnungsformel",
     "funding": "Zu den Förderbeiträgen",
     "gdt-received": "Gradido Transform (GDT) erhalten",
     "no-transactions": "Du hast noch keine Gradido Transform (GDT).",
@@ -132,8 +132,8 @@
   },
   "newsletter": {
     "newsletter": "Newsletter",
-    "newsletterFalse": "Du bist aus Newslettersystem ausgetragen.",
-    "newsletterTrue": "Du bist im Newslettersystem eingetragen."
+    "newsletterFalse": "Du erhältst keine Informationen per E-Mail.",
+    "newsletterTrue": "Du erhältst Informationen per E-Mail."
   },
   "password": {
     "change-password": "Passwort ändern",
@@ -132,8 +132,8 @@
   },
   "newsletter": {
     "newsletter": "Newsletter",
-    "newsletterFalse": "You are unsubscribed from newsletter system.",
-    "newsletterTrue": "You are subscribed to newsletter system."
+    "newsletterFalse": "You will not receive any information by e-mail.",
+    "newsletterTrue": "You will receive information by e-mail."
   },
   "password": {
     "change-password": "Change password",
frontend/src/mixins/getCommunityInfo.js (new file, 24 lines)
@@ -0,0 +1,24 @@
import { communityInfo } from '../graphql/queries'

export const getCommunityInfoMixin = {
  methods: {
    getCommunityInfo() {
      if (this.$store.state.community.name === '') {
        this.$apollo
          .query({
            query: communityInfo,
          })
          .then((result) => {
            this.$store.commit('community', result.data.getCommunityInfo)
            return result.data.getCommunityInfo
          })
          .catch((error) => {
            this.$toasted.error(error.message)
          })
      }
    },
  },
  created() {
    this.getCommunityInfo()
  },
}
frontend/src/plugins/dashboard-plugin.test.js (new file, 25 lines)
@@ -0,0 +1,25 @@
import dashboardPlugin from './dashboard-plugin.js'
import Vue from 'vue'

import GlobalComponents from './globalComponents'
import GlobalDirectives from './globalDirectives'

jest.mock('./globalComponents')
jest.mock('./globalDirectives')

jest.mock('vue')

const vueUseMock = jest.fn()
Vue.use = vueUseMock

describe('dashboard plugin', () => {
  dashboardPlugin.install(Vue)

  it('installs the global components', () => {
    expect(vueUseMock).toBeCalledWith(GlobalComponents)
  })

  it('installs the global directives', () => {
    expect(vueUseMock).toBeCalledWith(GlobalDirectives)
  })
})
@@ -89,7 +89,10 @@ export const store = new Vuex.Store({
     token: null,
     coinanimation: true,
     newsletterState: null,
-    community: null,
+    community: {
+      name: '',
+      description: '',
+    },
     hasElopage: false,
     publisherId: null,
   },
@@ -59,7 +59,7 @@ export const loadAllRules = (i18nCallback) => {
         },
       })
       .then((result) => {
-        return result.data.checkUsername.state === 'success'
+        return result.data.checkUsername
       })
       .catch(() => {
         return false
@@ -5,10 +5,10 @@
     <div class="header-body text-center mb-7">
       <b-row class="justify-content-center">
         <b-col xl="5" lg="6" md="8" class="px-2">
-          <h1>{{ $t('checkEmail.title') }}</h1>
+          <h1>{{ $t('site.checkEmail.title') }}</h1>
           <div class="pb-4" v-if="!pending">
             <span v-if="!authenticated">
-              {{ $t('checkEmail.errorText') }}
+              {{ $t('site.checkEmail.errorText') }}
             </span>
           </div>
         </b-col>
@@ -1,4 +1,4 @@
-import { mount, RouterLinkStub } from '@vue/test-utils'
+import { RouterLinkStub, mount } from '@vue/test-utils'
 import flushPromises from 'flush-promises'
 import Login from './Login'
 
@@ -39,10 +39,8 @@ describe('Login', () => {
       commit: mockStoreCommit,
       state: {
         community: {
-          name: 'Gradido Entwicklung',
-          url: 'http://localhost/vue/',
-          registerUrl: 'http://localhost/vue/register',
-          description: 'Die lokale Entwicklungsumgebung von Gradido.',
+          name: '',
+          description: '',
         },
         publisherId: 12345,
       },
@@ -74,10 +72,6 @@ describe('Login', () => {
       wrapper = Wrapper()
     })
 
-    it('renders the Login form', () => {
-      expect(wrapper.find('div.login-form').exists()).toBeTruthy()
-    })
-
     it('commits the community info to the store', () => {
       expect(mockStoreCommit).toBeCalledWith('community', {
         name: 'test12',
@@ -87,6 +81,10 @@ describe('Login', () => {
       })
     })
 
+    it('renders the Login form', () => {
+      expect(wrapper.find('div.login-form').exists()).toBeTruthy()
+    })
+
     describe('communities gives back error', () => {
       beforeEach(() => {
         apolloQueryMock.mockRejectedValue({
@@ -106,7 +104,18 @@ describe('Login', () => {
       })
     })
 
-    describe('Community Data', () => {
+    describe('Community data already loaded', () => {
+      beforeEach(() => {
+        jest.clearAllMocks()
+        mocks.$store.state.community = {
+          name: 'Gradido Entwicklung',
+          url: 'http://localhost/vue/',
+          registerUrl: 'http://localhost/vue/register',
+          description: 'Die lokale Entwicklungsumgebung von Gradido.',
+        }
+        wrapper = Wrapper()
+      })
+
       it('has a Community name', () => {
         expect(wrapper.find('.test-communitydata b').text()).toBe('Gradido Entwicklung')
       })
@@ -116,6 +125,10 @@ describe('Login', () => {
           'Die lokale Entwicklungsumgebung von Gradido.',
         )
       })
+
+      it('does not call community data update', () => {
+        expect(apolloQueryMock).not.toBeCalled()
+      })
     })
 
     describe('links', () => {
@@ -62,7 +62,8 @@
 <script>
 import InputPassword from '../../components/Inputs/InputPassword'
 import InputEmail from '../../components/Inputs/InputEmail'
-import { login, communityInfo } from '../../graphql/queries'
+import { login } from '../../graphql/queries'
+import { getCommunityInfoMixin } from '../../mixins/getCommunityInfo'
 
 export default {
   name: 'login',
@@ -70,6 +71,7 @@ export default {
     InputPassword,
     InputEmail,
   },
+  mixins: [getCommunityInfoMixin],
   data() {
     return {
       form: {
@@ -107,21 +109,6 @@ export default {
         this.$toasted.error(this.$t('error.no-account'))
       })
     },
-    async onCreated() {
-      this.$apollo
-        .query({
-          query: communityInfo,
-        })
-        .then((result) => {
-          this.$store.commit('community', result.data.getCommunityInfo)
-        })
-        .catch((error) => {
-          this.$toasted.error(error.message)
-        })
-    },
-  },
-  created() {
-    this.onCreated()
   },
 }
 </script>
@@ -5,6 +5,19 @@ import Register from './Register'
 
 const localVue = global.localVue
 
+const apolloQueryMock = jest.fn().mockResolvedValue({
+  data: {
+    getCommunityInfo: {
+      name: 'test12',
+      description: 'test community 12',
+      url: 'http://test12.test12/',
+      registerUrl: 'http://test12.test12/vue/register',
+    },
+  },
+})
+
+const toastErrorMock = jest.fn()
+const mockStoreCommit = jest.fn()
 const registerUserMutationMock = jest.fn()
 const routerPushMock = jest.fn()
 
@@ -21,20 +34,23 @@ describe('Register', () => {
       },
       $apollo: {
         mutate: registerUserMutationMock,
+        query: apolloQueryMock,
       },
       $store: {
+        commit: mockStoreCommit,
        state: {
          email: 'peter@lustig.de',
          language: 'en',
          community: {
-            name: 'Gradido Entwicklung',
-            url: 'http://localhost/vue/',
-            registerUrl: 'http://localhost/vue/register',
-            description: 'Die lokale Entwicklungsumgebung von Gradido.',
+            name: '',
+            description: '',
          },
          publisherId: 12345,
        },
      },
+      $toasted: {
+        error: toastErrorMock,
+      },
     }
 
     const stubs = {
@@ -50,6 +66,15 @@ describe('Register', () => {
       wrapper = Wrapper()
     })
 
+    it('commits the community info to the store', () => {
+      expect(mockStoreCommit).toBeCalledWith('community', {
+        name: 'test12',
+        description: 'test community 12',
+        url: 'http://test12.test12/',
+        registerUrl: 'http://test12.test12/vue/register',
+      })
+    })
+
     it('renders the Register form', () => {
       expect(wrapper.find('div#registerform').exists()).toBeTruthy()
     })
@@ -60,16 +85,44 @@ describe('Register', () => {
       })
     })
 
-    describe('Community Data', () => {
-      it('has a Community name?', () => {
+    describe('communities gives back error', () => {
+      beforeEach(() => {
+        apolloQueryMock.mockRejectedValue({
+          message: 'Failed to get communities',
+        })
+        wrapper = Wrapper()
+      })
+
+      it('toasts an error message', () => {
+        expect(toastErrorMock).toBeCalledWith('Failed to get communities')
+      })
+    })
+
+    describe('Community data already loaded', () => {
+      beforeEach(() => {
+        jest.clearAllMocks()
+        mocks.$store.state.community = {
+          name: 'Gradido Entwicklung',
+          url: 'http://localhost/vue/',
+          registerUrl: 'http://localhost/vue/register',
+          description: 'Die lokale Entwicklungsumgebung von Gradido.',
+        }
+        wrapper = Wrapper()
+      })
+
+      it('has a Community name', () => {
         expect(wrapper.find('.test-communitydata b').text()).toBe('Gradido Entwicklung')
       })
 
-      it('has a Community description?', () => {
+      it('has a Community description', () => {
         expect(wrapper.find('.test-communitydata p').text()).toBe(
           'Die lokale Entwicklungsumgebung von Gradido.',
         )
       })
+
+      it('does not call community data update', () => {
+        expect(apolloQueryMock).not.toBeCalled()
+      })
     })
 
     describe('links', () => {
@@ -161,10 +161,12 @@ import InputEmail from '../../components/Inputs/InputEmail.vue'
 import InputPasswordConfirmation from '../../components/Inputs/InputPasswordConfirmation.vue'
 import LanguageSwitchSelect from '../../components/LanguageSwitchSelect.vue'
 import { registerUser } from '../../graphql/mutations'
+import { getCommunityInfoMixin } from '../../mixins/getCommunityInfo'
 
 export default {
   components: { InputPasswordConfirmation, InputEmail, LanguageSwitchSelect },
   name: 'register',
+  mixins: [getCommunityInfoMixin],
   data() {
     return {
       form: {
@@ -205,12 +207,6 @@ export default {
           },
         })
         .then(() => {
-          this.form.email = ''
-          this.form.firstname = ''
-          this.form.lastname = ''
-          this.form.password.password = ''
-          this.form.password.passwordRepeat = ''
-          this.language = ''
           this.$router.push('/thx/register')
         })
         .catch((error) => {
@@ -226,7 +222,6 @@ export default {
       this.form.lastname = ''
       this.form.password.password = ''
       this.form.password.passwordRepeat = ''
-      this.language = ''
     },
   },
   computed: {
@@ -3,6 +3,19 @@ import RegisterCommunity from './RegisterCommunity'
 
 const localVue = global.localVue
 
+const apolloQueryMock = jest.fn().mockResolvedValue({
+  data: {
+    getCommunityInfo: {
+      name: 'test12',
+      description: 'test community 12',
+      url: 'http://test12.test12/',
+      registerUrl: 'http://test12.test12/vue/register',
+    },
+  },
+})
+const toastErrorMock = jest.fn()
+const mockStoreCommit = jest.fn()
+
 describe('RegisterCommunity', () => {
   let wrapper
 
@@ -11,16 +24,21 @@ describe('RegisterCommunity', () => {
         locale: 'en',
       },
       $t: jest.fn((t) => t),
+      $apollo: {
+        query: apolloQueryMock,
+      },
       $store: {
+        commit: mockStoreCommit,
        state: {
          community: {
-            name: 'Gradido Entwicklung',
-            url: 'http://localhost/vue/',
-            registerUrl: 'http://localhost/vue/register',
-            description: 'Die lokale Entwicklungsumgebung von Gradido.',
+            name: '',
+            description: '',
          },
        },
      },
+      $toasted: {
+        error: toastErrorMock,
+      },
     }
 
     const stubs = {
@@ -36,23 +54,56 @@ describe('RegisterCommunity', () => {
       wrapper = Wrapper()
     })
 
+    it('commits the community info to the store', () => {
+      expect(mockStoreCommit).toBeCalledWith('community', {
+        name: 'test12',
+        description: 'test community 12',
+        url: 'http://test12.test12/',
+        registerUrl: 'http://test12.test12/vue/register',
+      })
+    })
+
     it('renders the Div Element "#register-community"', () => {
       expect(wrapper.find('div#register-community').exists()).toBeTruthy()
     })
 
-    describe('Displaying the current community info', () => {
-      it('has a current community name', () => {
-        expect(wrapper.find('.header h1').text()).toBe('Gradido Entwicklung')
+    describe('communities gives back error', () => {
+      beforeEach(() => {
+        apolloQueryMock.mockRejectedValue({
+          message: 'Failed to get communities',
+        })
+        wrapper = Wrapper()
       })
 
-      it('has a current community description', () => {
-        expect(wrapper.find('.header p').text()).toBe(
+      it('toasts an error message', () => {
+        expect(toastErrorMock).toBeCalledWith('Failed to get communities')
+      })
+    })
+
+    describe('Community data already loaded', () => {
+      beforeEach(() => {
+        jest.clearAllMocks()
+        mocks.$store.state.community = {
+          name: 'Gradido Entwicklung',
+          url: 'http://localhost/vue/',
+          registerUrl: 'http://localhost/vue/register',
+          description: 'Die lokale Entwicklungsumgebung von Gradido.',
+        }
+        wrapper = Wrapper()
+      })
+
+      it('has a Community name', () => {
+        expect(wrapper.find('.justify-content-center h1').text()).toBe('Gradido Entwicklung')
+      })
+
+      it('has a Community description', () => {
+        expect(wrapper.find('.justify-content-center p').text()).toBe(
           'Die lokale Entwicklungsumgebung von Gradido.',
         )
       })
 
-      it('has a current community location', () => {
-        expect(wrapper.find('.header p.community-location').text()).toBe('http://localhost/vue/')
+      it('does not call community data update', () => {
+        expect(apolloQueryMock).not.toBeCalled()
       })
     })
 
@@ -49,12 +49,11 @@
   </div>
 </template>
 <script>
+import { getCommunityInfoMixin } from '../../mixins/getCommunityInfo'
+
 export default {
   name: 'registerCommunity',
-  data() {
-    return {}
-  },
-  methods: {},
+  mixins: [getCommunityInfoMixin],
 }
 </script>
 <style></style>
@@ -1,4 +1,5 @@
import { mount, RouterLinkStub } from '@vue/test-utils'
+ import { communities, communityInfo } from '../../graphql/queries'
import RegisterSelectCommunity from './RegisterSelectCommunity'

const localVue = global.localVue
@@ -11,35 +12,48 @@ const spinnerMock = jest.fn(() => {
  }
})

- const apolloQueryMock = jest.fn().mockResolvedValue({
-   data: {
-     communities: [
-       {
-         id: 1,
-         name: 'Gradido Entwicklung',
-         description: 'Die lokale Entwicklungsumgebung von Gradido.',
-         url: 'http://localhost/vue/',
-         registerUrl: 'http://localhost/vue/register-community',
-       },
-       {
-         id: 2,
-         name: 'Gradido Staging',
-         description: 'Der Testserver der Gradido-Akademie.',
-         url: 'https://stage1.gradido.net/vue/',
-         registerUrl: 'https://stage1.gradido.net/vue/register-community',
-       },
-       {
-         id: 3,
-         name: 'Gradido-Akademie',
-         description: 'Freies Institut für Wirtschaftsbionik.',
-         url: 'https://gradido.net',
-         registerUrl: 'https://gdd1.gradido.com/vue/register-community',
-       },
-     ],
-   },
- })
+ const apolloQueryMock = jest
+   .fn()
+   .mockResolvedValueOnce({
+     data: {
+       getCommunityInfo: {
+         name: 'test12',
+         description: 'test community 12',
+         url: 'http://test12.test12/',
+         registerUrl: 'http://test12.test12/vue/register',
+       },
+     },
+   })
+   .mockResolvedValue({
+     data: {
+       communities: [
+         {
+           id: 1,
+           name: 'Gradido Entwicklung',
+           description: 'Die lokale Entwicklungsumgebung von Gradido.',
+           url: 'http://localhost/vue/',
+           registerUrl: 'http://localhost/vue/register-community',
+         },
+         {
+           id: 2,
+           name: 'Gradido Staging',
+           description: 'Der Testserver der Gradido-Akademie.',
+           url: 'https://stage1.gradido.net/vue/',
+           registerUrl: 'https://stage1.gradido.net/vue/register-community',
+         },
+         {
+           id: 3,
+           name: 'Gradido-Akademie',
+           description: 'Freies Institut für Wirtschaftsbionik.',
+           url: 'https://gradido.net',
+           registerUrl: 'https://gdd1.gradido.com/vue/register-community',
+         },
+       ],
+     },
+   })

const toasterMock = jest.fn()
+ const mockStoreCommit = jest.fn()

describe('RegisterSelectCommunity', () => {
  let wrapper
@@ -50,12 +64,11 @@ describe('RegisterSelectCommunity', () => {
      },
      $t: jest.fn((t) => t),
      $store: {
+       commit: mockStoreCommit,
        state: {
          community: {
-           name: 'Gradido Entwicklung',
-           url: 'http://localhost/vue/',
-           registerUrl: 'http://localhost/vue/register',
-           description: 'Die lokale Entwicklungsumgebung von Gradido.',
+           name: '',
+           description: '',
          },
        },
      },
@@ -80,9 +93,23 @@ describe('RegisterSelectCommunity', () => {

  describe('mount', () => {
    beforeEach(() => {
+     jest.clearAllMocks()
      wrapper = Wrapper()
    })

+   it('calls the API to get the community info data', () => {
+     expect(apolloQueryMock).toBeCalledWith({
+       query: communityInfo,
+     })
+   })
+
+   it('calls the API to get the communities data', () => {
+     expect(apolloQueryMock).toBeCalledWith({
+       query: communities,
+       fetchPolicy: 'network-only',
+     })
+   })
+
    it('renders the Div Element "#register-select-community"', () => {
      expect(wrapper.find('div#register-select-community').exists()).toBeTruthy()
    })
@@ -91,8 +118,72 @@ describe('RegisterSelectCommunity', () => {
      expect(spinnerMock).toBeCalled()
    })

+   describe('communities gives back error', () => {
+     beforeEach(() => {
+       apolloQueryMock.mockRejectedValue({
+         message: 'Failed to get communities',
+       })
+       wrapper = Wrapper()
+     })
+
+     it('toasts an error message', () => {
+       expect(toasterMock).toBeCalledWith('Failed to get communities')
+     })
+   })
+
+   describe('Community data already loaded', () => {
+     beforeEach(() => {
+       jest.clearAllMocks()
+       mocks.$store.state.community = {
+         name: 'Gradido Entwicklung',
+         description: 'Die lokale Entwicklungsumgebung von Gradido.',
+         url: 'http://localhost/vue/',
+         registerUrl: 'http://localhost/vue/register-community',
+       }
+       wrapper = Wrapper()
+     })
+
+     it('does not call community info data when already filled', () => {
+       expect(apolloQueryMock).not.toBeCalledWith({
+         query: communityInfo,
+       })
+     })
+
+     it('has a Community name', () => {
+       expect(wrapper.find('.card-body b').text()).toBe('Gradido Entwicklung')
+     })
+
+     it('has a Community description', () => {
+       expect(wrapper.find('.card-body p').text()).toBe(
+         'Die lokale Entwicklungsumgebung von Gradido.',
+       )
+     })
+   })
+
    describe('calls the apollo query', () => {
      describe('server returns data', () => {
+       beforeEach(async () => {
+         wrapper = Wrapper()
+         await wrapper.setData({
+           communities: [
+             {
+               id: 2,
+               name: 'Gradido Staging',
+               description: 'Der Testserver der Gradido-Akademie.',
+               url: 'https://stage1.gradido.net/vue/',
+               registerUrl: 'https://stage1.gradido.net/vue/register-community',
+             },
+             {
+               id: 3,
+               name: 'Gradido-Akademie',
+               description: 'Freies Institut für Wirtschaftsbionik.',
+               url: 'https://gradido.net',
+               registerUrl: 'https://gdd1.gradido.com/vue/register-community',
+             },
+           ],
+         })
+       })
+
      it('calls the API to get the data', () => {
        expect(apolloQueryMock).toBeCalled()
      })
@@ -7,7 +7,7 @@
<b-card class="border-0 mb-0" bg-variant="primary">
  <b>{{ $store.state.community.name }}</b>
  <br />
- {{ $store.state.community.description }}
+ <p>{{ $store.state.community.description }}</p>
  <br />
  <router-link to="/register">
    <b-button variant="outline-secondary">
@@ -24,7 +24,7 @@
<b-card bg-variant="secondary">
  <b>{{ community.name }}</b>
  <br />
- {{ community.description }}
+ <p>{{ community.description }}</p>
  <br />
  <b>
    <small>
@@ -49,6 +49,7 @@
</template>
<script>
import { communities } from '../../graphql/queries'
+ import { getCommunityInfoMixin } from '../../mixins/getCommunityInfo'

export default {
  name: 'registerSelectCommunity',
@@ -58,6 +59,7 @@ export default {
      pending: true,
    }
  },
+ mixins: [getCommunityInfoMixin],
  methods: {
    async getCommunities() {
      const loader = this.$loading.show({
login_server/.gitignore
@@ -5,6 +5,5 @@ src/cpsp/*.h
src/cpsp/*.cpp
src/cpp/proto/
build*/
- /skeema/gradido_login/insert/crypto_key.sql

src/LOCALE/messages.pot
@@ -56,8 +56,7 @@ To update messages.pot run
This will be also called by ./scripts/build_debug.sh

## database
- Login-Server needs a db to run, it is tested with mariadb
- table definitions are found in folder ./skeema/gradido_login
+ Login-Server needs a db to run, it is tested with mariadb.
Currently at least one group must be present in table groups.
For example:
```sql
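The README's sample SQL is cut off by the diff context above. As a rough sketch only (not the repository's actual snippet), a single seed row for the `groups` table could look like the statement below; the column names follow the `groups` definition and the test data that appear elsewhere in this commit, and the values are placeholders.

```sql
-- Illustrative only: seed one group so the Login-Server has at least one entry.
-- Values are placeholders taken from this commit's test data, not official defaults.
INSERT INTO `groups` (`id`, `alias`, `name`, `url`, `description`)
VALUES (1, 'gdd1', 'Gradido1', 'gdd1.gradido.com', 'Der erste offizielle Gradido Server (zum Testen)');
```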
@@ -1,9 +0,0 @@
- CREATE TABLE `app_access_tokens` (
-   `id` int unsigned NOT NULL AUTO_INCREMENT,
-   `user_id` int NOT NULL,
-   `access_code` bigint unsigned NOT NULL,
-   `created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
-   `updated` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-   PRIMARY KEY (`id`),
-   UNIQUE KEY `access_code` (`access_code`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -1,15 +0,0 @@
- CREATE TABLE `elopage_buys` (
-   `id` int unsigned NOT NULL AUTO_INCREMENT,
-   `elopage_user_id` int DEFAULT NULL,
-   `affiliate_program_id` int NOT NULL,
-   `publisher_id` int NOT NULL,
-   `order_id` int NOT NULL,
-   `product_id` int NOT NULL,
-   `product_price` int NOT NULL,
-   `payer_email` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
-   `publisher_email` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
-   `payed` tinyint NOT NULL,
-   `success_date` datetime NOT NULL,
-   `event` varchar(255) NOT NULL,
-   PRIMARY KEY (`id`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -1,11 +0,0 @@
- CREATE TABLE `email_opt_in` (
-   `id` int unsigned NOT NULL AUTO_INCREMENT,
-   `user_id` int NOT NULL,
-   `verification_code` bigint unsigned NOT NULL,
-   `email_opt_in_type_id` int NOT NULL,
-   `created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
-   `resend_count` int DEFAULT '0',
-   `updated` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-   PRIMARY KEY (`id`),
-   UNIQUE KEY `verification_code` (`verification_code`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -1,6 +0,0 @@
- CREATE TABLE `email_opt_in_types` (
-   `id` int unsigned NOT NULL AUTO_INCREMENT,
-   `name` varchar(255) NOT NULL,
-   `description` varchar(255) NOT NULL,
-   PRIMARY KEY (`id`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -1,11 +0,0 @@
- CREATE TABLE `groups` (
-   `id` int unsigned NOT NULL AUTO_INCREMENT,
-   `alias` varchar(190) NOT NULL,
-   `name` varchar(255) NOT NULL,
-   `url` varchar(255) NOT NULL,
-   `host` varchar(255) DEFAULT "/",
-   `home` varchar(255) DEFAULT "/",
-   `description` text,
-   PRIMARY KEY (`id`),
-   UNIQUE KEY `alias` (`alias`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -1,13 +0,0 @@
- CREATE TABLE `pending_tasks` (
-   `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
-   `user_id` int UNSIGNED DEFAULT 0,
-   `request` varbinary(2048) NOT NULL,
-   `created` datetime NOT NULL,
-   `finished` datetime DEFAULT '2000-01-01 000000',
-   `result_json` text DEFAULT NULL,
-   `param_json` text DEFAULT NULL,
-   `task_type_id` int UNSIGNED NOT NULL,
-   `child_pending_task_id` int UNSIGNED DEFAULT 0,
-   `parent_pending_task_id` int UNSIGNED DEFAULT 0,
-   PRIMARY KEY (`id`)
- ) ENGINE = InnoDB DEFAULT CHARSET=utf8mb4;
@@ -1,7 +0,0 @@
- CREATE TABLE `roles` (
-   `id` int unsigned NOT NULL AUTO_INCREMENT,
-   `name` varchar(255) NOT NULL,
-   `description` varchar(255) NOT NULL,
-   `flags` bigint NOT NULL DEFAULT '0',
-   PRIMARY KEY (`id`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -1,7 +0,0 @@
- CREATE TABLE `user_backups` (
-   `id` int unsigned NOT NULL AUTO_INCREMENT,
-   `user_id` int NOT NULL,
-   `passphrase` text NOT NULL,
-   `mnemonic_type` int DEFAULT '-1',
-   PRIMARY KEY (`id`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -1,6 +0,0 @@
- CREATE TABLE `user_roles` (
-   `id` int unsigned NOT NULL AUTO_INCREMENT,
-   `user_id` int NOT NULL,
-   `role_id` int NOT NULL,
-   PRIMARY KEY (`id`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -1,21 +0,0 @@
- CREATE TABLE `users` (
-   `id` int unsigned NOT NULL AUTO_INCREMENT,
-   `email` varchar(191) NOT NULL,
-   `first_name` varchar(150) NOT NULL,
-   `last_name` varchar(255) DEFAULT '',
-   `username` varchar(255) DEFAULT '',
-   `description` text DEFAULT '',
-   `password` bigint unsigned DEFAULT '0',
-   `pubkey` binary(32) DEFAULT NULL,
-   `privkey` binary(80) DEFAULT NULL,
-   `email_hash` binary(32) DEFAULT NULL,
-   `created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
-   `email_checked` tinyint NOT NULL DEFAULT '0',
-   `passphrase_shown` tinyint NOT NULL DEFAULT '0',
-   `language` varchar(4) NOT NULL DEFAULT 'de',
-   `disabled` tinyint DEFAULT '0',
-   `group_id` int unsigned DEFAULT 0,
-   `publisher_id` int DEFAULT 0,
-   PRIMARY KEY (`id`),
-   UNIQUE KEY `email` (`email`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -13,6 +13,7 @@
#include "JsonCreateTransaction.h"
#include "JsonCreateUser.h"
#include "JsonGetLogin.h"
+ #include "JsonSignTransaction.h"
#include "JsonUnknown.h"
#include "JsonGetRunningUserTasks.h"
#include "JsonGetUsers.h"
@@ -77,6 +78,9 @@ Poco::Net::HTTPRequestHandler* JsonRequestHandlerFactory::createRequestHandler(c
else if (url_first_part == "/checkSessionState") {
    return new JsonCheckSessionState;
}
+ else if (url_first_part == "/signTransaction") {
+     return new JsonSignTransaction;
+ }
else if (url_first_part == "/checkUsername") {
    return new JsonCheckUsername;
}
login_server/src/cpp/JSONInterface/JsonSignTransaction.cpp (new file)
@@ -0,0 +1,48 @@
+ #include "JsonSignTransaction.h"
+ #include "lib/DataTypeConverter.h"
+
+ Poco::JSON::Object* JsonSignTransaction::handle(Poco::Dynamic::Var params)
+ {
+     auto result = checkAndLoadSession(params);
+     if (result) {
+         return result;
+     }
+
+     std::string bodyBytes_base64;
+     auto mm = MemoryManager::getInstance();
+
+     // if is json object
+     if (params.type() == typeid(Poco::JSON::Object::Ptr)) {
+         Poco::JSON::Object::Ptr paramJsonObject = params.extract<Poco::JSON::Object::Ptr>();
+         /// Throws a RangeException if the value does not fit
+         /// into the result variable.
+         /// Throws a NotImplementedException if conversion is
+         /// not available for the given type.
+         /// Throws InvalidAccessException if Var is empty.
+         try {
+             paramJsonObject->get("bodyBytes").convert(bodyBytes_base64);
+         }
+         catch (Poco::Exception& ex) {
+             return stateError("json exception", ex.displayText());
+         }
+     }
+     auto user = mSession->getNewUser();
+     auto keyPair = user->getGradidoKeyPair();
+     if (!keyPair) {
+         return stateError("error reading keys");
+     }
+
+     auto bodyBytes = DataTypeConverter::base64ToBin(bodyBytes_base64);
+     auto sign = keyPair->sign(bodyBytes_base64);
+     mm->releaseMemory(bodyBytes);
+
+     if (!sign) {
+         return stateError("error signing transaction");
+     }
+     auto sign_base64 = DataTypeConverter::binToBase64(sign);
+     mm->releaseMemory(sign);
+     result = stateSuccess();
+     result->set("sign", sign_base64);
+
+     return result;
+ }
login_server/src/cpp/JSONInterface/JsonSignTransaction.h (new file)
@@ -0,0 +1,15 @@
+ #ifndef __JSON_INTERFACE_JSON_SIGN_TRANSACTION_
+ #define __JSON_INTERFACE_JSON_SIGN_TRANSACTION_
+
+ #include "JsonRequestHandler.h"
+
+ class JsonSignTransaction : public JsonRequestHandler
+ {
+ public:
+     Poco::JSON::Object* handle(Poco::Dynamic::Var params);
+
+ protected:
+
+ };
+
+ #endif // __JSON_INTERFACE_JSON_SIGN_TRANSACTION_
@@ -58,7 +58,7 @@ bool EmailManager::init(const Poco::Util::LayeredConfiguration& cfg)
void EmailManager::addEmail(model::Email* email) {
    if (mDisableEmail) {
        std::string dateTimeString = Poco::DateTimeFormatter::format(Poco::DateTime(), "%d.%m.%y %H:%M:%S");
-       std::string log_message = dateTimeString + " Email should be sended to: ";
+       std::string log_message = dateTimeString + " Email should have been sent to: ";
        auto email_user = email->getUser();
        Poco::AutoPtr<model::table::User> email_model;
        if (email_user) {
@@ -148,7 +148,7 @@ Session* SessionManager::getNewSession(int* handle)
        mWorkingMutex.tryLock(500);
    }
    catch (Poco::TimeoutException &ex) {
-       printf("[%s] exception timout mutex: %s\n", functionName, ex.displayText().data());
+       printf("[%s] exception timeout mutex: %s\n", functionName, ex.displayText().data());
        return nullptr;
    }
    //mWorkingMutex.lock();
@@ -69,7 +69,7 @@ namespace controller {

    using namespace Poco::Data::Keywords;
    Poco::Data::Statement select(session);
-   select << "SELECT id, first_name, last_name, email, username, description, pubkey, created, email_checked, disabled, group_id FROM " << db->getTableName();
+   select << "SELECT id, first_name, last_name, email, username, description, pubkey, created, email_checked, disabled, group_id, publisher_id FROM " << db->getTableName();
    select << " where email_checked = 0 ";
    select, into(resultFromDB);
    if (searchString != "") {
@@ -439,6 +439,7 @@ namespace controller {
        return 0;
    auto cm = ConnectionManager::getInstance();
    auto em = ErrorManager::getInstance();
+   auto db = new model::table::User();
    static const char* function_name = "User::checkIfVerificationEmailsShouldBeResend";

    auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
@@ -446,8 +447,9 @@ namespace controller {
    std::vector<Poco::Tuple<int,Poco::DateTime>> results;
    int email_checked = 0;
    int resend_count = 1;
-   select << "select u.id, v.created from users as u "
-       << "LEFT JOIN email_opt_in as v ON(u.id = v.user_id) "
+   std::string table_name_email_opt_in = "login_email_opt_in";
+   select << "select u.id, v.created from " << db->getTableName() << " as u "
+       << "LEFT JOIN " << table_name_email_opt_in << " as v ON(u.id = v.user_id) "
        << "where u.email_checked = ? "
        << "AND v.resend_count <= ? "
        << "ORDER BY u.id, v.created " ,
@@ -519,14 +521,15 @@ namespace controller {
    {
        auto cm = ConnectionManager::getInstance();
        auto em = ErrorManager::getInstance();
+       auto db = new model::table::User();
        static const char* function_name = "User::addMissingEmailHashes";

        auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
        Poco::Data::Statement select(session);
        std::vector<Poco::Tuple<int, std::string>> results;

-       select << "select id, email from users "
-           << "where email_hash IS NULL "
+       select << "select id, email from " << db->getTableName()
+           << " where email_hash IS NULL "
            , Poco::Data::Keywords::into(results)
            ;
        int result_count = 0;
@@ -556,7 +559,7 @@ namespace controller {
        // update db
        // reuse connection, I hope it's working
        Poco::Data::Statement update(session);
-       update << "UPDATE users set email_hash = ? where id = ?"
+       update << "UPDATE " << db->getTableName() << " set email_hash = ? where id = ?"
            , Poco::Data::Keywords::use(updates);
        int updated_count = 0;
        try {
@@ -19,7 +19,7 @@ namespace model {


    // generic db operations
-   const char* getTableName() const { return "app_access_tokens"; }
+   const char* getTableName() const { return "login_app_access_tokens"; }
    std::string toString();

    inline Poco::UInt64 getCode() const { return mAccessCode; }
@@ -34,7 +34,7 @@ namespace model {
    ElopageBuy();

    // generic db operations
-   const char* getTableName() const { return "elopage_buys"; }
+   const char* getTableName() const { return "login_elopage_buys"; }

    std::string toString();

@@ -29,7 +29,7 @@ namespace model {


    // generic db operations
-   const char* getTableName() const { return "email_opt_in"; }
+   const char* getTableName() const { return "login_email_opt_in"; }
    std::string toString();

    inline Poco::UInt64 getCode() const { return mEmailVerificationCode; }
@@ -17,7 +17,7 @@ namespace model {
    Group(GroupTuple userTuple);

    // generic db operations
-   const char* getTableName() const { return "groups"; }
+   const char* getTableName() const { return "login_groups"; }
    std::string toString();

    inline const std::string& getAlias() const { return mAlias; }
@@ -30,7 +30,7 @@ namespace model {


    // generic db operations
-   const char* getTableName() const { return "pending_tasks"; }
+   const char* getTableName() const { return "login_pending_tasks"; }
    std::string toString();

    //! \brief update table row with current request
@@ -1,60 +0,0 @@
- #ifndef GRADIDO_LOGIN_SERVER_MODEL_TABLE_ROLES_INCLUDE
- #define GRADIDO_LOGIN_SERVER_MODEL_TABLE_ROLES_INCLUDE
-
- #include "ModelBase.h"
- #include "Poco/Types.h"
- #include "Poco/Tuple.h"
-
- namespace model {
-     namespace table {
-
-         enum RoleType {
-             ROLE_ADMIN = 1
-         };
-
-         class Roles : public ModelBase
-         {
-
-         };
-         /*
-         typedef Poco::Tuple<int, int, Poco::UInt64, int> EmailOptInTuple;
-
-         class EmailOptIn : public ModelBase
-         {
-         public:
-             EmailOptIn(const Poco::UInt64& code, int user_id, EmailOptInType type);
-             EmailOptIn(const Poco::UInt64& code, EmailOptInType type);
-             EmailOptIn(const EmailOptInTuple& tuple);
-             EmailOptIn();
-             ~EmailOptIn();
-
-             // generic db operations
-             const char* getTableName() { return "email_opt_in"; }
-             std::string toString();
-
-             inline Poco::UInt64 getCode() const { return mEmailVerificationCode; }
-             inline int getUserId() const { return mUserId; }
-             inline EmailOptInType getType() const { return static_cast<EmailOptInType>(mType); }
-             inline void setCode(Poco::UInt64 code) { mEmailVerificationCode = code; }
-             inline void setUserId(int user_Id) { mUserId = user_Id; }
-
-             static const char* typeToString(EmailOptInType type);
-         protected:
-             Poco::Data::Statement _loadFromDB(Poco::Data::Session session, const std::string& fieldName);
-             Poco::Data::Statement _loadIdFromDB(Poco::Data::Session session);
-             Poco::Data::Statement _loadMultipleFromDB(Poco::Data::Session session, const std::string& fieldName);
-             Poco::Data::Statement _loadFromDB(Poco::Data::Session session, const std::vector<std::string>& fieldNames, MysqlConditionType conditionType = MYSQL_CONDITION_AND);
-             Poco::Data::Statement _insertIntoDB(Poco::Data::Session session);
-
-             int mUserId;
-             // data type must be a multiple of 4
-             Poco::UInt64 mEmailVerificationCode;
-             int mType;
-
-         };
-         */
-     }
- }
-
-
- #endif //GRADIDO_LOGIN_SERVER_MODEL_TABLE_ROLES_INCLUDE
@@ -83,11 +83,11 @@ namespace model {


    if (mPasswordHashed) {
-       insert << "INSERT INTO users (email, first_name, last_name, username, description, password, email_hash, language, group_id, publisher_id) VALUES(?,?,?,?,?,?,?,?,?,?);",
+       insert << "INSERT INTO " << getTableName() << " (email, first_name, last_name, username, description, password, email_hash, language, group_id, publisher_id) VALUES(?,?,?,?,?,?,?,?,?,?);",
            use(mEmail), use(mFirstName), use(mLastName), use(mUsername), use(mDescription), bind(mPasswordHashed), use(mEmailHash), use(mLanguageKey), use(mGroupId), use(mPublisherId);
    }
    else {
-       insert << "INSERT INTO users (email, first_name, last_name, username, description, email_hash, language, group_id, publisher_id) VALUES(?,?,?,?,?,?,?,?,?);",
+       insert << "INSERT INTO " << getTableName() << " (email, first_name, last_name, username, description, email_hash, language, group_id, publisher_id) VALUES(?,?,?,?,?,?,?,?,?);",
            use(mEmail), use(mFirstName), use(mLastName), use(mUsername), use(mDescription), use(mEmailHash), use(mLanguageKey), use(mGroupId), use(mPublisherId);

    }
@@ -103,10 +103,13 @@ namespace model {
    }
    Poco::Data::Statement select(session);

-   select << "SELECT " << getTableName() << ".id, email, first_name, last_name, username, description, password, pubkey, privkey, email_hash, created, email_checked, language, disabled, group_id, publisher_id, user_roles.role_id "
+   std::string table_name_user_roles = "login_user_roles";
+
+   select << "SELECT " << getTableName() << ".id, email, first_name, last_name, username, description, password, pubkey, privkey, email_hash, created, email_checked, language, disabled, group_id, publisher_id, " << table_name_user_roles << ".role_id "
        << " FROM " << getTableName()
-       << " LEFT JOIN user_roles ON " << getTableName() << ".id = user_roles.user_id "
-       << " WHERE " << _fieldName << " = ?" ,
+       << " LEFT JOIN " << table_name_user_roles
+       << " ON " << getTableName() << ".id = " << table_name_user_roles << ".user_id "
+       << " WHERE " << _fieldName << " = ?; " ,
        into(mID), into(mEmail), into(mFirstName), into(mLastName), into(mUsername), into(mDescription), into(mPasswordHashed),
        into(mPublicKey), into(mPrivateKey), into(mEmailHash), into(mCreated), into(mEmailChecked),
        into(mLanguageKey), into(mDisabled), into(mGroupId), into(mPublisherId), into(mRole);
@@ -194,7 +197,7 @@ namespace model {

    Poco::Data::Statement update(session);

-   update << "UPDATE users SET password = ?, privkey = ? where id = ?;",
+   update << "UPDATE " << getTableName() << " SET password = ?, privkey = ? where id = ?;",
        bind(mPasswordHashed), use(mPrivateKey), use(mID);

@@ -221,7 +224,7 @@ namespace model {

    Poco::Data::Statement update(session);

-   update << "UPDATE users SET pubkey = ?, privkey = ? where id = ?;",
+   update << "UPDATE " << getTableName() << " SET pubkey = ?, privkey = ? where id = ?;",
        use(mPublicKey), use(mPrivateKey), use(mID);

@@ -246,7 +249,7 @@ namespace model {
    auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);

    Poco::Data::Statement update(session);
-   update << "UPDATE users SET first_name = ?, last_name = ?, username = ?, description = ?, disabled = ?, language = ?, publisher_id = ? where id = ?;",
+   update << "UPDATE " << getTableName() << " SET first_name = ?, last_name = ?, username = ?, description = ?, disabled = ?, language = ?, publisher_id = ? where id = ?;",
        use(mFirstName), use(mLastName), use(mUsername), use(mDescription), use(mDisabled), use(mLanguageKey), use(mPublisherId), use(mID);

    try {
@@ -54,7 +54,7 @@ namespace model {


    // generic db operations
-   const char* getTableName() const { return "users"; }
+   const char* getTableName() const { return "login_users"; }
    std::string toString();
    std::string toHTMLString();

@@ -17,7 +17,7 @@ namespace model {
    UserBackup();

    // generic db operations
-   const char* getTableName() const { return "user_backups"; }
+   const char* getTableName() const { return "login_user_backups"; }
    std::string toString();

    inline int getUserId() const { return mUserId; }
@@ -4,7 +4,6 @@
#include "ModelBase.h"
#include "Poco/Types.h"
#include "Poco/Tuple.h"
- //#include "Roles.h"

namespace model {
    namespace table {
@@ -25,7 +24,7 @@ namespace model {
    UserRole();

    // generic db operations
-   const char* getTableName() const { return "user_roles"; }
+   const char* getTableName() const { return "login_user_roles"; }
    std::string toString();

    inline int getUserId() const { return mUserId; }
|||||||
@@ -176,9 +176,9 @@ int load(int argc, char* argv[]) {

    // clean up and fill db
    std::string tables[] = {
-       "groups",
-       "users",
-       "user_roles"
+       "login_groups",
+       "login_users",
+       "login_user_roles"
    };
    for (int i = 0; i < 3; i++) {
        if (runMysql("TRUNCATE " + tables[i])) {
@@ -191,7 +191,7 @@ int load(int argc, char* argv[]) {

    std::stringstream ss;
    // password = TestP4ssword&H
-   ss << "INSERT INTO `users` (`id`, `email`, `first_name`, `last_name`, `username`, `password`, `pubkey`, `privkey`, `created`, `email_checked`, `passphrase_shown`, `language`, `disabled`, `group_id`) VALUES "
+   ss << "INSERT INTO `login_users` (`id`, `email`, `first_name`, `last_name`, `username`, `password`, `pubkey`, `privkey`, `created`, `email_checked`, `passphrase_shown`, `language`, `disabled`, `group_id`) VALUES "
        << "(1, 'd_schultz32@gmx.de', 'DDD', 'Schultz', 'Diddel', 18242007140018938940, 0x69f2fefd6fa6947a370b9f8d3147f6617cf67416517ce25cb2d63901c666933c, 0x567f3e623a1899d1f8d69190c5799433c134ce0137c0c38cc0347874586d6234a19f2a0b484e6cc1863502e580ae6c17db1131f29a35eba45a46be29c7ee592940a3bd3ad519075fdeed6e368f0eb818, '2020-02-20 16:05:44', 1, 0, 'de', 0, 1), ";

    // if this isn't the same, some tests will fail, so we update the test data here.
@@ -211,7 +211,7 @@ int load(int argc, char* argv[]) {
    }
    ss.str(std::string());

-   ss << "INSERT INTO `user_roles` (`id`, `user_id`, `role_id`) VALUES"
+   ss << "INSERT INTO `login_user_roles` (`id`, `user_id`, `role_id`) VALUES"
        << "(1, 3, 1);";

    if (runMysql(ss.str())) {
@@ -219,7 +219,7 @@ int load(int argc, char* argv[]) {
    }
    ss.str(std::string());

-   ss << "INSERT INTO `groups` (`id`, `alias`, `name`, `url`, `description`) VALUES"
+   ss << "INSERT INTO `login_groups` (`id`, `alias`, `name`, `url`, `description`) VALUES"
        << "(1, 'gdd1', 'Gradido1', 'gdd1.gradido.com', 'Der erste offizielle Gradido Server (zum Testen)'), "
        << "(2, 'gdd_test', 'Gradido Test', 'gdd1.gradido.com', 'Testgroup (zum Testen)'); ";
    if (runMysql(ss.str())) {
@@ -74,7 +74,8 @@ enum PageState {
    {
        //mSession->finalizeTransaction(false, true);
        //
-       if(!transaction.isNull() && transaction->getModel()->getUserId() == user_model->getID())
+       if(!transaction.isNull() &&
+          (transaction_body->isCreation() || transaction->getModel()->getUserId() == user_model->getID()))
        {
            if(pt->removeTask(transaction)) {
                transaction->deleteFromDB();
@@ -150,7 +151,7 @@ enum PageState {
            transaction_body = transaction->getTransactionBody();
            // user can only delete there own transactions
            // TODO: Auto timeout for community transactions
-           if(transaction->getModel()->getUserId() == user_model->getID()) {
+           if(transaction_body->isCreation() || transaction->getModel()->getUserId() == user_model->getID()) {
                transaction_removeable = true;
            }
        }
@@ -338,20 +339,19 @@ enum PageState {
            <%= gettext("Transaktion unterzeichnen") %>
        </button>
    <% } %>
+   <button type="submit" class="form-button button-cancel" name="skip" value="skip">
+       <i class="material-icons-outlined">debug-step-over</i>
+       <%= gettext("Transaktion überspringen") %>
+   </button>
    <% if(transaction_removeable) { %>
        <button type="submit" class="form-button button-cancel" name="abort" value="abort">
            <i class="material-icons-outlined">delete</i>
            <%= gettext("Transaktion verwerfen") %>
        </button>
-   <% } else { %>
-       <button type="submit" class="form-button button-cancel" name="skip" value="skip">
-           <i class="material-icons-outlined">debug-step-over</i>
-           <%= gettext("Transaktion überspringen") %>
-       </button>
    <% } %>
    </form>
<% } %>
</div>
</div>
</div>
<%@ include file="include/footer_chr.cpsp" %>
@@ -1,5 +0,0 @@
- [production]
- flavor=mariadb:10.5
- host=127.0.0.1
- port=3306
- user=root
@@ -1,3 +0,0 @@
- default-character-set=utf8mb4
- default-collation=utf8mb4_unicode_ci
- schema=gradido_login
@@ -3,29 +3,7 @@
#########################################################################################################
FROM mariadb/server:10.5 as mariadb_server

- ENV DOCKER_WORKDIR="/docker-entrypoint-initdb.d"
+ # ENV DOCKER_WORKDIR="/docker-entrypoint-initdb.d"

- RUN mkdir -p ${DOCKER_WORKDIR}
- WORKDIR ${DOCKER_WORKDIR}
+ # RUN mkdir -p ${DOCKER_WORKDIR}
+ # WORKDIR ${DOCKER_WORKDIR}
-
- # create databases
- COPY ./mariadb/setup_dbs.sql a1_setup_dbs.sql
- # login server db
- COPY ./login_server/skeema/ .
- RUN cd ./gradido_login/ && for f in *.sql; do cp -- "$f" "../b1_$f"; sed -i '1i use gradido_login;' "../b1_$f"; done
- COPY ./configs/login_server/setup_db_tables ./gradido_login/insert
- RUN cd ./gradido_login/insert && for f in *.sql; do cp -- "$f" "../../c1_$f"; sed -i '1i use gradido_login;' "../../c1_$f"; done
-
- #########################################################################################################
- # mariadb server with test dbs
- #########################################################################################################
- FROM mariadb_server as mariadb_server_test
-
- # create test databases
- COPY ./mariadb/setup_test_dbs.sql a2_setup_dbs.sql
-
- # login server test db
- COPY ./login_server/skeema/ .
- RUN cd ./gradido_login/ && for f in *.sql; do cp -- "$f" "../b2_$f"; sed -i '1i use gradido_login_test;' "../b2_$f"; done
- COPY ./configs/login_server/setup_db_tables ./gradido_login/insert
- RUN cd ./gradido_login/insert && for f in *.sql; do cp -- "$f" "../../c2_$f"; sed -i '1i use gradido_login_test;' "../../c2_$f"; done
@@ -1,7 +0,0 @@
- create database gradido_login
-   DEFAULT CHARACTER SET utf8mb4
-   DEFAULT COLLATE utf8mb4_unicode_ci;
- create database IF NOT EXISTS _skeema_tmp
-   DEFAULT CHARACTER SET utf8mb4
-   DEFAULT COLLATE utf8mb4_unicode_ci;
- FLUSH PRIVILEGES;
@@ -1,55 +0,0 @@
- #!/bin/bash
- COLOR_GREEN="\033[0;32m"
- COLOR_YELLOW="\e[33m"
- COLOR_NONE="\033[0m"
-
- LOGIN_DB_USER=gradido_login_live
- LOGIN_DB_NAME=gradido_login_live
- LOGIN_DB_PASSWD=$(< /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c${1:-32};echo);
-
- COMMUNITY_DB_USER=gradido_community_live
- COMMUNITY_DB_NAME=gradido_community_live
- COMMUNITY_DB_PASSWD=$(< /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c${1:-32};echo);
-
- # create table
- mysql <<EOFMYSQL
- create database $LOGIN_DB_NAME
-   DEFAULT CHARACTER SET utf8mb4
-   DEFAULT COLLATE utf8mb4_unicode_ci;
- create database $COMMUNITY_DB_NAME
-   DEFAULT CHARACTER SET utf8mb4
-   DEFAULT COLLATE utf8mb4_unicode_ci;
- create database IF NOT EXISTS _skeema_tmp
-   DEFAULT CHARACTER SET utf8mb4
-   DEFAULT COLLATE utf8mb4_unicode_ci;
- CREATE USER '$LOGIN_DB_USER'@'localhost' IDENTIFIED BY '$LOGIN_DB_PASSWD';
- GRANT ALL PRIVILEGES ON $LOGIN_DB_NAME.* TO '$LOGIN_DB_USER'@'localhost';
- GRANT ALL PRIVILEGES ON _skeema_tmp.* TO '$LOGIN_DB_USER'@'localhost';
-
- CREATE USER '$COMMUNITY_DB_USER'@'localhost' IDENTIFIED BY '$COMMUNITY_DB_PASSWD';
- GRANT ALL PRIVILEGES ON $COMMUNITY_DB_NAME.* TO '$COMMUNITY_DB_USER'@'localhost';
- GRANT ALL PRIVILEGES ON _skeema_tmp.* TO '$COMMUNITY_DB_USER'@'localhost';
- FLUSH PRIVILEGES;
- EOFMYSQL
-
- # populate db of login-server
- cd ../login_server/skeema
- sudo cat << EOF > .skeema
- [production]
- flavor=mariadb:10.3.25
- host=127.0.0.1
- port=3306
- user=$LOGIN_DB_USER
- EOF
- cd gradido_login
- sudo cat << EOF > .skeema
- default-character-set=utf8mb4
- default-collation=utf8mb4_unicode_ci
- schema=$LOGIN_DB_NAME
- EOF
-
- source $HOME/.gvm/scripts/gvm
- gvm use go1.14.4
- skeema push -p$LOGIN_DB_PASSWD
-
- echo -e "${COLOR_YELLOW}Login-Server db password: $LOGIN_DB_PASSWD${COLOR_NONE}"
@@ -1,10 +0,0 @@
- create database gradido_login_test
-   DEFAULT CHARACTER SET utf8mb4
-   DEFAULT COLLATE utf8mb4_unicode_ci;
- create database gradido_community_test
-   DEFAULT CHARACTER SET utf8mb4
-   DEFAULT COLLATE utf8mb4_unicode_ci;
- create database IF NOT EXISTS _skeema_tmp
-   DEFAULT CHARACTER SET utf8mb4
-   DEFAULT COLLATE utf8mb4_unicode_ci;
- FLUSH PRIVILEGES;
Some files were not shown because too many files have changed in this diff.