Merge branch 'master' into setup-admin-interface

Moriz Wahl 2021-11-17 13:04:56 +01:00
commit 990aabfd0c
116 changed files with 11607 additions and 9163 deletions

View File

@ -173,7 +173,7 @@ jobs:
##########################################################################
- name: mariadb | Build `test` image
run: |
docker build --target mariadb_server_test -t "gradido/mariadb:test" -f ./mariadb/Dockerfile ./
docker build --target mariadb_server -t "gradido/mariadb:test" -f ./mariadb/Dockerfile ./
docker save "gradido/mariadb:test" > /tmp/mariadb.tar
- name: Upload Artifact
uses: actions/upload-artifact@v2
@ -399,7 +399,7 @@ jobs:
report_name: Coverage Frontend
type: lcov
result_path: ./coverage/lcov.info
min_coverage: 83
min_coverage: 85
token: ${{ github.token }}
##############################################################################
@ -450,7 +450,7 @@ jobs:
unit_test_backend:
name: Unit tests - Backend
runs-on: ubuntu-latest
needs: [build_test_backend]
needs: [build_test_backend, build_test_mariadb]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -460,6 +460,13 @@ jobs:
##########################################################################
# DOWNLOAD DOCKER IMAGES #################################################
##########################################################################
- name: Download Docker Image (Mariadb)
uses: actions/download-artifact@v2
with:
name: docker-mariadb-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/mariadb.tar
- name: Download Docker Image (Backend)
uses: actions/download-artifact@v2
with:
@ -470,10 +477,11 @@ jobs:
##########################################################################
# UNIT TESTS BACKEND #####################################################
##########################################################################
- name: backend | Unit tests
run: |
docker run -v ~/coverage:/app/coverage --rm gradido/backend:test yarn run test
cp -r ~/coverage ./coverage
- name: backend | docker-compose
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb database
- name: backend Unit tests | test
run: cd database && yarn && cd ../backend && yarn && yarn test
# run: docker-compose -f docker-compose.yml -f docker-compose.test.yml exec -T backend yarn test
##########################################################################
# COVERAGE CHECK BACKEND #################################################
##########################################################################
@ -482,8 +490,8 @@ jobs:
with:
report_name: Coverage Backend
type: lcov
result_path: ./coverage/lcov.info
min_coverage: 1
result_path: ./backend/coverage/lcov.info
min_coverage: 39
token: ${{ github.token }}
##############################################################################
@ -506,10 +514,16 @@ jobs:
--health-timeout=3s
--health-retries=4
steps:
- name: Debug service
run: echo "$(docker ps)"
- name: Debug container choosing script
run: echo "$(docker container ls | grep mariadb | awk '{ print $1 }')"
# - name: Debug service
# run: echo "$(docker ps)"
#- name: Debug container choosing script
# run: echo "$(docker container ls | grep mariadb | awk '{ print $1 }')"
- name: get mariadb container id
run: echo "::set-output name=id::$(docker container ls | grep mariadb | awk '{ print $1 }')"
id: mariadb_container
- name: get automatically created network
run: echo "::set-output name=id::$(docker network ls | grep github_network | awk '{ print $1 }')"
id: network
##########################################################################
# CHECKOUT CODE ##########################################################
##########################################################################
@ -517,6 +531,11 @@ jobs:
uses: actions/checkout@v2
with:
submodules: true
# Database migration
- name: Start database migration
run: |
docker build --target production_up -t "gradido/database:production_up" database/
docker run --network ${{ steps.network.outputs.id }} --name=database --env NODE_ENV=production --env DB_HOST=mariadb --env DB_DATABASE=gradido_community_test -d gradido/database:production_up
##########################################################################
# Build Login-Server Test Docker image ###################################
##########################################################################

1
.gitignore vendored
View File

@ -2,7 +2,6 @@
/node_modules/*
.vscode
messages.pot
.skeema
nbproject
.metadata
/.env

7
.vscode/extensions.json vendored Normal file
View File

@ -0,0 +1,7 @@
{
"recommendations": [
"streetsidesoftware.code-spell-checker",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode"
]
}

View File

@ -8,33 +8,73 @@ The Gradido model can create global prosperity and peace
The Corona crisis has fundamentally changed our world within a very short time.
The dominant financial system threatens to fail around the globe, followed by mass insolvencies, record unemployment and abject poverty. Only with a sustainable new monetary system can humanity master these challenges of the 21st century. The Gradido Academy for Bionic Economy has developed such a system.
Find out more about the project on its [Website](https://gradido.net/), which offers vast resources about the idea. The remainder of this document discusses the Gradido software only.
## Software requirements
Currently we only support `docker` as environment to run all services, since many different programming languages and frameworks are used.
Currently we only provide `docker` install instructions to run all services, since many different programming languages and frameworks are used.
- [docker](https://www.docker.com/)
- [docker](https://www.docker.com/)
- [docker-compose](https://docs.docker.com/compose/)
### For Arch Linux
Install the required packages:
```bash
sudo pacman -S docker
sudo pacman -S docker-compose
```
Add the group `docker` and then add your user to it, to allow running docker without sudo:
```bash
sudo groupadd docker # may already exist `groupadd: group 'docker' already exists`
sudo usermod -aG docker $USER
groups # verify you have the group (requires relog)
```
Start the docker service:
```bash
sudo systemctl start docker
```
## How to run?
1. Clone the repo and pull all submodules
### 1. Clone Sources
Clone the repo and pull all submodules
```bash
git clone git@github.com:gradido/gradido.git
git submodule update --recursive --init
```
2. Run docker compose
1. Run docker compose for the debug build
### 2. Run docker-compose
Run docker-compose to bring up the development environment
```bash
docker-compose up
```
### Additional Build options
If you want to build for production you can do this as well:
```bash
docker-compose -f docker-compose.yml up
```
```bash
docker-compose up
```
## Services defined in this package
2. Or run docker compose in production build
- [frontend](./frontend) Wallet frontend
- [backend](./backend) GraphQL & Business logic backend
- [mariadb](./mariadb) Database backend
- [login_server](./login_server) User credential storage & business logic backend
- [community_server](./community_server/) Business logic backend
```bash
docker-compose -f docker-compose.yml up
```
We are currently restructuring the services to reduce dependencies and unify the business logic in one place. Furthermore, the databases defined for each service will be unified into one.
### Open the wallet
Once you have `docker-compose` up and running, you can open [http://localhost/vue](http://localhost/vue) and create yourself a new wallet account.
## Troubleshooting
| Problem | Issue | Solution | Description |
| ------- | ----- | -------- | ----------- |
| docker-compose raises database connection errors | [#1062](https://github.com/gradido/gradido/issues/1062) | Press `ctrl+c` to stop, then restart `docker-compose up` after a successful build | Several database-connection errors occur in the docker-compose log. |
| Wallet page is empty | [#1063](https://github.com/gradido/gradido/issues/1063) | Accept Cookies and Local Storage in your Browser | The page stays empty when navigating to [http://localhost/vue](http://localhost/vue) |
## Useful Links

View File

@ -10,6 +10,16 @@ DB_PORT=3306
DB_USER=root
DB_PASSWORD=
DB_DATABASE=gradido_community
#EMAIL=true
#EMAIL_USERNAME=
#EMAIL_SENDER=
#EMAIL_PASSWORD=
#EMAIL_SMTP_URL=
#EMAIL_SMTP_PORT=587
#EMAIL_LINK_VERIFICATION=http://localhost/vue/checkEmail/$1
#KLICKTIPP_USER=
#KLICKTIPP_PASSWORD=
#KLICKTIPP_APIKEY_DE=
@ -18,4 +28,6 @@ DB_DATABASE=gradido_community
COMMUNITY_NAME=
COMMUNITY_URL=
COMMUNITY_REGISTER_URL=
COMMUNITY_DESCRIPTION=
COMMUNITY_DESCRIPTION=
LOGIN_APP_SECRET=21ffbbc616fe
LOGIN_SERVER_KEY=a51ef8ac7ef1abf162fb7a65261acd7a

View File

@ -85,7 +85,7 @@ RUN cd ../database && yarn run build
FROM build as test
# Run command
CMD /bin/sh -c "yarn run dev"
CMD /bin/sh -c "yarn run start"
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #

7120
backend/package-lock.json generated

File diff suppressed because it is too large

View File

@ -13,11 +13,12 @@
"start": "node build/index.js",
"dev": "nodemon -w src --ext ts --exec ts-node src/index.ts",
"lint": "eslint . --ext .js,.ts",
"test": "jest --coverage"
"test": "jest --runInBand --coverage "
},
"dependencies": {
"@types/jest": "^27.0.2",
"apollo-server-express": "^2.25.2",
"apollo-server-testing": "^2.25.2",
"axios": "^0.21.1",
"class-validator": "^0.13.1",
"cors": "^2.8.5",
@ -26,10 +27,12 @@
"graphql": "^15.5.1",
"jest": "^27.2.4",
"jsonwebtoken": "^8.5.1",
"libsodium-wrappers": "^0.7.9",
"module-alias": "^2.2.2",
"mysql2": "^2.3.0",
"nodemailer": "^6.6.5",
"random-bigint": "^0.0.1",
"reflect-metadata": "^0.1.13",
"sodium-native": "^3.3.0",
"ts-jest": "^27.0.5",
"type-graphql": "^1.1.1",
"typeorm": "^0.2.38"
@ -37,7 +40,8 @@
"devDependencies": {
"@types/express": "^4.17.12",
"@types/jsonwebtoken": "^8.5.2",
"@types/libsodium-wrappers": "^0.7.9",
"@types/node": "^16.10.3",
"@types/nodemailer": "^6.4.4",
"@typescript-eslint/eslint-plugin": "^4.28.0",
"@typescript-eslint/parser": "^4.28.0",
"eslint": "^7.29.0",
@ -53,6 +57,6 @@
"typescript": "^4.3.4"
},
"_moduleAliases": {
"@entity" : "../database/build/entity"
"@entity": "../database/build/entity"
}
}
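The `_moduleAliases` entry above is what lets the backend import the shared database entities by prefix. A small illustrative sketch of how it resolves, assuming `module-alias/register` is loaded at process start (as `src/index.ts` does in this diff):

```ts
// Illustrative only: with the "_moduleAliases" mapping above,
// '@entity' resolves to ../database/build/entity.
import 'module-alias/register'
import { User as dbUser } from '@entity/User' // -> ../database/build/entity/User
```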

View File

@ -39,9 +39,26 @@ const community = {
process.env.COMMUNITY_DESCRIPTION || 'Die lokale Entwicklungsumgebung von Gradido.',
}
const loginServer = {
LOGIN_APP_SECRET: process.env.LOGIN_APP_SECRET || '21ffbbc616fe',
LOGIN_SERVER_KEY: process.env.LOGIN_SERVER_KEY || 'a51ef8ac7ef1abf162fb7a65261acd7a',
}
const email = {
EMAIL: process.env.EMAIL === 'true' || false,
EMAIL_USERNAME: process.env.EMAIL_USERNAME || 'gradido_email',
EMAIL_SENDER: process.env.EMAIL_SENDER || 'info@gradido.net',
EMAIL_PASSWORD: process.env.EMAIL_PASSWORD || 'xxx',
EMAIL_SMTP_URL: process.env.EMAIL_SMTP_URL || 'gmail.com',
EMAIL_SMTP_PORT: process.env.EMAIL_SMTP_PORT || '587',
EMAIL_LINK_VERIFICATION:
process.env.EMAIL_LINK_VERIFICATION || 'http://localhost/vue/checkEmail/$1',
}
// This is needed by graphql-directive-auth
process.env.APP_SECRET = server.JWT_SECRET
const CONFIG = { ...server, ...database, ...klicktipp, ...community }
const CONFIG = { ...server, ...database, ...klicktipp, ...community, ...email, ...loginServer }
export default CONFIG

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -4,7 +4,4 @@ import { ArgsType, Field } from 'type-graphql'
export default class CheckUsernameArgs {
@Field(() => String)
username: string
@Field(() => Number, { nullable: true })
groupId?: number
}

View File

@ -1,21 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import { ObjectType, Field } from 'type-graphql'
@ObjectType()
export class CheckUsernameResponse {
constructor(json: any) {
this.state = json.state
this.msg = json.msg
this.groupId = json.group_id
}
@Field(() => String)
state: string
@Field(() => String)
msg?: string
@Field(() => Number)
groupId?: number
}

View File

@ -0,0 +1,123 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import { createTestClient } from 'apollo-server-testing'
import createServer from '../../server/createServer'
import CONFIG from '../../config'
jest.mock('../../config')
let query: any
// TODO: We need a setup for the tests that closes the connection
let con: any
beforeAll(async () => {
const server = await createServer({})
con = server.con
query = createTestClient(server.apollo).query
})
afterAll(async () => {
await con.close()
})
describe('CommunityResolver', () => {
const getCommunityInfoQuery = `
query {
getCommunityInfo {
name
description
url
registerUrl
}
}
`
const communities = `
query {
communities {
id
name
url
description
registerUrl
}
}
`
describe('getCommunityInfo', () => {
it('returns the default values', async () => {
await expect(query({ query: getCommunityInfoQuery })).resolves.toMatchObject({
data: {
getCommunityInfo: {
name: 'Gradido Entwicklung',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register',
},
},
})
})
})
describe('communities', () => {
describe('PRODUCTION = false', () => {
beforeEach(() => {
CONFIG.PRODUCTION = false
})
it('returns three communities', async () => {
await expect(query({ query: communities })).resolves.toMatchObject({
data: {
communities: [
{
id: 1,
name: 'Gradido Entwicklung',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register-community',
},
{
id: 2,
name: 'Gradido Staging',
description: 'Der Testserver der Gradido-Akademie.',
url: 'https://stage1.gradido.net/vue/',
registerUrl: 'https://stage1.gradido.net/vue/register-community',
},
{
id: 3,
name: 'Gradido-Akademie',
description: 'Freies Institut für Wirtschaftsbionik.',
url: 'https://gradido.net',
registerUrl: 'https://gdd1.gradido.com/vue/register-community',
},
],
},
})
})
})
describe('PRODUCTION = true', () => {
beforeEach(() => {
CONFIG.PRODUCTION = true
})
it('returns one community', async () => {
await expect(query({ query: communities })).resolves.toMatchObject({
data: {
communities: [
{
id: 3,
name: 'Gradido-Akademie',
description: 'Freies Institut für Wirtschaftsbionik.',
url: 'https://gradido.net',
registerUrl: 'https://gdd1.gradido.com/vue/register-community',
},
],
},
})
})
})
})
})

View File

@ -1,10 +1,12 @@
/* eslint-disable new-cap */
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import { Resolver, Query, Args, Authorized, Ctx, Mutation } from 'type-graphql'
import { getCustomRepository } from 'typeorm'
import { getCustomRepository, getConnection, QueryRunner } from 'typeorm'
import CONFIG from '../../config'
import { sendEMail } from '../../util/sendEMail'
import { Transaction } from '../model/Transaction'
import { TransactionList } from '../model/TransactionList'
@ -22,12 +24,181 @@ import { TransactionRepository } from '../../typeorm/repository/Transaction'
import { User as dbUser } from '@entity/User'
import { UserTransaction as dbUserTransaction } from '@entity/UserTransaction'
import { Transaction as dbTransaction } from '@entity/Transaction'
import { TransactionSendCoin as dbTransactionSendCoin } from '@entity/TransactionSendCoin'
import { Balance as dbBalance } from '@entity/Balance'
import { apiPost } from '../../apis/HttpRequest'
import { roundFloorFrom4, roundCeilFrom4 } from '../../util/round'
import { calculateDecay, calculateDecayWithInterval } from '../../util/decay'
import { TransactionTypeId } from '../enum/TransactionTypeId'
import { TransactionType } from '../enum/TransactionType'
import { hasUserAmount, isHexPublicKey } from '../../util/validate'
/*
# Test
## Prepare
> sudo systemctl start docker
> docker-compose up mariadb
> DROP all databases
> docker-compose down
> docker-compose up mariadb database
> verify there is exactly one database `gradido_community`
TODO:
INSERT INTO `login_groups` (`id`, `alias`, `name`, `url`, `host`, `home`, `description`) VALUES
(1, 'docker', 'docker gradido group', 'localhost', 'nginx', '/', 'gradido test group for docker and stage2 with blockchain db');
>> Database is cool
### Start login server
> docker-compose up login-server community-server nginx
>> Login & community servers and nginx proxy are up and running
## Build database
> cd database
> yarn
> yarn build
> cd ..
>> Database has been built successfully
### Start backend (no docker for debugging)
> cd backend
> yarn
> yarn dev
>> Backend is up and running
### Create users
> chromium http://localhost:4000/graphql
> mutation{createUser(email: "receiver@user.net", firstName: "Receiver", lastName: "user", password: "123!AAAb", language: "de")}
> mutation{createUser(email: "sender@user.net", firstName: "Sender", lastName: "user", password: "123!AAAb", language: "de")}
> mutation{createUser(email: "creator@user.net", firstName: "Creator", lastName: "user", password: "123!AAAb", language: "de")}
>> Verify you have 3 entries in `login_users`, `login_user_backups` and `state_users`
### make creator an admin
> INSERT INTO login_user_roles (id, user_id, role_id) VALUES (NULL, '3', '1');
> UPDATE login_users SET email_checked = 1 WHERE id = 3;
> uncomment line: 19 in community_server/src/Controller/ServerUsersController.php
> chromium http://localhost/server-users/add
> create user `creator` `123` `creator@different.net`
>> verify you have 1 entry in `server_users`
> login with user on http://localhost/server-users
> activate server user by changing the corresponding flag in the interface
> navigate to http://localhost/transaction-creations/create-multi
> create 1000GDD for user sender@user.net
> navigate to http://localhost
> login with `creator@user.net` `123!AAAb`
> confirm transaction (top right corner - click the thingy, click the green button `Transaktion abschließen`)
### the test:
> chromium http://localhost:4000/graphql
> query{login(email: "sender@user.net", password: "123!AAAb"){pubkey}}
>> copy token from network tab (inspect)
> mutation{sendCoins(email: "receiver@user.net", amount: 10.0, memo: "Hier!")}
> mutation{sendCoins(email: "receiver@user.net", amount: 10.0, memo: "Hier!")}
> Headers: {"Authorization": "Bearer ${token}"}
>> Verify via database that the data is as it should be, see `state_balances` & `transaction_send_coins`
### create decay block
> chromium http://localhost/transactions/add
> login with `creator` `123`
> select `decay start`
> press submit
> wait until a decay of at least 0.02 is displayed for user sender@user.net in the old frontend, this should take approx. 10min
> chromium http://localhost:4000/graphql
> query{login(email: "sender@user.net", password: "123!AAAb"){pubkey}}
>> copy token from network tab (inspect)
> mutation{sendCoins(email: "receiver@user.net", amount: 10.0, memo: "Hier!")}
>> verify in `transaction_send_coins` that a decay was taken into account
>> same in `state_balances`
>> now check the old frontend
>>> sender@user.net should have a decay of 0.02
>>> while receiver@user.net should have zero decay on anything (old frontend)
### Export data
> docker-compose up phpmyadmin
> chromium http://localhost:8074/
> select gradido_community
> export
> select custom
> untick structure
> ok
## Results
NOTE: We decided not to write the `transaction_signatures` since it's unused. This is the main difference.
NOTE: We fixed a bug in the `state_user_transactions` code with the new implementation of apollo
Master:
--
-- Dumping data for table `state_user_transactions`
--
INSERT INTO `state_user_transactions` (`id`, `state_user_id`, `transaction_id`, `transaction_type_id`, `balance`, `balance_date`) VALUES
(1, 2, 1, 1, 10000000, '2021-11-05 12:45:18'),
(2, 2, 2, 2, 9900000, '2021-11-05 12:48:35'),
(3, 1, 2, 2, 100000, '2021-11-05 12:48:35'),
(4, 2, 3, 2, 9800000, '2021-11-05 12:49:07'),
(5, 1, 3, 2, 200000, '2021-11-05 12:49:07'),
(6, 2, 5, 2, 9699845, '2021-11-05 13:03:50'),
(7, 1, 5, 2, 99996, '2021-11-05 13:03:50');
--
-- Dumping data for table `transactions`
--
INSERT INTO `transactions` (`id`, `state_group_id`, `transaction_type_id`, `tx_hash`, `memo`, `received`, `blockchain_type_id`) VALUES
(1, NULL, 1, 0x9ccdcd01ccb6320c09c2d1da2f0bf735a95ece0e7c1df6bbff51918fbaec061700000000000000000000000000000000, '', '2021-11-05 12:45:18', 1),
(2, NULL, 2, 0x58d7706a67fa4ff4b8038168c6be39a2963d7e28e9d3872759ad09c519fe093700000000000000000000000000000000, 'Hier!', '2021-11-05 12:48:35', 1),
(3, NULL, 2, 0x427cd214f92ef35af671129d50edc5a478c53d1e464f285b7615d9794a69f69b00000000000000000000000000000000, 'Hier!', '2021-11-05 12:49:07', 1),
(4, NULL, 9, 0x32807368f0906a21b94c072599795bc9eeab88fb565df82e85cc62a4fdcde48500000000000000000000000000000000, '', '2021-11-05 12:51:51', 1),
(5, NULL, 2, 0x75eb729e0f60a1c8cead1342955853d2440d7a2ea57dfef6d4a18bff0d94491e00000000000000000000000000000000, 'Hier!', '2021-11-05 13:03:50', 1);
--
-- Dumping data for table `transaction_signatures`
--
INSERT INTO `transaction_signatures` (`id`, `transaction_id`, `signature`, `pubkey`) VALUES
(1, 1, 0x5888edcdcf77aaadad6d321882903bc831d7416f17213fd5020a764365b5fcb336e4c7917385a1278ea44ccdb31eac4a09e448053b5e3f8f1fe5da3baf53c008, 0xd5b20f8dee415038bfa2b6b0e1b40ff54850351109444863b04d6d28825b7b7d),
(2, 2, 0xf6fef428f8f22faf7090f7d740e6088d1d90c58ae92d757117d7d91d799e659f3a3a0c65a3fd97cbde798e761f9d23eff13e8810779a184c97c411f28e7c4608, 0xdc74a589004377ab14836dce68ce2ca34e5b17147cd78ad4b3afe8137524ae8a),
(3, 3, 0x8ebe9730c6cf61f56ef401d6f2bd229f3c298ca3c2791ee9137e4827b7af6c6d6566fca616eb1fe7adc2e4d56b5c7350ae3990c9905580630fa75ecffca8e001, 0xdc74a589004377ab14836dce68ce2ca34e5b17147cd78ad4b3afe8137524ae8a),
(4, 5, 0x50cf418f7e217391e89ab9c2879ae68d7c7c597d846b4fe1c082b5b16e5d0c85c328fbf48ad3490bcfe94f446700ae0a4b0190e76d26cc752abced58f480c80f, 0xdc74a589004377ab14836dce68ce2ca34e5b17147cd78ad4b3afe8137524ae8a);
This Feature Branch:
--
-- Dumping data for table `state_user_transactions`
--
INSERT INTO `state_user_transactions` (`id`, `state_user_id`, `transaction_id`, `transaction_type_id`, `balance`, `balance_date`) VALUES
(1, 2, 1, 1, 10000000, '2021-11-05 00:25:46'),
(12, 2, 7, 2, 9900000, '2021-11-05 00:55:37'),
(13, 1, 7, 2, 100000, '2021-11-05 00:55:37'),
(14, 2, 8, 2, 9800000, '2021-11-05 01:00:04'),
(15, 1, 8, 2, 200000, '2021-11-05 01:00:04'),
(16, 2, 10, 2, 9699772, '2021-11-05 01:17:41'),
(17, 1, 10, 2, 299995, '2021-11-05 01:17:41');
--
-- Dumping data for table `transactions`
--
INSERT INTO `transactions` (`id`, `state_group_id`, `transaction_type_id`, `tx_hash`, `memo`, `received`, `blockchain_type_id`) VALUES
(1, NULL, 1, 0xdd030d475479877587d927ed9024784ba62266cf1f3d87862fc98ad68f7b26e400000000000000000000000000000000, '', '2021-11-05 00:25:46', 1),
(7, NULL, 2, NULL, 'Hier!', '2021-11-05 00:55:37', 1),
(8, NULL, 2, NULL, 'Hier!', '2021-11-05 01:00:04', 1),
(9, NULL, 9, 0xb1cbedbf126aa35f5edbf06e181c415361d05228ab4da9d19a4595285a673dfa00000000000000000000000000000000, '', '2021-11-05 01:05:34', 1),
(10, NULL, 2, NULL, 'Hier!', '2021-11-05 01:17:41', 1);
--
-- Dumping data for table `transaction_signatures`
--
INSERT INTO `transaction_signatures` (`id`, `transaction_id`, `signature`, `pubkey`) VALUES
(1, 1, 0x60d632479707e5d01cdc32c3326b5a5bae11173a0c06b719ee7b552f9fd644de1a0cd4afc207253329081d39dac1a63421f51571d836995c649fc39afac7480a, 0x48c45cb4fea925e83850f68f2fa8f27a1a4ed1bcba68cdb59fcd86adef3f52ee);
*/
// Helper function
async function calculateAndAddDecayTransactions(
@ -210,6 +381,87 @@ async function listTransactions(
return transactionList
}
// helper helper function
async function updateStateBalance(
user: dbUser,
centAmount: number,
received: Date,
queryRunner: QueryRunner,
): Promise<dbBalance> {
const balanceRepository = getCustomRepository(BalanceRepository)
let balance = await balanceRepository.findByUser(user.id)
if (!balance) {
balance = new dbBalance()
balance.userId = user.id
balance.amount = centAmount
balance.modified = received
} else {
const decayedBalance = await calculateDecay(balance.amount, balance.recordDate, received).catch(
() => {
throw new Error('error calculating decay')
},
)
balance.amount = Number(decayedBalance) + centAmount
balance.modified = new Date()
}
if (balance.amount <= 0) {
throw new Error('error new balance <= 0')
}
balance.recordDate = received
return queryRunner.manager.save(balance).catch((error) => {
throw new Error('error saving balance: ' + error)
})
}
// helper helper function
async function addUserTransaction(
user: dbUser,
transaction: dbTransaction,
centAmount: number,
queryRunner: QueryRunner,
): Promise<dbUserTransaction> {
let newBalance = centAmount
const userTransactionRepository = getCustomRepository(UserTransactionRepository)
const lastUserTransaction = await userTransactionRepository.findLastForUser(user.id)
if (lastUserTransaction) {
newBalance += Number(
await calculateDecay(
Number(lastUserTransaction.balance),
lastUserTransaction.balanceDate,
transaction.received,
).catch(() => {
throw new Error('error calculating decay')
}),
)
}
if (newBalance <= 0) {
throw new Error('error new balance <= 0')
}
const newUserTransaction = new dbUserTransaction()
newUserTransaction.userId = user.id
newUserTransaction.transactionId = transaction.id
newUserTransaction.transactionTypeId = transaction.transactionTypeId
newUserTransaction.balance = newBalance
newUserTransaction.balanceDate = transaction.received
return queryRunner.manager.save(newUserTransaction).catch((error) => {
throw new Error('Error saving user transaction: ' + error)
})
}
async function getPublicKey(email: string, sessionId: number): Promise<string | undefined> {
const result = await apiPost(CONFIG.LOGIN_API_URL + 'getUserInfos', {
session_id: sessionId,
email,
ask: ['user.pubkeyhex'],
})
if (result.success) {
return result.data.userData.pubkeyhex
}
}
@Resolver()
export class TransactionResolver {
@Authorized()
@ -252,19 +504,148 @@ export class TransactionResolver {
@Args() { email, amount, memo }: TransactionSendArgs,
@Ctx() context: any,
): Promise<string> {
const payload = {
session_id: context.sessionId,
target_email: email,
amount: amount * 10000,
memo,
auto_sign: true,
transaction_type: 'transfer',
blockchain_type: 'mysql',
// TODO this is subject to replay attacks
// validate sender user (logged in)
const userRepository = getCustomRepository(UserRepository)
const senderUser = await userRepository.findByPubkeyHex(context.pubKey)
if (senderUser.pubkey.length !== 32) {
throw new Error('invalid sender public key')
}
const result = await apiPost(CONFIG.LOGIN_API_URL + 'createTransaction', payload)
if (!result.success) {
throw new Error(result.data)
if (!(await hasUserAmount(senderUser, amount))) {
throw new Error('user does not have enough GDD')
}
// validate recipient user
// TODO: the detour over the public key is unnecessary
const recipiantPublicKey = await getPublicKey(email, context.sessionId)
if (!recipiantPublicKey) {
throw new Error('recipient not known')
}
if (!isHexPublicKey(recipiantPublicKey)) {
throw new Error('invalid recipient public key')
}
const recipiantUser = await userRepository.findByPubkeyHex(recipiantPublicKey)
if (!recipiantUser) {
throw new Error('Cannot find recipient user for local send coins transaction')
} else if (recipiantUser.disabled) {
throw new Error('recipient user account is disabled')
}
}
// validate amount
if (amount <= 0) {
throw new Error('invalid amount')
}
const centAmount = Math.trunc(amount * 10000)
const queryRunner = getConnection().createQueryRunner()
await queryRunner.connect()
await queryRunner.startTransaction('READ UNCOMMITTED')
try {
// transaction
let transaction = new dbTransaction()
transaction.transactionTypeId = TransactionTypeId.SEND
transaction.memo = memo
// TODO: NO! this is problematic in its construction
const insertResult = await queryRunner.manager.insert(dbTransaction, transaction)
transaction = await queryRunner.manager
.findOneOrFail(dbTransaction, insertResult.generatedMaps[0].id)
.catch((error) => {
throw new Error('error loading saved transaction: ' + error)
})
// Insert Transaction: sender - amount
const senderUserTransactionBalance = await addUserTransaction(
senderUser,
transaction,
-centAmount,
queryRunner,
)
// Insert Transaction: recipient + amount
const recipiantUserTransactionBalance = await addUserTransaction(
recipiantUser,
transaction,
centAmount,
queryRunner,
)
// Update Balance: sender - amount
const senderStateBalance = await updateStateBalance(
senderUser,
-centAmount,
transaction.received,
queryRunner,
)
// Update Balance: recipient + amount
const recipiantStateBalance = await updateStateBalance(
recipiantUser,
centAmount,
transaction.received,
queryRunner,
)
if (senderStateBalance.amount !== senderUserTransactionBalance.balance) {
throw new Error('db data corrupted, sender')
}
if (recipiantStateBalance.amount !== recipiantUserTransactionBalance.balance) {
throw new Error('db data corrupted, recipient')
}
// transactionSendCoin
const transactionSendCoin = new dbTransactionSendCoin()
transactionSendCoin.transactionId = transaction.id
transactionSendCoin.userId = senderUser.id
transactionSendCoin.senderPublic = senderUser.pubkey
transactionSendCoin.recipiantUserId = recipiantUser.id
transactionSendCoin.recipiantPublic = Buffer.from(recipiantPublicKey, 'hex')
transactionSendCoin.amount = centAmount
transactionSendCoin.senderFinalBalance = senderStateBalance.amount
await queryRunner.manager.save(transactionSendCoin).catch((error) => {
throw new Error('error saving transaction send coin: ' + error)
})
await queryRunner.manager.save(transaction).catch((error) => {
throw new Error('error saving transaction with tx hash: ' + error)
})
await queryRunner.commitTransaction()
} catch (e) {
await queryRunner.rollbackTransaction()
throw e
} finally {
await queryRunner.release()
// TODO: This is broken code - we should never correct an autoincrement index in production.
// According to dario it is required though, in order to work properly. The index of the table is used as
// index for the transaction, which requires a chain without gaps
const count = await queryRunner.manager.count(dbTransaction)
// fix autoincrement value which seems not effected from rollback
await queryRunner
.query('ALTER TABLE `transactions` auto_increment = ?', [count])
.catch((error) => {
// eslint-disable-next-line no-console
console.log('problems with reset auto increment: %o', error)
})
}
// send notification email
// TODO: translate
await sendEMail({
from: `Gradido (nicht antworten) <${CONFIG.EMAIL_SENDER}>`,
to: `${recipiantUser.firstName} ${recipiantUser.lastName} <${recipiantUser.email}>`,
subject: 'Gradido Überweisung',
text: `Hallo ${recipiantUser.firstName} ${recipiantUser.lastName}
Du hast soeben ${amount} GDD von ${senderUser.firstName} ${senderUser.lastName} erhalten.
${senderUser.firstName} ${senderUser.lastName} schreibt:
${memo}
Bitte antworte nicht auf diese E-Mail!
Mit freundlichen Grüßen,
dein Gradido-Team`,
})
return 'success'
}
}

View File

@ -1,10 +1,10 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import fs from 'fs'
import { Resolver, Query, Args, Arg, Authorized, Ctx, UseMiddleware, Mutation } from 'type-graphql'
import { from_hex as fromHex } from 'libsodium-wrappers'
import { getConnection, getCustomRepository } from 'typeorm'
import CONFIG from '../../config'
import { CheckUsernameResponse } from '../model/CheckUsernameResponse'
import { LoginViaVerificationCode } from '../model/LoginViaVerificationCode'
import { SendPasswordResetEmailResponse } from '../model/SendPasswordResetEmailResponse'
import { UpdateUserInfosResponse } from '../model/UpdateUserInfosResponse'
@ -22,10 +22,165 @@ import {
klicktippNewsletterStateMiddleware,
} from '../../middleware/klicktippMiddleware'
import { CheckEmailResponse } from '../model/CheckEmailResponse'
import { getCustomRepository } from 'typeorm'
import { UserSettingRepository } from '../../typeorm/repository/UserSettingRepository'
import { Setting } from '../enum/Setting'
import { UserRepository } from '../../typeorm/repository/User'
import { LoginUser } from '@entity/LoginUser'
import { LoginElopageBuys } from '@entity/LoginElopageBuys'
import { LoginUserBackup } from '@entity/LoginUserBackup'
import { LoginEmailOptIn } from '@entity/LoginEmailOptIn'
import { sendEMail } from '../../util/sendEMail'
// eslint-disable-next-line @typescript-eslint/no-var-requires
const sodium = require('sodium-native')
// eslint-disable-next-line @typescript-eslint/no-var-requires
const random = require('random-bigint')
// We will reuse this for changePassword
const isPassword = (password: string): boolean => {
return !!password.match(/^(?=.*[a-z])(?=.*[A-Z])(?=.*[0-9])(?=.*[^a-zA-Z0-9 \t\n\r]).{8,}$/)
}
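A few illustrative inputs for the rule above (at least 8 characters, upper and lower case, a digit and a special character); the sample values are assumptions, not taken from this diff:

```ts
// Expected results of the isPassword() helper defined above:
isPassword('123!AAAb') // true  - the password used in the test instructions above
isPassword('12345678') // false - no letters, no special character
isPassword('Abcdefg1') // false - no special character
isPassword('Abcdef1!') // true
```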
const LANGUAGES = ['de', 'en']
const DEFAULT_LANGUAGE = 'de'
const isLanguage = (language: string): boolean => {
return LANGUAGES.includes(language)
}
const PHRASE_WORD_COUNT = 24
const WORDS = fs.readFileSync('src/config/mnemonic.english.txt').toString().split('\n')
const PassphraseGenerate = (): string[] => {
const result = []
for (let i = 0; i < PHRASE_WORD_COUNT; i++) {
result.push(WORDS[sodium.randombytes_random() % 2048])
}
return result
/*
return [
'behind',
'salmon',
'fluid',
'orphan',
'frost',
'elder',
'amateur',
'always',
'panel',
'palm',
'leopard',
'essay',
'punch',
'title',
'fun',
'annual',
'page',
'hundred',
'journey',
'select',
'figure',
'tunnel',
'casual',
'bar',
]
*/
}
/*
Test results:
INSERT INTO `login_users` (`id`, `email`, `first_name`, `last_name`, `username`, `description`, `password`, `pubkey`, `privkey`, `email_hash`, `created`, `email_checked`, `passphrase_shown`, `language`, `disabled`, `group_id`, `publisher_id`) VALUES
// old
(1, 'peter@lustig.de', 'peter', 'lustig', '', '', 4747956395458240931, 0x8c75edd507f470e5378f927489374694d68f3d155523f1c4402c36affd35a7ed, 0xb0e310655726b088631ccfd31ad6470ee50115c161dde8559572fa90657270ff13dc1200b2d3ea90dfbe92f3a4475ee4d9cee4989e39736a0870c33284bc73a8ae690e6da89f241a121eb3b500c22885, 0x9f700e6f6ec351a140b674c0edd4479509697b023bd8bee8826915ef6c2af036, '2021-11-03 20:05:04', 0, 0, 'de', 0, 1, 0);
// new
(2, 'peter@lustig.de', 'peter', 'lustig', '', '', 4747956395458240931, 0x8c75edd507f470e5378f927489374694d68f3d155523f1c4402c36affd35a7ed, 0xb0e310655726b088631ccfd31ad6470ee50115c161dde8559572fa90657270ff13dc1200b2d3ea90dfbe92f3a4475ee4d9cee4989e39736a0870c33284bc73a8ae690e6da89f241a121eb3b500c22885, 0x9f700e6f6ec351a140b674c0edd4479509697b023bd8bee8826915ef6c2af036, '2021-11-03 20:22:15', 0, 0, 'de', 0, 1, 0);
INSERT INTO `login_user_backups` (`id`, `user_id`, `passphrase`, `mnemonic_type`) VALUES
// old
(1, 1, 'behind salmon fluid orphan frost elder amateur always panel palm leopard essay punch title fun annual page hundred journey select figure tunnel casual bar ', 2);
// new
(2, 2, 'behind salmon fluid orphan frost elder amateur always panel palm leopard essay punch title fun annual page hundred journey select figure tunnel casual bar ', 2);
*/
const KeyPairEd25519Create = (passphrase: string[]): Buffer[] => {
if (!passphrase.length || passphrase.length < PHRASE_WORD_COUNT) {
throw new Error('passphrase empty or too short')
}
const state = Buffer.alloc(sodium.crypto_hash_sha512_STATEBYTES)
sodium.crypto_hash_sha512_init(state)
// To avoid breaking existing passphrase-hash combinations, word indices are written into a 64-bit variable to mimic the first implementation of the algorithm
for (let i = 0; i < PHRASE_WORD_COUNT; i++) {
const value = Buffer.alloc(8)
const wordIndex = WORDS.indexOf(passphrase[i])
value.writeBigInt64LE(BigInt(wordIndex))
sodium.crypto_hash_sha512_update(state, value)
}
// trailing space is part of the login_server implementation
const clearPassphrase = passphrase.join(' ') + ' '
sodium.crypto_hash_sha512_update(state, Buffer.from(clearPassphrase))
const outputHashBuffer = Buffer.alloc(sodium.crypto_hash_sha512_BYTES)
sodium.crypto_hash_sha512_final(state, outputHashBuffer)
const pubKey = Buffer.alloc(sodium.crypto_sign_PUBLICKEYBYTES)
const privKey = Buffer.alloc(sodium.crypto_sign_SECRETKEYBYTES)
sodium.crypto_sign_seed_keypair(
pubKey,
privKey,
outputHashBuffer.slice(0, sodium.crypto_sign_SEEDBYTES),
)
return [pubKey, privKey]
}
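A hypothetical round trip through the two helpers above. The key pair is derived deterministically from the passphrase, which is why storing the passphrase in `login_user_backups` is enough to recover the keys:

```ts
// Sketch, not part of the diff: derive a key pair from a fresh passphrase.
const passphrase = PassphraseGenerate()
const [pubKey, privKey] = KeyPairEd25519Create(passphrase)
// pubKey:  32 bytes (crypto_sign_PUBLICKEYBYTES)
// privKey: 64 bytes (crypto_sign_SECRETKEYBYTES), stored encrypted in login_users
```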
const SecretKeyCryptographyCreateKey = (salt: string, password: string): Buffer[] => {
const configLoginAppSecret = Buffer.from(CONFIG.LOGIN_APP_SECRET, 'hex')
const configLoginServerKey = Buffer.from(CONFIG.LOGIN_SERVER_KEY, 'hex')
if (configLoginServerKey.length !== sodium.crypto_shorthash_KEYBYTES) {
throw new Error(
`ServerKey has an invalid size. The size must be ${sodium.crypto_shorthash_KEYBYTES} bytes.`,
)
}
const state = Buffer.alloc(sodium.crypto_hash_sha512_STATEBYTES)
sodium.crypto_hash_sha512_init(state)
sodium.crypto_hash_sha512_update(state, Buffer.from(salt))
sodium.crypto_hash_sha512_update(state, configLoginAppSecret)
const hash = Buffer.alloc(sodium.crypto_hash_sha512_BYTES)
sodium.crypto_hash_sha512_final(state, hash)
const encryptionKey = Buffer.alloc(sodium.crypto_box_SEEDBYTES)
const opsLimit = 10
const memLimit = 33554432
const algo = 2
sodium.crypto_pwhash(
encryptionKey,
Buffer.from(password),
hash.slice(0, sodium.crypto_pwhash_SALTBYTES),
opsLimit,
memLimit,
algo,
)
const encryptionKeyHash = Buffer.alloc(sodium.crypto_shorthash_BYTES)
sodium.crypto_shorthash(encryptionKeyHash, encryptionKey, configLoginServerKey)
return [encryptionKeyHash, encryptionKey]
}
const getEmailHash = (email: string): Buffer => {
const emailHash = Buffer.alloc(sodium.crypto_generichash_BYTES)
sodium.crypto_generichash(emailHash, Buffer.from(email))
return emailHash
}
const SecretKeyCryptographyEncrypt = (message: Buffer, encryptionKey: Buffer): Buffer => {
const encrypted = Buffer.alloc(sodium.crypto_secretbox_MACBYTES + message.length)
const nonce = Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES)
nonce.fill(31) // static nonce
sodium.crypto_secretbox_easy(encrypted, message, nonce, encryptionKey)
return encrypted
}
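For context, a hypothetical decrypt counterpart under the same static-nonce scheme; it is not part of this diff and is sketched only to show how the encrypted privkey could be recovered at login:

```ts
// Hypothetical sketch mirroring SecretKeyCryptographyEncrypt above.
const SecretKeyCryptographyDecrypt = (encrypted: Buffer, encryptionKey: Buffer): Buffer => {
  const message = Buffer.alloc(encrypted.length - sodium.crypto_secretbox_MACBYTES)
  const nonce = Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES)
  nonce.fill(31) // must match the static nonce used for encryption
  if (!sodium.crypto_secretbox_open_easy(message, encrypted, nonce, encryptionKey)) {
    throw new Error('decryption failed')
  }
  return message
}
```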
@Resolver()
export class UserResolver {
@ -62,9 +217,9 @@ export class UserResolver {
userEntity.lastName = user.lastName
userEntity.username = user.username
userEntity.email = user.email
userEntity.pubkey = Buffer.from(fromHex(user.pubkey))
userEntity.pubkey = Buffer.from(user.pubkey, 'hex')
userEntity.save().catch(() => {
userRepository.save(userEntity).catch(() => {
throw new Error('error saving userEntity')
})
})
@ -108,46 +263,152 @@ export class UserResolver {
@Authorized()
@Query(() => String)
async logout(@Ctx() context: any): Promise<string> {
const payload = { session_id: context.sessionId }
const result = await apiPost(CONFIG.LOGIN_API_URL + 'logout', payload)
if (!result.success) {
throw new Error(result.data)
}
return 'success'
async logout(): Promise<boolean> {
// TODO: We don't need this anymore, but might need it in the future in order to invalidate a valid JWT token.
// Furthermore this hook can be useful for tracking user behaviour (did he log out or not? Warn him on the next login if he didn't).
// The functionality is fully client-side - the client just needs to delete his token with the current implementation.
// We could try to force this by sending `token: null` or `token: ''` with this call. But since that bears no real security benefit,
// we should just return true for now.
return true
}
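Since logout is now purely client-side, the counterpart is trivial. A hypothetical client snippet (storage key and function name are assumptions, not part of this diff):

```ts
// With a stateless JWT there is no server session to invalidate,
// so dropping the stored token is the whole logout operation.
function clientLogout(): void {
  localStorage.removeItem('token')
}
```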
@Mutation(() => String)
async createUser(
@Args() { email, firstName, lastName, password, language, publisherId }: CreateUserArgs,
): Promise<string> {
const payload = {
email,
first_name: firstName,
last_name: lastName,
password,
emailType: 2,
login_after_register: true,
language: language,
publisher_id: publisherId,
}
const result = await apiPost(CONFIG.LOGIN_API_URL + 'createUser', payload)
if (!result.success) {
throw new Error(result.data)
// TODO: wrong default value (should be null), how does graphql work here? Is it a required field?
// default int publisher_id = 0;
// Validate Language (no throw)
if (!isLanguage(language)) {
language = DEFAULT_LANGUAGE
}
const user = new User(result.data.user)
const dbuser = new DbUser()
dbuser.pubkey = Buffer.from(fromHex(user.pubkey))
dbuser.email = user.email
dbuser.firstName = user.firstName
dbuser.lastName = user.lastName
dbuser.username = user.username
// Validate Password
if (!isPassword(password)) {
throw new Error(
'Please enter a valid password with at least 8 characters, upper and lower case letters, at least one number and one special character!',
)
}
dbuser.save().catch(() => {
throw new Error('error saving user')
})
// Validate username
// TODO: never true
const username = ''
if (username.length > 3 && !this.checkUsername({ username })) {
throw new Error('Username already in use')
}
// Validate email unique
// TODO: I can register an email in upper/lower case twice
const userRepository = getCustomRepository(UserRepository)
const usersFound = await userRepository.count({ email })
if (usersFound !== 0) {
// TODO: this is insecure, but matches the current implementation of the login server. This way one can query whether a user with a given email exists.
throw new Error(`User already exists.`)
}
const passphrase = PassphraseGenerate()
const keyPair = KeyPairEd25519Create(passphrase) // return pub, priv Key
const passwordHash = SecretKeyCryptographyCreateKey(email, password) // return short and long hash
const emailHash = getEmailHash(email)
const encryptedPrivkey = SecretKeyCryptographyEncrypt(keyPair[1], passwordHash[1])
// Table: login_users
const loginUser = new LoginUser()
loginUser.email = email
loginUser.firstName = firstName
loginUser.lastName = lastName
loginUser.username = username
loginUser.description = ''
loginUser.password = passwordHash[0].readBigUInt64LE() // using the shorthash
loginUser.emailHash = emailHash
loginUser.language = language
loginUser.groupId = 1
loginUser.publisherId = publisherId
loginUser.pubKey = keyPair[0]
loginUser.privKey = encryptedPrivkey
const queryRunner = getConnection().createQueryRunner()
await queryRunner.connect()
await queryRunner.startTransaction('READ UNCOMMITTED')
try {
const { id: loginUserId } = await queryRunner.manager.save(loginUser).catch((error) => {
// eslint-disable-next-line no-console
console.log('insert LoginUser failed', error)
throw new Error('insert user failed')
})
// Table: login_user_backups
const loginUserBackup = new LoginUserBackup()
loginUserBackup.userId = loginUserId
loginUserBackup.passphrase = passphrase.join(' ') + ' ' // login server saves trailing space
loginUserBackup.mnemonicType = 2 // ServerConfig::MNEMONIC_BIP0039_SORTED_ORDER;
await queryRunner.manager.save(loginUserBackup).catch((error) => {
// eslint-disable-next-line no-console
console.log('insert LoginUserBackup failed', error)
throw new Error('insert user backup failed')
})
// Table: state_users
const dbUser = new DbUser()
dbUser.pubkey = keyPair[0]
dbUser.email = email
dbUser.firstName = firstName
dbUser.lastName = lastName
dbUser.username = username
await queryRunner.manager.save(dbUser).catch((er) => {
// eslint-disable-next-line no-console
console.log('Error while saving dbUser', er)
throw new Error('error saving user')
})
// Store EmailOptIn in DB
const emailOptIn = new LoginEmailOptIn()
emailOptIn.userId = loginUserId
emailOptIn.verificationCode = random(64)
emailOptIn.emailOptInTypeId = 2
await queryRunner.manager.save(emailOptIn).catch((error) => {
// eslint-disable-next-line no-console
console.log('Error while saving emailOptIn', error)
throw new Error('error saving email opt in')
})
// Send EMail to user
const activationLink = CONFIG.EMAIL_LINK_VERIFICATION.replace(
/\$1/g,
emailOptIn.verificationCode.toString(),
)
const emailSent = await sendEMail({
from: `Gradido (nicht antworten) <${CONFIG.EMAIL_SENDER}>`,
to: `${firstName} ${lastName} <${email}>`,
subject: 'Gradido: E-Mail Überprüfung',
text: `Hallo ${firstName} ${lastName},
Deine EMail wurde soeben bei Gradido registriert.
Klicke bitte auf diesen Link, um die Registrierung abzuschließen und dein Gradido-Konto zu aktivieren:
${activationLink}
oder kopiere den obigen Link in dein Browserfenster.
Mit freundlichen Grüßen,
dein Gradido-Team`,
})
// In case emails are disabled, log the activation link for the user
if (!emailSent) {
// eslint-disable-next-line no-console
console.log(`Account confirmation link: ${activationLink}`)
}
await queryRunner.commitTransaction()
} catch (e) {
await queryRunner.rollbackTransaction()
throw e
} finally {
await queryRunner.release()
}
return 'success'
}
@ -246,7 +507,7 @@ export class UserResolver {
userEntityChanged = true
}
if (userEntityChanged) {
userEntity.save().catch((error) => {
userRepository.save(userEntity).catch((error) => {
throw new Error(error)
})
}
@ -275,15 +536,27 @@ export class UserResolver {
return response
}
@Query(() => CheckUsernameResponse)
async checkUsername(
@Args() { username, groupId = 1 }: CheckUsernameArgs,
): Promise<CheckUsernameResponse> {
const response = await apiGet(
CONFIG.LOGIN_API_URL + `checkUsername?username=${username}&group_id=${groupId}`,
)
if (!response.success) throw new Error(response.data)
return new CheckUsernameResponse(response.data)
@Query(() => Boolean)
async checkUsername(@Args() { username }: CheckUsernameArgs): Promise<boolean> {
// Username empty?
if (username === '') {
throw new Error('Username must be set.')
}
// Do we fulfil the minimum character length?
const MIN_CHARACTERS_USERNAME = 2
if (username.length < MIN_CHARACTERS_USERNAME) {
throw new Error(`Username must be at minimum ${MIN_CHARACTERS_USERNAME} characters long.`)
}
const usersFound = await LoginUser.count({ username })
// Username already present?
if (usersFound !== 0) {
throw new Error(`Username "${username}" already taken.`)
}
return true
}
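A hypothetical query against this resolver, following the `apollo-server-testing` pattern from `CommunityResolver.test.ts` above (assuming a `query` helper set up the same way):

```ts
const checkUsernameQuery = `
  query {
    checkUsername(username: "peter")
  }
`
// await expect(query({ query: checkUsernameQuery })).resolves.toMatchObject({
//   data: { checkUsername: true },
// })
```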
@Query(() => CheckEmailResponse)
@ -298,12 +571,16 @@ export class UserResolver {
return new CheckEmailResponse(result.data)
}
@Authorized()
@Query(() => Boolean)
async hasElopage(@Ctx() context: any): Promise<boolean> {
const result = await apiGet(CONFIG.LOGIN_API_URL + 'hasElopage?session_id=' + context.sessionId)
if (!result.success) {
throw new Error(result.data)
const userRepository = getCustomRepository(UserRepository)
const userEntity = await userRepository.findByPubkeyHex(context.pubKey).catch()
if (!userEntity) {
return false
}
return result.data.hasElopage
const elopageBuyCount = await LoginElopageBuys.count({ payerEmail: userEntity.email })
return elopageBuyCount > 0
}
}

View File

@ -6,7 +6,7 @@ import isAuthorized from './directive/isAuthorized'
const schema = async (): Promise<GraphQLSchema> => {
return buildSchema({
resolvers: [path.join(__dirname, 'resolver', `*.{js,ts}`)],
resolvers: [path.join(__dirname, 'resolver', `!(*.test).{js,ts}`)],
authChecker: isAuthorized,
})
}

View File

@ -1,64 +1,14 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import 'reflect-metadata'
import 'module-alias/register'
import express from 'express'
import { ApolloServer } from 'apollo-server-express'
import createServer from './server/createServer'
// config
import CONFIG from './config'
// database
import connection from './typeorm/connection'
import getDBVersion from './typeorm/getDBVersion'
// server
import cors from './server/cors'
import context from './server/context'
import plugins from './server/plugins'
// graphql
import schema from './graphql/schema'
// TODO implement
// import queryComplexity, { simpleEstimator, fieldConfigEstimator } from "graphql-query-complexity";
const DB_VERSION = '0002-add_settings'
async function main() {
// open mysql connection
const con = await connection()
if (!con || !con.isConnected) {
throw new Error(`Couldn't open connection to database`)
}
const { app } = await createServer()
// check for correct database version
const dbVersion = await getDBVersion()
if (!dbVersion || dbVersion.indexOf(DB_VERSION) === -1) {
throw new Error(
`Wrong database version - the backend requires '${DB_VERSION}' but found '${
dbVersion || 'None'
}'`,
)
}
// Express Server
const server = express()
// cors
server.use(cors)
// Apollo Server
const apollo = new ApolloServer({
schema: await schema(),
playground: CONFIG.GRAPHIQL,
context,
plugins,
})
apollo.applyMiddleware({ app: server })
// Start Server
server.listen(CONFIG.PORT, () => {
app.listen(CONFIG.PORT, () => {
// eslint-disable-next-line no-console
console.log(`Server is running at http://localhost:${CONFIG.PORT}`)
if (CONFIG.GRAPHIQL) {

View File

@ -0,0 +1,64 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import 'reflect-metadata'
import 'module-alias/register'
import { ApolloServer } from 'apollo-server-express'
import express from 'express'
// database
import connection from '../typeorm/connection'
import getDBVersion from '../typeorm/getDBVersion'
// server
import cors from './cors'
import serverContext from './context'
import plugins from './plugins'
// config
import CONFIG from '../config'
// graphql
import schema from '../graphql/schema'
// TODO implement
// import queryComplexity, { simpleEstimator, fieldConfigEstimator } from "graphql-query-complexity";
const DB_VERSION = '0004-login_server_data'
const createServer = async (context: any = serverContext): Promise<any> => {
// open mysql connection
const con = await connection()
if (!con || !con.isConnected) {
throw new Error(`Couldn't open connection to database`)
}
// check for correct database version
const dbVersion = await getDBVersion()
if (!dbVersion || dbVersion.indexOf(DB_VERSION) === -1) {
throw new Error(
`Wrong database version - the backend requires '${DB_VERSION}' but found '${
dbVersion || 'None'
}'`,
)
}
// Express Server
const app = express()
// cors
app.use(cors)
// Apollo Server
const apollo = new ApolloServer({
schema: await schema(),
playground: CONFIG.GRAPHIQL,
context,
plugins,
})
apollo.applyMiddleware({ app })
return { apollo, app, con }
}
export default createServer

View File

@ -0,0 +1,5 @@
import { EntityRepository, Repository } from 'typeorm'
import { LoginEmailOptIn } from '@entity/LoginEmailOptIn'
@EntityRepository(LoginEmailOptIn)
export class LoginEmailOptInRepository extends Repository<LoginEmailOptIn> {}

View File

@ -0,0 +1,5 @@
import { EntityRepository, Repository } from 'typeorm'
import { LoginUser } from '@entity/LoginUser'
@EntityRepository(LoginUser)
export class LoginUserRepository extends Repository<LoginUser> {}

View File

@ -0,0 +1,5 @@
import { EntityRepository, Repository } from 'typeorm'
import { LoginUserBackup } from '@entity/LoginUserBackup'
@EntityRepository(LoginUserBackup)
export class LoginUserBackupRepository extends Repository<LoginUserBackup> {}

View File

@ -17,4 +17,11 @@ export class UserTransactionRepository extends Repository<UserTransaction> {
.offset(offset)
.getManyAndCount()
}
findLastForUser(userId: number): Promise<UserTransaction | undefined> {
return this.createQueryBuilder('userTransaction')
.where('userTransaction.userId = :userId', { userId })
.orderBy('userTransaction.transactionId', 'DESC')
.getOne()
}
}

View File

@ -2,24 +2,25 @@ import { decayFormula, calculateDecay } from './decay'
describe('utils/decay', () => {
describe('decayFormula', () => {
it('has base 0.99999997802044727', async () => {
it('has base 0.99999997802044727', () => {
const amount = 1.0
const seconds = 1
expect(await decayFormula(amount, seconds)).toBe(0.99999997802044727)
expect(decayFormula(amount, seconds)).toBe(0.99999997802044727)
})
// Not sure if the following skipped tests make sense!?
it.skip('has negative decay?', async () => {
it('has negative decay?', async () => {
const amount = -1.0
const seconds = 1
expect(await decayFormula(amount, seconds)).toBe(-0.99999997802044727)
})
it.skip('has correct backward calculation', async () => {
it('has correct backward calculation', async () => {
const amount = 1.0
const seconds = -1
expect(await decayFormula(amount, seconds)).toBe(1.0000000219795533)
})
it.skip('has correct forward calculation', async () => {
const amount = 1.000000219795533
// not possible as a literal, Node.js doesn't have enough floating-point precision
it('has correct forward calculation', async () => {
const amount = 1.0 / 0.99999997802044727
const seconds = 1
expect(await decayFormula(amount, seconds)).toBe(1.0)
})
@ -32,7 +33,7 @@ describe('utils/decay', () => {
expect(await calculateDecay(1.0, oneSecondAgo, now)).toBe(0.99999997802044727)
})
it.skip('returns input amount when from and to is the same', async () => {
it('returns input amount when from and to is the same', async () => {
const now = new Date()
expect(await calculateDecay(100.0, now, now)).toBe(100.0)
})
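The expectations above pin the formula down to a fixed per-second factor applied exponentially. A minimal sketch under that reading; the actual implementation lives in `backend/src/util/decay.ts` and is only partially shown in this diff:

```ts
// Inferred from `decayFormula(1.0, 1) === 0.99999997802044727` above.
const DECAY_BASE = 0.99999997802044727 // assumed per-second decay factor

function decayFormulaSketch(amount: number, seconds: number): number {
  // negative seconds invert the factor, which gives the backward calculation
  return amount * Math.pow(DECAY_BASE, seconds)
}
```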

View File

@ -7,6 +7,15 @@ function decayFormula(amount: number, seconds: number): number {
}
async function calculateDecay(amount: number, from: Date, to: Date): Promise<number> {
if (amount === undefined || !from || !to) {
throw new Error('at least one parameter is undefined')
}
if (from === to) {
return amount
}
if (to < from) {
throw new Error('to < from, so the target date is in the past?')
}
// load decay start block
const transactionRepository = getCustomRepository(TransactionRepository)
const decayStartBlock = await transactionRepository.findDecayStartBlock()

View File

@ -0,0 +1,22 @@
import { roundCeilFrom4, roundFloorFrom4, roundCeilFrom2, roundFloorFrom2 } from './round'
describe('utils/round', () => {
it('roundCeilFrom4', () => {
const amount = 11617
expect(roundCeilFrom4(amount)).toBe(1.17)
})
it('roundFloorFrom4', () => {
const amount = 11617
expect(roundFloorFrom4(amount)).toBe(1.16)
})
it('roundCeilFrom2', () => {
const amount = 1216
expect(roundCeilFrom2(amount)).toBe(13)
})
it('roundFloorFrom2', () => {
const amount = 1216
expect(roundFloorFrom2(amount)).toBe(12)
})
})
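The expected values above imply that amounts are stored as integers with four implied decimal places (GDD × 10000, matching `centAmount` in the TransactionResolver). A sketch of the four helpers under that assumption; the real code in `backend/src/util/round.ts` is not shown in this diff:

```ts
// Inferred from the tests above: rounding happens at the cent digit.
const roundCeilFrom4 = (amount: number): number => Math.ceil(amount / 100) / 100 // 11617 -> 1.17
const roundFloorFrom4 = (amount: number): number => Math.floor(amount / 100) / 100 // 11617 -> 1.16
const roundCeilFrom2 = (amount: number): number => Math.ceil(amount / 100) // 1216 -> 13
const roundFloorFrom2 = (amount: number): number => Math.floor(amount / 100) // 1216 -> 12
```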

View File

@ -0,0 +1,26 @@
import { createTransport } from 'nodemailer'
import CONFIG from '../config'
export const sendEMail = async (emailDef: any): Promise<boolean> => {
if (!CONFIG.EMAIL) {
// eslint-disable-next-line no-console
console.log('Emails are disabled via config')
return false
}
const transporter = createTransport({
host: CONFIG.EMAIL_SMTP_URL,
port: Number(CONFIG.EMAIL_SMTP_PORT),
secure: false, // true for 465, false for other ports
requireTLS: true,
auth: {
user: CONFIG.EMAIL_USERNAME,
pass: CONFIG.EMAIL_PASSWORD,
},
})
const info = await transporter.sendMail(emailDef)
if (!info.messageId) {
throw new Error('error sending notification email, but transaction succeeded')
}
return true
}

View File

@ -1,3 +1,8 @@
import { User as dbUser } from '@entity/User'
import { Balance as dbBalance } from '@entity/Balance'
import { getRepository } from 'typeorm'
import { calculateDecay } from './decay'
function isStringBoolean(value: string): boolean {
const lowerValue = value.toLowerCase()
if (lowerValue === 'true' || lowerValue === 'false') {
@ -6,4 +11,18 @@ function isStringBoolean(value: string): boolean {
return false
}
export { isStringBoolean }
function isHexPublicKey(publicKey: string): boolean {
return /^[0-9A-Fa-f]{64}$/i.test(publicKey)
}
async function hasUserAmount(user: dbUser, amount: number): Promise<boolean> {
if (amount < 0) return false
const balanceRepository = getRepository(dbBalance)
const balance = await balanceRepository.findOne({ userId: user.id })
if (!balance) return false
const decay = await calculateDecay(balance.amount, balance.recordDate, new Date())
return decay > amount
}
export { isHexPublicKey, hasUserAmount, isStringBoolean }

File diff suppressed because it is too large

View File

@ -15,7 +15,8 @@ class ServerUsersController extends AppController
public function initialize()
{
parent::initialize();
$this->Auth->allow(['add', 'edit']);
// uncomment in dev mode to add a new community server admin user, but do NOT commit it!!!
//$this->Auth->allow(['add', 'edit']);
$this->Auth->deny('index');
}

View File

@ -17,7 +17,7 @@ phpServer.host = nginx
loginServer.path = http://localhost/account
loginServer.default_locale = de
loginServer.db.host = mariadb
loginServer.db.name = gradido_login
loginServer.db.name = gradido_community
loginServer.db.user = root
loginServer.db.password =
loginServer.db.port = 3306

View File

@ -17,7 +17,7 @@ phpServer.host = nginx
loginServer.path = http://localhost/account
loginServer.default_locale = de
loginServer.db.host = mariadb
loginServer.db.name = gradido_login_test
loginServer.db.name = gradido_community_test
loginServer.db.user = root
loginServer.db.password =
loginServer.db.port = 3306

View File

@ -1,6 +0,0 @@
INSERT INTO `groups` (`id`, `alias`, `name`, `url`, `host`, `home`, `description`) VALUES
(1, 'docker', 'docker gradido group', 'localhost', 'nginx', '/', 'gradido test group for docker and stage2 with blockchain db');

View File

@ -24,6 +24,9 @@ export class TransactionSendCoin extends BaseEntity {
@Column()
amount: number
@Column({ name: 'sender_final_balance' })
senderFinalBalance: number
@OneToOne(() => Transaction)
@JoinColumn({ name: 'transaction_id' })
transaction: Transaction

View File

@ -0,0 +1,52 @@
import { BaseEntity, Entity, PrimaryGeneratedColumn, Column } from 'typeorm'
@Entity('login_elopage_buys')
export class LoginElopageBuys extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ name: 'elopage_user_id', nullable: false })
elopageUserId: number
@Column({ name: 'affiliate_program_id', nullable: false })
affiliateProgramId: number
@Column({ name: 'publisher_id', nullable: false })
publisherId: number
@Column({ name: 'order_id', nullable: false })
orderId: number
@Column({ name: 'product_id', nullable: false })
productId: number
@Column({ name: 'product_price', nullable: false })
productPrice: number
@Column({
name: 'payer_email',
length: 255,
nullable: false,
charset: 'utf8',
collation: 'utf8_bin',
})
payerEmail: string
@Column({
name: 'publisher_email',
length: 255,
nullable: false,
charset: 'utf8',
collation: 'utf8_bin',
})
publisherEmail: string
@Column({ nullable: false })
payed: boolean
@Column({ name: 'success_date', nullable: false })
successDate: Date
@Column({ length: 255, nullable: false })
event: string
}

View File

@ -0,0 +1,26 @@
import { BaseEntity, Entity, PrimaryGeneratedColumn, Column } from 'typeorm'
// Moriz: I do not like the idea of having two user tables
@Entity('login_email_opt_in')
export class LoginEmailOptIn extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ name: 'user_id' })
userId: number
@Column({ name: 'verification_code', type: 'bigint', unsigned: true, unique: true })
verificationCode: BigInt
@Column({ name: 'email_opt_in_type_id' })
emailOptInTypeId: number
@Column({ name: 'created', default: () => 'CURRENT_TIMESTAMP' })
createdAt: Date
@Column({ name: 'resend_count', default: 0 })
resendCount: number
@Column({ name: 'updated', default: () => 'CURRENT_TIMESTAMP' })
updatedAt: Date
}

View File

@ -0,0 +1,56 @@
import { BaseEntity, Entity, PrimaryGeneratedColumn, Column } from 'typeorm'
// Moriz: I do not like the idea of having two user tables
@Entity('login_users')
export class LoginUser extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ length: 191, unique: true })
email: string
@Column({ name: 'first_name', length: 150 })
firstName: string
@Column({ name: 'last_name', length: 255, default: '' })
lastName: string
@Column({ length: 255, default: '' })
username: string
@Column({ default: '' })
description: string
@Column({ type: 'bigint', default: 0, unsigned: true })
password: BigInt
@Column({ name: 'pubkey', type: 'binary', length: 32, default: null, nullable: true })
pubKey: Buffer
@Column({ name: 'privkey', type: 'binary', length: 80, default: null, nullable: true })
privKey: Buffer
@Column({ name: 'email_hash', type: 'binary', length: 32, default: null, nullable: true })
emailHash: Buffer
@Column({ name: 'created', default: () => 'CURRENT_TIMESTAMP' })
createdAt: Date
@Column({ name: 'email_checked', default: 0 })
emailChecked: boolean
@Column({ name: 'passphrase_shown', default: 0 })
passphraseShown: boolean
@Column({ length: 4, default: 'de' })
language: string
@Column({ default: 0 })
disabled: boolean
@Column({ name: 'group_id', default: 0, unsigned: true })
groupId: number
@Column({ name: 'publisher_id', default: 0 })
publisherId: number
}
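Since the entity extends `BaseEntity`, the mirrored table can be read in TypeORM's active-record style. A sketch, assuming an `@entity/LoginUser` alias analogous to the `@entity/User` import used elsewhere in this diff:

```ts
import { LoginUser } from '@entity/LoginUser' // assumed alias

// hypothetical lookup via the unique email column
async function findLoginUserByEmail(email: string): Promise<LoginUser | undefined> {
  return LoginUser.findOne({ email })
}
```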

View File

@ -0,0 +1,16 @@
import { BaseEntity, Entity, PrimaryGeneratedColumn, Column } from 'typeorm'
@Entity('login_user_backups')
export class LoginUserBackup extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ name: 'user_id', nullable: false })
userId: number
@Column({ type: 'text', name: 'passphrase', nullable: false })
passphrase: string
@Column({ name: 'mnemonic_type', default: -1 })
mnemonicType: number
}

View File

@ -0,0 +1 @@
export { LoginElopageBuys } from './0003-login_server_tables/LoginElopageBuys'

View File

@ -0,0 +1 @@
export { LoginEmailOptIn } from './0003-login_server_tables/LoginEmailOptIn'

View File

@ -0,0 +1 @@
export { LoginUser } from './0003-login_server_tables/LoginUser'

View File

@ -0,0 +1 @@
export { LoginUserBackup } from './0003-login_server_tables/LoginUserBackup'

View File

@ -1,4 +1,8 @@
import { Balance } from './Balance'
import { LoginElopageBuys } from './LoginElopageBuys'
import { LoginEmailOptIn } from './LoginEmailOptIn'
import { LoginUser } from './LoginUser'
import { LoginUserBackup } from './LoginUserBackup'
import { Migration } from './Migration'
import { Transaction } from './Transaction'
import { TransactionCreation } from './TransactionCreation'
@ -9,6 +13,10 @@ import { UserTransaction } from './UserTransaction'
export const entities = [
Balance,
LoginElopageBuys,
LoginEmailOptIn,
LoginUser,
LoginUserBackup,
Migration,
Transaction,
TransactionCreation,

View File

@ -11,7 +11,7 @@
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
await queryFn(`
CREATE TABLE IF NOT EXISTS \`user_setting\` (
CREATE TABLE \`user_setting\` (
\`id\` int(10) unsigned NOT NULL AUTO_INCREMENT,
\`userId\` int(11) NOT NULL,
\`key\` varchar(255) NOT NULL,
@ -22,5 +22,5 @@ export async function upgrade(queryFn: (query: string, values?: any[]) => Promis
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
// write downgrade logic as parameter of queryFn
await queryFn(`DROP TABLE IF EXISTS \`user_setting\`;`)
await queryFn(`DROP TABLE \`user_setting\`;`)
}

View File

@ -0,0 +1,153 @@
/* FIRST MIGRATION
*
* This migration is special since it takes into account that
* the database may already be set up, but also may not be.
* Therefore you will find all `CREATE TABLE` statements with
* an `IF NOT EXISTS`, all `INSERT` statements with an `IGNORE`, and in the
* downgrade function all `DROP TABLE` statements with an `IF EXISTS`.
* This ensures compatibility for both existing and non-existing
* databases.
*/
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
await queryFn(`
CREATE TABLE \`login_app_access_tokens\` (
\`id\` int unsigned NOT NULL AUTO_INCREMENT,
\`user_id\` int NOT NULL,
\`access_code\` bigint unsigned NOT NULL,
\`created\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`updated\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (\`id\`),
UNIQUE KEY \`access_code\` (\`access_code\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)
await queryFn(`
CREATE TABLE \`login_elopage_buys\` (
\`id\` int unsigned NOT NULL AUTO_INCREMENT,
\`elopage_user_id\` int DEFAULT NULL,
\`affiliate_program_id\` int NOT NULL,
\`publisher_id\` int NOT NULL,
\`order_id\` int NOT NULL,
\`product_id\` int NOT NULL,
\`product_price\` int NOT NULL,
\`payer_email\` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
\`publisher_email\` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
\`payed\` tinyint NOT NULL,
\`success_date\` datetime NOT NULL,
\`event\` varchar(255) NOT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)
await queryFn(`
CREATE TABLE \`login_email_opt_in_types\` (
\`id\` int unsigned NOT NULL AUTO_INCREMENT,
\`name\` varchar(255) NOT NULL,
\`description\` varchar(255) NOT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)
await queryFn(`
CREATE TABLE \`login_email_opt_in\` (
\`id\` int unsigned NOT NULL AUTO_INCREMENT,
\`user_id\` int NOT NULL,
\`verification_code\` bigint unsigned NOT NULL,
\`email_opt_in_type_id\` int NOT NULL,
\`created\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`resend_count\` int DEFAULT '0',
\`updated\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (\`id\`),
UNIQUE KEY \`verification_code\` (\`verification_code\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)
await queryFn(`
CREATE TABLE \`login_groups\` (
\`id\` int unsigned NOT NULL AUTO_INCREMENT,
\`alias\` varchar(190) NOT NULL,
\`name\` varchar(255) NOT NULL,
\`url\` varchar(255) NOT NULL,
\`host\` varchar(255) DEFAULT "/",
\`home\` varchar(255) DEFAULT "/",
\`description\` text,
PRIMARY KEY (\`id\`),
UNIQUE KEY \`alias\` (\`alias\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)
await queryFn(`
CREATE TABLE \`login_pending_tasks\` (
\`id\` int UNSIGNED NOT NULL AUTO_INCREMENT,
\`user_id\` int UNSIGNED DEFAULT 0,
\`request\` varbinary(2048) NOT NULL,
\`created\` datetime NOT NULL,
\`finished\` datetime DEFAULT '2000-01-01 000000',
\`result_json\` text DEFAULT NULL,
\`param_json\` text DEFAULT NULL,
\`task_type_id\` int UNSIGNED NOT NULL,
\`child_pending_task_id\` int UNSIGNED DEFAULT 0,
\`parent_pending_task_id\` int UNSIGNED DEFAULT 0,
PRIMARY KEY (\`id\`)
) ENGINE = InnoDB DEFAULT CHARSET=utf8mb4;
`)
await queryFn(`
CREATE TABLE \`login_roles\` (
\`id\` int unsigned NOT NULL AUTO_INCREMENT,
\`name\` varchar(255) NOT NULL,
\`description\` varchar(255) NOT NULL,
\`flags\` bigint NOT NULL DEFAULT '0',
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)
await queryFn(`
CREATE TABLE \`login_user_backups\` (
\`id\` int unsigned NOT NULL AUTO_INCREMENT,
\`user_id\` int NOT NULL,
\`passphrase\` text NOT NULL,
\`mnemonic_type\` int DEFAULT '-1',
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)
await queryFn(`
CREATE TABLE \`login_user_roles\` (
\`id\` int unsigned NOT NULL AUTO_INCREMENT,
\`user_id\` int NOT NULL,
\`role_id\` int NOT NULL,
PRIMARY KEY (\`id\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)
await queryFn(`
CREATE TABLE \`login_users\` (
\`id\` int unsigned NOT NULL AUTO_INCREMENT,
\`email\` varchar(191) NOT NULL,
\`first_name\` varchar(150) NOT NULL,
\`last_name\` varchar(255) DEFAULT '',
\`username\` varchar(255) DEFAULT '',
\`description\` text DEFAULT '',
\`password\` bigint unsigned DEFAULT '0',
\`pubkey\` binary(32) DEFAULT NULL,
\`privkey\` binary(80) DEFAULT NULL,
\`email_hash\` binary(32) DEFAULT NULL,
\`created\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`email_checked\` tinyint NOT NULL DEFAULT '0',
\`passphrase_shown\` tinyint NOT NULL DEFAULT '0',
\`language\` varchar(4) NOT NULL DEFAULT 'de',
\`disabled\` tinyint DEFAULT '0',
\`group_id\` int unsigned DEFAULT 0,
\`publisher_id\` int DEFAULT 0,
PRIMARY KEY (\`id\`),
UNIQUE KEY \`email\` (\`email\`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)
}
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
// write downgrade logic as parameter of queryFn
await queryFn(`DROP TABLE \`login_app_access_tokens\`;`)
await queryFn(`DROP TABLE \`login_elopage_buys\`;`)
await queryFn(`DROP TABLE \`login_email_opt_in_types\`;`)
await queryFn(`DROP TABLE \`login_email_opt_in\`;`)
await queryFn(`DROP TABLE \`login_groups\`;`)
await queryFn(`DROP TABLE \`login_pending_tasks\`;`)
await queryFn(`DROP TABLE \`login_roles\`;`)
await queryFn(`DROP TABLE \`login_user_backups\`;`)
await queryFn(`DROP TABLE \`login_user_roles\`;`)
await queryFn(`DROP TABLE \`login_users\`;`)
}
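For reference, the idempotent pattern the header comment describes would look like the following minimal sketch (`example_table` is a placeholder, not a table from this migration):

```ts
// minimal sketch of the idempotent migration pattern described in the header comment
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
  await queryFn(`
    CREATE TABLE IF NOT EXISTS \`example_table\` (
      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
      PRIMARY KEY (\`id\`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
  `)
}

export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
  await queryFn(`DROP TABLE IF EXISTS \`example_table\`;`)
}
```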

View File

@ -0,0 +1,74 @@
/* FIRST MIGRATION
*
* This migration is special since it takes into account that
* the database may already be set up, but also may not be.
* Therefore you will find all `CREATE TABLE` statements with
* an `IF NOT EXISTS`, all `INSERT` statements with an `IGNORE`, and in the
* downgrade function all `DROP TABLE` statements with an `IF EXISTS`.
* This ensures compatibility for both existing and non-existing
* databases.
*/
const LOGIN_SERVER_DB = 'gradido_login'
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
const loginDatabaseExists = await queryFn(`
SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${LOGIN_SERVER_DB}'
`)
if (loginDatabaseExists.length === 0) {
// eslint-disable-next-line no-console
console.log(`Skipping Login Server Database migration - Database ${LOGIN_SERVER_DB} not found`)
return
}
await queryFn(`
INSERT INTO \`login_app_access_tokens\` SELECT * FROM ${LOGIN_SERVER_DB}.\`app_access_tokens\`;
`)
await queryFn(`
INSERT INTO \`login_elopage_buys\` SELECT * FROM ${LOGIN_SERVER_DB}.\`elopage_buys\`;
`)
await queryFn(`
INSERT INTO \`login_email_opt_in_types\` SELECT * FROM ${LOGIN_SERVER_DB}.\`email_opt_in_types\`;
`)
await queryFn(`
INSERT INTO \`login_email_opt_in\` SELECT * FROM ${LOGIN_SERVER_DB}.\`email_opt_in\`;
`)
await queryFn(`
INSERT INTO \`login_groups\` SELECT * FROM ${LOGIN_SERVER_DB}.\`groups\`;
`)
await queryFn(`
INSERT INTO \`login_pending_tasks\` SELECT * FROM ${LOGIN_SERVER_DB}.\`pending_tasks\`;
`)
await queryFn(`
INSERT INTO \`login_roles\` SELECT * FROM ${LOGIN_SERVER_DB}.\`roles\`;
`)
await queryFn(`
INSERT INTO \`login_user_backups\` SELECT * FROM ${LOGIN_SERVER_DB}.\`user_backups\`;
`)
await queryFn(`
INSERT INTO \`login_user_roles\` SELECT * FROM ${LOGIN_SERVER_DB}.\`user_roles\`;
`)
await queryFn(`
INSERT INTO \`login_users\` SELECT * FROM ${LOGIN_SERVER_DB}.\`users\`;
`)
// TODO: clarify whether we need this in a non-Docker environment
await queryFn(`
INSERT IGNORE INTO \`login_groups\` (\`id\`, \`alias\`, \`name\`, \`url\`, \`host\`, \`home\`, \`description\`) VALUES
(1, 'docker', 'docker gradido group', 'localhost', 'nginx', '/', 'gradido test group for docker and stage2 with blockchain db');
`)
}
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
// write downgrade logic as parameter of queryFn
await queryFn(`DELETE FROM \`login_app_access_tokens\`;`)
await queryFn(`DELETE FROM \`login_elopage_buys\`;`)
await queryFn(`DELETE FROM \`login_email_opt_in_types\`;`)
await queryFn(`DELETE FROM \`login_email_opt_in\`;`)
await queryFn(`DELETE FROM \`login_groups\`;`)
await queryFn(`DELETE FROM \`login_pending_tasks\`;`)
await queryFn(`DELETE FROM \`login_roles\`;`)
await queryFn(`DELETE FROM \`login_user_backups\`;`)
await queryFn(`DELETE FROM \`login_user_roles\`;`)
await queryFn(`DELETE FROM \`login_users\`;`)
}

View File

@ -36,8 +36,6 @@ cd $PROJECT_PATH
# git checkout -f master
git pull
cd deployment/bare_metal
echo 'update schemas' >> $UPDATE_HTML
./update_db_schemas.sh
echo 'starting with rebuilding login-server<br>' >> $UPDATE_HTML
./build_and_start_login_server.sh
echo 'starting with rebuilding frontend<br>' >> $UPDATE_HTML

View File

@ -1,7 +0,0 @@
#!/bin/bash
# For that to work skeema needed to be installed on system
# in login_server/skeema skeema configuration files need to be there also in the subfolders
# Update DB Schemas (only the schemas, not the data)
cd ../../login_server/skeema
skeema push --allow-unsafe

View File

@ -91,6 +91,9 @@ services:
login-server:
build:
dockerfile: Dockerfiles/ubuntu/Dockerfile.debug
networks:
- external-net
- internal-net
security_opt:
- seccomp:unconfined
cap_add:
@ -155,22 +158,7 @@ services:
- external-net
volumes:
- /sessions
#########################################################
## skeema for updating dbs if changes happend ###########
#########################################################
skeema:
build:
context: .
dockerfile: ./skeema/Dockerfile
target: skeema_dev_run
depends_on:
- mariadb
networks:
- internal-net
volumes:
- ./login_server/skeema/gradido_login:/skeema/gradido_login
volumes:
frontend_node_modules:
admin_node_modules:

View File

@ -2,6 +2,26 @@ version: "3.4"
services:
########################################################
# BACKEND ##############################################
########################################################
backend:
image: gradido/backend:test
build:
target: test
networks:
- external-net
- internal-net
environment:
- NODE_ENV="test"
- DB_HOST=mariadb
########################################################
# DATABASE #############################################
########################################################
database:
restart: always # this is very dangerous, but worth trying for the delayed mariadb startup on first run
#########################################################
## MARIADB ##############################################
#########################################################
@ -9,16 +29,17 @@ services:
build:
context: .
dockerfile: ./mariadb/Dockerfile
target: mariadb_server_test
target: mariadb_server
environment:
- MARIADB_ALLOW_EMPTY_PASSWORD=1
- MARIADB_USER=root
networks:
- internal-net
- external-net
ports:
- 3306:3306
volumes:
- db_test_vol:/var/lib/mysql
- db_test_vol:/var/lib/mysql
#########################################################
## LOGIN SERVER #########################################
@ -75,19 +96,6 @@ services:
- external-net
volumes:
- /sessions
#########################################################
## skeema for updating dbs if changes happend ###########
#########################################################
skeema:
build:
context: .
dockerfile: ./skeema/Dockerfile
target: skeema_run
depends_on:
- mariadb
networks:
- internal-net
networks:
external-net:

View File

@ -59,9 +59,8 @@ services:
#########################################################
mariadb:
build:
context: .
dockerfile: ./mariadb/Dockerfile
target: mariadb_server_test
context: ./mariadb
target: mariadb_server
environment:
- MARIADB_ALLOW_EMPTY_PASSWORD=1
- MARIADB_USER=root
@ -182,41 +181,6 @@ services:
- internal-net
volumes:
- ./community_server/config/php-fpm/php-ini-overrides.ini:/etc/php/7.4/fpm/conf.d/99-overrides.ini
#########################################################
## skeema for updating dbs if changes happend ###########
#########################################################
skeema:
build:
context: .
dockerfile: ./skeema/Dockerfile
target: skeema_run
depends_on:
- mariadb
networks:
- internal-net
#########################################################
## GRADIDO NODE v1 ######################################
#########################################################
# gradido-node:
# build:
# context: .
# dockerfile: ./gn/docker/deprecated-hedera-node/Dockerfile
# volumes:
# - ${GN_INSTANCE_FOLDER}:/opt/instance
# container_name: ${GN_CONTAINER_NAME}
#########################################################
## GRADIDO NODE test ###################################
#########################################################
# gradido-node-test:
# build:
# context: .
# dockerfile: ./gn/docker/deprecated-hedera-node/Dockerfile
# container_name: gn-test
# working_dir: /opt/gn/build
# command: ["./unit_tests"]
networks:
external-net:

View File

@ -15,7 +15,7 @@ This document describes the technical overview for the Gradido infrastructure. Be
![CommunityServerAPI](../image/CommunityServerAPI.png)
### Database Skeema
### Database Skeema (outdated)
![CommunityDBSkeema](../image/CommunityDBSkeema.png)

BIN
docu/Gradido-Admin.epgz Normal file

Binary file not shown.

View File

@ -0,0 +1,27 @@
### User creation
A user needs to be created on the login_server. We do this when we create a user in the client at https://$community_domain/vue/register.
### Admin user
To make a user an admin, we need to run the following SQL query on the gradido_login database:
```
INSERT INTO user_roles (id, user_id, role_id) VALUES (NULL, '1', '1');
```
The user_id can be looked up in the `users` table, e.g. `SELECT id FROM users WHERE email = '...';`.
Now, when we log in at https://$community_domain/account/, we can create coins, but we are restricted because we cannot sign the creations.
### Signing account
First we need to enable server user account creation by uncommenting line 19 in
community_server/src/Controller/ServerUsersController.php
```php
$this->Auth->allow(['add', 'edit']);
```
This enables us to use the action without being logged in.
To add a signing account, open the following URL: http://$community_domain/server-users/add
### Coin creation process
Coin creation for work is done at the following URL: http://$community_domain/transaction-creations/create-multi
There we can create coins for as many users as we want, except for ourselves.
Furthermore, we must sign the transactions we created. Normally, after clicking the left button (Transaktion abschließen, i.e. "complete transaction"), we are automatically forwarded to http://$community_domain/account/checkTransactions, where we can do this.
If not, this page can also be reached by clicking the shield icon with the check mark on the dashboard, which is only shown if at least one transaction is waiting to be signed.
For debugging purposes, you can check the `pending_tasks` table, which stores transactions that are not signed yet or ran into errors.

Binary file not shown.


Binary file not shown.


Binary file not shown.

View File

@ -9,8 +9,9 @@ module.exports = {
],
// coverageReporters: ['lcov', 'text'],
moduleNameMapper: {
'^@/(.*)$': '<rootDir>/src/$1',
'\\.(css|less)$': 'identity-obj-proxy',
'\\.(scss)$': '<rootDir>/src/assets/mocks/styleMock.js',
'^@/(.*)$': '<rootDir>/src/$1',
},
transform: {
'^.+\\.vue$': 'vue-jest',

View File

@ -0,0 +1 @@
module.exports = {}

View File

@ -75,9 +75,7 @@ export const sendResetPasswordEmail = gql`
export const checkUsername = gql`
query($username: String!) {
checkUsername(username: $username) {
state
}
checkUsername(username: $username)
}
`

View File

@ -96,7 +96,7 @@
"conversion-gdt-euro": "Umrechnung Euro / Gradido Transform (GDT)",
"credit": "Gutschrift",
"factor": "Faktor",
"formula": "Berechungsformel",
"formula": "Berechnungsformel",
"funding": "Zu den Förderbeiträgen",
"gdt-received": "Gradido Transform (GDT) erhalten",
"no-transactions": "Du hast noch keine Gradido Transform (GDT).",
@ -132,8 +132,8 @@
},
"newsletter": {
"newsletter": "Newsletter",
"newsletterFalse": "Du bist aus Newslettersystem ausgetragen.",
"newsletterTrue": "Du bist im Newslettersystem eingetragen."
"newsletterFalse": "Du erhältst keine Informationen per E-Mail.",
"newsletterTrue": "Du erhältst Informationen per E-Mail."
},
"password": {
"change-password": "Passwort ändern",

View File

@ -132,8 +132,8 @@
},
"newsletter": {
"newsletter": "Newsletter",
"newsletterFalse": "You are unsubscribed from newsletter system.",
"newsletterTrue": "You are subscribed to newsletter system."
"newsletterFalse": "You will not receive any information by e-mail.",
"newsletterTrue": "You will receive information by e-mail."
},
"password": {
"change-password": "Change password",

View File

@ -0,0 +1,24 @@
import { communityInfo } from '../graphql/queries'
export const getCommunityInfoMixin = {
methods: {
getCommunityInfo() {
if (this.$store.state.community.name === '') {
this.$apollo
.query({
query: communityInfo,
})
.then((result) => {
this.$store.commit('community', result.data.getCommunityInfo)
return result.data.getCommunityInfo
})
.catch((error) => {
this.$toasted.error(error.message)
})
}
},
},
created() {
this.getCommunityInfo()
},
}
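The mixin only queries while `$store.state.community.name` is the empty-string sentinel the store now initializes, so components can opt in without causing duplicate requests. A sketch of wiring it into a view (the component name is illustrative):

```ts
import { getCommunityInfoMixin } from '../mixins/getCommunityInfo' // path relative to the view

// hypothetical component; created() in the mixin triggers the fetch at most once
export default {
  name: 'someAuthPage',
  mixins: [getCommunityInfoMixin],
}
```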

View File

@ -0,0 +1,25 @@
import dashboardPlugin from './dashboard-plugin.js'
import Vue from 'vue'
import GlobalComponents from './globalComponents'
import GlobalDirectives from './globalDirectives'
jest.mock('./globalComponents')
jest.mock('./globalDirectives')
jest.mock('vue')
const vueUseMock = jest.fn()
Vue.use = vueUseMock
describe('dashboard plugin', () => {
dashboardPlugin.install(Vue)
it('installs the global components', () => {
expect(vueUseMock).toBeCalledWith(GlobalComponents)
})
it('installs the global directives', () => {
expect(vueUseMock).toBeCalledWith(GlobalDirectives)
})
})

View File

@ -89,7 +89,10 @@ export const store = new Vuex.Store({
token: null,
coinanimation: true,
newsletterState: null,
community: null,
community: {
name: '',
description: '',
},
hasElopage: false,
publisherId: null,
},

View File

@ -59,7 +59,7 @@ export const loadAllRules = (i18nCallback) => {
},
})
.then((result) => {
return result.data.checkUsername.state === 'success'
return result.data.checkUsername
})
.catch(() => {
return false

View File

@ -5,10 +5,10 @@
<div class="header-body text-center mb-7">
<b-row class="justify-content-center">
<b-col xl="5" lg="6" md="8" class="px-2">
<h1>{{ $t('checkEmail.title') }}</h1>
<h1>{{ $t('site.checkEmail.title') }}</h1>
<div class="pb-4" v-if="!pending">
<span v-if="!authenticated">
{{ $t('checkEmail.errorText') }}
{{ $t('site.checkEmail.errorText') }}
</span>
</div>
</b-col>

View File

@ -1,4 +1,4 @@
import { mount, RouterLinkStub } from '@vue/test-utils'
import { RouterLinkStub, mount } from '@vue/test-utils'
import flushPromises from 'flush-promises'
import Login from './Login'
@ -39,10 +39,8 @@ describe('Login', () => {
commit: mockStoreCommit,
state: {
community: {
name: 'Gradido Entwicklung',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
name: '',
description: '',
},
publisherId: 12345,
},
@ -74,10 +72,6 @@ describe('Login', () => {
wrapper = Wrapper()
})
it('renders the Login form', () => {
expect(wrapper.find('div.login-form').exists()).toBeTruthy()
})
it('commits the community info to the store', () => {
expect(mockStoreCommit).toBeCalledWith('community', {
name: 'test12',
@ -87,6 +81,10 @@ describe('Login', () => {
})
})
it('renders the Login form', () => {
expect(wrapper.find('div.login-form').exists()).toBeTruthy()
})
describe('communities gives back error', () => {
beforeEach(() => {
apolloQueryMock.mockRejectedValue({
@ -106,7 +104,18 @@ describe('Login', () => {
})
})
describe('Community Data', () => {
describe('Community data already loaded', () => {
beforeEach(() => {
jest.clearAllMocks()
mocks.$store.state.community = {
name: 'Gradido Entwicklung',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
}
wrapper = Wrapper()
})
it('has a Community name', () => {
expect(wrapper.find('.test-communitydata b').text()).toBe('Gradido Entwicklung')
})
@ -116,6 +125,10 @@ describe('Login', () => {
'Die lokale Entwicklungsumgebung von Gradido.',
)
})
it('does not call community data update', () => {
expect(apolloQueryMock).not.toBeCalled()
})
})
describe('links', () => {

View File

@ -62,7 +62,8 @@
<script>
import InputPassword from '../../components/Inputs/InputPassword'
import InputEmail from '../../components/Inputs/InputEmail'
import { login, communityInfo } from '../../graphql/queries'
import { login } from '../../graphql/queries'
import { getCommunityInfoMixin } from '../../mixins/getCommunityInfo'
export default {
name: 'login',
@ -70,6 +71,7 @@ export default {
InputPassword,
InputEmail,
},
mixins: [getCommunityInfoMixin],
data() {
return {
form: {
@ -107,21 +109,6 @@ export default {
this.$toasted.error(this.$t('error.no-account'))
})
},
async onCreated() {
this.$apollo
.query({
query: communityInfo,
})
.then((result) => {
this.$store.commit('community', result.data.getCommunityInfo)
})
.catch((error) => {
this.$toasted.error(error.message)
})
},
},
created() {
this.onCreated()
},
}
</script>

View File

@ -5,6 +5,19 @@ import Register from './Register'
const localVue = global.localVue
const apolloQueryMock = jest.fn().mockResolvedValue({
data: {
getCommunityInfo: {
name: 'test12',
description: 'test community 12',
url: 'http://test12.test12/',
registerUrl: 'http://test12.test12/vue/register',
},
},
})
const toastErrorMock = jest.fn()
const mockStoreCommit = jest.fn()
const registerUserMutationMock = jest.fn()
const routerPushMock = jest.fn()
@ -21,20 +34,23 @@ describe('Register', () => {
},
$apollo: {
mutate: registerUserMutationMock,
query: apolloQueryMock,
},
$store: {
commit: mockStoreCommit,
state: {
email: 'peter@lustig.de',
language: 'en',
community: {
name: 'Gradido Entwicklung',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
name: '',
description: '',
},
publisherId: 12345,
},
},
$toasted: {
error: toastErrorMock,
},
}
const stubs = {
@ -50,6 +66,15 @@ describe('Register', () => {
wrapper = Wrapper()
})
it('commits the community info to the store', () => {
expect(mockStoreCommit).toBeCalledWith('community', {
name: 'test12',
description: 'test community 12',
url: 'http://test12.test12/',
registerUrl: 'http://test12.test12/vue/register',
})
})
it('renders the Register form', () => {
expect(wrapper.find('div#registerform').exists()).toBeTruthy()
})
@ -60,16 +85,44 @@ describe('Register', () => {
})
})
describe('Community Data', () => {
it('has a Community name?', () => {
describe('communities gives back error', () => {
beforeEach(() => {
apolloQueryMock.mockRejectedValue({
message: 'Failed to get communities',
})
wrapper = Wrapper()
})
it('toasts an error message', () => {
expect(toastErrorMock).toBeCalledWith('Failed to get communities')
})
})
describe('Community data already loaded', () => {
beforeEach(() => {
jest.clearAllMocks()
mocks.$store.state.community = {
name: 'Gradido Entwicklung',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
}
wrapper = Wrapper()
})
it('has a Community name', () => {
expect(wrapper.find('.test-communitydata b').text()).toBe('Gradido Entwicklung')
})
it('has a Community description?', () => {
it('has a Community description', () => {
expect(wrapper.find('.test-communitydata p').text()).toBe(
'Die lokale Entwicklungsumgebung von Gradido.',
)
})
it('does not call community data update', () => {
expect(apolloQueryMock).not.toBeCalled()
})
})
describe('links', () => {

View File

@ -161,10 +161,12 @@ import InputEmail from '../../components/Inputs/InputEmail.vue'
import InputPasswordConfirmation from '../../components/Inputs/InputPasswordConfirmation.vue'
import LanguageSwitchSelect from '../../components/LanguageSwitchSelect.vue'
import { registerUser } from '../../graphql/mutations'
import { getCommunityInfoMixin } from '../../mixins/getCommunityInfo'
export default {
components: { InputPasswordConfirmation, InputEmail, LanguageSwitchSelect },
name: 'register',
mixins: [getCommunityInfoMixin],
data() {
return {
form: {
@ -205,12 +207,6 @@ export default {
},
})
.then(() => {
this.form.email = ''
this.form.firstname = ''
this.form.lastname = ''
this.form.password.password = ''
this.form.password.passwordRepeat = ''
this.language = ''
this.$router.push('/thx/register')
})
.catch((error) => {
@ -226,7 +222,6 @@ export default {
this.form.lastname = ''
this.form.password.password = ''
this.form.password.passwordRepeat = ''
this.language = ''
},
},
computed: {

View File

@ -3,6 +3,19 @@ import RegisterCommunity from './RegisterCommunity'
const localVue = global.localVue
const apolloQueryMock = jest.fn().mockResolvedValue({
data: {
getCommunityInfo: {
name: 'test12',
description: 'test community 12',
url: 'http://test12.test12/',
registerUrl: 'http://test12.test12/vue/register',
},
},
})
const toastErrorMock = jest.fn()
const mockStoreCommit = jest.fn()
describe('RegisterCommunity', () => {
let wrapper
@ -11,16 +24,21 @@ describe('RegisterCommunity', () => {
locale: 'en',
},
$t: jest.fn((t) => t),
$apollo: {
query: apolloQueryMock,
},
$store: {
commit: mockStoreCommit,
state: {
community: {
name: 'Gradido Entwicklung',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
name: '',
description: '',
},
},
},
$toasted: {
error: toastErrorMock,
},
}
const stubs = {
@ -36,23 +54,56 @@ describe('RegisterCommunity', () => {
wrapper = Wrapper()
})
it('commits the community info to the store', () => {
expect(mockStoreCommit).toBeCalledWith('community', {
name: 'test12',
description: 'test community 12',
url: 'http://test12.test12/',
registerUrl: 'http://test12.test12/vue/register',
})
})
it('renders the Div Element "#register-community"', () => {
expect(wrapper.find('div#register-community').exists()).toBeTruthy()
})
describe('Displaying the current community info', () => {
it('has a current community name', () => {
expect(wrapper.find('.header h1').text()).toBe('Gradido Entwicklung')
describe('communities gives back error', () => {
beforeEach(() => {
apolloQueryMock.mockRejectedValue({
message: 'Failed to get communities',
})
wrapper = Wrapper()
})
it('has a current community description', () => {
expect(wrapper.find('.header p').text()).toBe(
it('toasts an error message', () => {
expect(toastErrorMock).toBeCalledWith('Failed to get communities')
})
})
describe('Community data already loaded', () => {
beforeEach(() => {
jest.clearAllMocks()
mocks.$store.state.community = {
name: 'Gradido Entwicklung',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
}
wrapper = Wrapper()
})
it('has a Community name', () => {
expect(wrapper.find('.justify-content-center h1').text()).toBe('Gradido Entwicklung')
})
it('has a Community description', () => {
expect(wrapper.find('.justify-content-center p').text()).toBe(
'Die lokale Entwicklungsumgebung von Gradido.',
)
})
it('has a current community location', () => {
expect(wrapper.find('.header p.community-location').text()).toBe('http://localhost/vue/')
it('does not call community data update', () => {
expect(apolloQueryMock).not.toBeCalled()
})
})

View File

@ -49,12 +49,11 @@
</div>
</template>
<script>
import { getCommunityInfoMixin } from '../../mixins/getCommunityInfo'
export default {
name: 'registerCommunity',
data() {
return {}
},
methods: {},
mixins: [getCommunityInfoMixin],
}
</script>
<style></style>

View File

@ -1,4 +1,5 @@
import { mount, RouterLinkStub } from '@vue/test-utils'
import { communities, communityInfo } from '../../graphql/queries'
import RegisterSelectCommunity from './RegisterSelectCommunity'
const localVue = global.localVue
@ -11,35 +12,48 @@ const spinnerMock = jest.fn(() => {
}
})
const apolloQueryMock = jest.fn().mockResolvedValue({
data: {
communities: [
{
id: 1,
name: 'Gradido Entwicklung',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register-community',
const apolloQueryMock = jest
.fn()
.mockResolvedValueOnce({
data: {
getCommunityInfo: {
name: 'test12',
description: 'test community 12',
url: 'http://test12.test12/',
registerUrl: 'http://test12.test12/vue/register',
},
{
id: 2,
name: 'Gradido Staging',
description: 'Der Testserver der Gradido-Akademie.',
url: 'https://stage1.gradido.net/vue/',
registerUrl: 'https://stage1.gradido.net/vue/register-community',
},
{
id: 3,
name: 'Gradido-Akademie',
description: 'Freies Institut für Wirtschaftsbionik.',
url: 'https://gradido.net',
registerUrl: 'https://gdd1.gradido.com/vue/register-community',
},
],
},
})
},
})
.mockResolvedValue({
data: {
communities: [
{
id: 1,
name: 'Gradido Entwicklung',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register-community',
},
{
id: 2,
name: 'Gradido Staging',
description: 'Der Testserver der Gradido-Akademie.',
url: 'https://stage1.gradido.net/vue/',
registerUrl: 'https://stage1.gradido.net/vue/register-community',
},
{
id: 3,
name: 'Gradido-Akademie',
description: 'Freies Institut für Wirtschaftsbionik.',
url: 'https://gradido.net',
registerUrl: 'https://gdd1.gradido.com/vue/register-community',
},
],
},
})
const toasterMock = jest.fn()
const mockStoreCommit = jest.fn()
describe('RegisterSelectCommunity', () => {
let wrapper
@ -50,12 +64,11 @@ describe('RegisterSelectCommunity', () => {
},
$t: jest.fn((t) => t),
$store: {
commit: mockStoreCommit,
state: {
community: {
name: 'Gradido Entwicklung',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
name: '',
description: '',
},
},
},
@ -80,9 +93,23 @@ describe('RegisterSelectCommunity', () => {
describe('mount', () => {
beforeEach(() => {
jest.clearAllMocks()
wrapper = Wrapper()
})
it('calls the API to get the community info data', () => {
expect(apolloQueryMock).toBeCalledWith({
query: communityInfo,
})
})
it('calls the API to get the communities data', () => {
expect(apolloQueryMock).toBeCalledWith({
query: communities,
fetchPolicy: 'network-only',
})
})
it('renders the Div Element "#register-select-community"', () => {
expect(wrapper.find('div#register-select-community').exists()).toBeTruthy()
})
@ -91,8 +118,72 @@ describe('RegisterSelectCommunity', () => {
expect(spinnerMock).toBeCalled()
})
describe('communities gives back error', () => {
beforeEach(() => {
apolloQueryMock.mockRejectedValue({
message: 'Failed to get communities',
})
wrapper = Wrapper()
})
it('toasts an error message', () => {
expect(toasterMock).toBeCalledWith('Failed to get communities')
})
})
describe('Community data already loaded', () => {
beforeEach(() => {
jest.clearAllMocks()
mocks.$store.state.community = {
name: 'Gradido Entwicklung',
description: 'Die lokale Entwicklungsumgebung von Gradido.',
url: 'http://localhost/vue/',
registerUrl: 'http://localhost/vue/register-community',
}
wrapper = Wrapper()
})
it('does not call community info data when already filled', () => {
expect(apolloQueryMock).not.toBeCalledWith({
query: communityInfo,
})
})
it('has a Community name', () => {
expect(wrapper.find('.card-body b').text()).toBe('Gradido Entwicklung')
})
it('has a Community description', () => {
expect(wrapper.find('.card-body p').text()).toBe(
'Die lokale Entwicklungsumgebung von Gradido.',
)
})
})
describe('calls the apollo query', () => {
describe('server returns data', () => {
beforeEach(async () => {
wrapper = Wrapper()
await wrapper.setData({
communities: [
{
id: 2,
name: 'Gradido Staging',
description: 'Der Testserver der Gradido-Akademie.',
url: 'https://stage1.gradido.net/vue/',
registerUrl: 'https://stage1.gradido.net/vue/register-community',
},
{
id: 3,
name: 'Gradido-Akademie',
description: 'Freies Institut für Wirtschaftsbionik.',
url: 'https://gradido.net',
registerUrl: 'https://gdd1.gradido.com/vue/register-community',
},
],
})
})
it('calls the API to get the data', () => {
expect(apolloQueryMock).toBeCalled()
})

View File

@ -7,7 +7,7 @@
<b-card class="border-0 mb-0" bg-variant="primary">
<b>{{ $store.state.community.name }}</b>
<br />
{{ $store.state.community.description }}
<p>{{ $store.state.community.description }}</p>
<br />
<router-link to="/register">
<b-button variant="outline-secondary">
@ -24,7 +24,7 @@
<b-card bg-variant="secondary">
<b>{{ community.name }}</b>
<br />
{{ community.description }}
<p>{{ community.description }}</p>
<br />
<b>
<small>
@ -49,6 +49,7 @@
</template>
<script>
import { communities } from '../../graphql/queries'
import { getCommunityInfoMixin } from '../../mixins/getCommunityInfo'
export default {
name: 'registerSelectCommunity',
@ -58,6 +59,7 @@ export default {
pending: true,
}
},
mixins: [getCommunityInfoMixin],
methods: {
async getCommunities() {
const loader = this.$loading.show({

View File

@ -5,6 +5,5 @@ src/cpsp/*.h
src/cpsp/*.cpp
src/cpp/proto/
build*/
/skeema/gradido_login/insert/crypto_key.sql
src/LOCALE/messages.pot

View File

@ -56,8 +56,7 @@ To update messages.pot run
This will also be called by ./scripts/build_debug.sh
## database
Login-Server needs a db to run, it is tested with mariadb
table definitions are found in folder ./skeema/gradido_login
Login-Server needs a db to run, it is tested with mariadb.
Currently at least one group must be present in the `groups` table.
For example:
```sql

View File

@ -1,9 +0,0 @@
CREATE TABLE `app_access_tokens` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`user_id` int NOT NULL,
`access_code` bigint unsigned NOT NULL,
`created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `access_code` (`access_code`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -1,15 +0,0 @@
CREATE TABLE `elopage_buys` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`elopage_user_id` int DEFAULT NULL,
`affiliate_program_id` int NOT NULL,
`publisher_id` int NOT NULL,
`order_id` int NOT NULL,
`product_id` int NOT NULL,
`product_price` int NOT NULL,
`payer_email` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
`publisher_email` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
`payed` tinyint NOT NULL,
`success_date` datetime NOT NULL,
`event` varchar(255) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -1,11 +0,0 @@
CREATE TABLE `email_opt_in` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`user_id` int NOT NULL,
`verification_code` bigint unsigned NOT NULL,
`email_opt_in_type_id` int NOT NULL,
`created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`resend_count` int DEFAULT '0',
`updated` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `verification_code` (`verification_code`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -1,6 +0,0 @@
CREATE TABLE `email_opt_in_types` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(255) NOT NULL,
`description` varchar(255) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -1,11 +0,0 @@
CREATE TABLE `groups` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`alias` varchar(190) NOT NULL,
`name` varchar(255) NOT NULL,
`url` varchar(255) NOT NULL,
`host` varchar(255) DEFAULT "/",
`home` varchar(255) DEFAULT "/",
`description` text,
PRIMARY KEY (`id`),
UNIQUE KEY `alias` (`alias`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -1,13 +0,0 @@
CREATE TABLE `pending_tasks` (
`id` int UNSIGNED NOT NULL AUTO_INCREMENT,
`user_id` int UNSIGNED DEFAULT 0,
`request` varbinary(2048) NOT NULL,
`created` datetime NOT NULL,
`finished` datetime DEFAULT '2000-01-01 000000',
`result_json` text DEFAULT NULL,
`param_json` text DEFAULT NULL,
`task_type_id` int UNSIGNED NOT NULL,
`child_pending_task_id` int UNSIGNED DEFAULT 0,
`parent_pending_task_id` int UNSIGNED DEFAULT 0,
PRIMARY KEY (`id`)
) ENGINE = InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -1,7 +0,0 @@
CREATE TABLE `roles` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(255) NOT NULL,
`description` varchar(255) NOT NULL,
`flags` bigint NOT NULL DEFAULT '0',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -1,7 +0,0 @@
CREATE TABLE `user_backups` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`user_id` int NOT NULL,
`passphrase` text NOT NULL,
`mnemonic_type` int DEFAULT '-1',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -1,6 +0,0 @@
CREATE TABLE `user_roles` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`user_id` int NOT NULL,
`role_id` int NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -1,21 +0,0 @@
CREATE TABLE `users` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`email` varchar(191) NOT NULL,
`first_name` varchar(150) NOT NULL,
`last_name` varchar(255) DEFAULT '',
`username` varchar(255) DEFAULT '',
`description` text DEFAULT '',
`password` bigint unsigned DEFAULT '0',
`pubkey` binary(32) DEFAULT NULL,
`privkey` binary(80) DEFAULT NULL,
`email_hash` binary(32) DEFAULT NULL,
`created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`email_checked` tinyint NOT NULL DEFAULT '0',
`passphrase_shown` tinyint NOT NULL DEFAULT '0',
`language` varchar(4) NOT NULL DEFAULT 'de',
`disabled` tinyint DEFAULT '0',
`group_id` int unsigned DEFAULT 0,
`publisher_id` int DEFAULT 0,
PRIMARY KEY (`id`),
UNIQUE KEY `email` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -13,6 +13,7 @@
#include "JsonCreateTransaction.h"
#include "JsonCreateUser.h"
#include "JsonGetLogin.h"
#include "JsonSignTransaction.h"
#include "JsonUnknown.h"
#include "JsonGetRunningUserTasks.h"
#include "JsonGetUsers.h"
@ -77,6 +78,9 @@ Poco::Net::HTTPRequestHandler* JsonRequestHandlerFactory::createRequestHandler(c
else if (url_first_part == "/checkSessionState") {
return new JsonCheckSessionState;
}
else if (url_first_part == "/signTransaction") {
return new JsonSignTransaction;
}
else if (url_first_part == "/checkUsername") {
return new JsonCheckUsername;
}

View File

@ -0,0 +1,48 @@
#include "JsonSignTransaction.h"
#include "lib/DataTypeConverter.h"
Poco::JSON::Object* JsonSignTransaction::handle(Poco::Dynamic::Var params)
{
auto result = checkAndLoadSession(params);
if (result) {
return result;
}
std::string bodyBytes_base64;
auto mm = MemoryManager::getInstance();
// if is json object
if (params.type() == typeid(Poco::JSON::Object::Ptr)) {
Poco::JSON::Object::Ptr paramJsonObject = params.extract<Poco::JSON::Object::Ptr>();
/// Throws a RangeException if the value does not fit
/// into the result variable.
/// Throws a NotImplementedException if conversion is
/// not available for the given type.
/// Throws InvalidAccessException if Var is empty.
try {
paramJsonObject->get("bodyBytes").convert(bodyBytes_base64);
}
catch (Poco::Exception& ex) {
return stateError("json exception", ex.displayText());
}
}
auto user = mSession->getNewUser();
auto keyPair = user->getGradidoKeyPair();
if (!keyPair) {
return stateError("error reading keys");
}
auto bodyBytes = DataTypeConverter::base64ToBin(bodyBytes_base64);
// sign the decoded transaction body, not the base64 string
auto sign = keyPair->sign(bodyBytes);
mm->releaseMemory(bodyBytes);
if (!sign) {
return stateError("error signing transaction");
}
auto sign_base64 = DataTypeConverter::binToBase64(sign);
mm->releaseMemory(sign);
result = stateSuccess();
result->set("sign", sign_base64);
return result;
}
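The handler (registered under `/signTransaction` in the factory above) expects a base64-encoded `bodyBytes` field and returns a base64 `sign` field on success. A hedged client sketch; the host, the session parameter consumed by `checkAndLoadSession`, and the error handling are assumptions, not taken from this diff:

```ts
// hedged sketch of a client call; host, session field and error shape are assumptions
async function signTransaction(bodyBytes: Uint8Array, sessionId: number): Promise<string> {
  const response = await fetch('http://login-server/signTransaction', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      session_id: sessionId, // assumed field checked by checkAndLoadSession
      bodyBytes: Buffer.from(bodyBytes).toString('base64'),
    }),
  })
  const result = await response.json()
  if (result.state !== 'success') {
    throw new Error('signTransaction failed')
  }
  return result.sign // base64-encoded signature
}
```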

View File

@ -0,0 +1,15 @@
#ifndef __JSON_INTERFACE_JSON_SIGN_TRANSACTION_
#define __JSON_INTERFACE_JSON_SIGN_TRANSACTION_
#include "JsonRequestHandler.h"
class JsonSignTransaction : public JsonRequestHandler
{
public:
Poco::JSON::Object* handle(Poco::Dynamic::Var params);
protected:
};
#endif // __JSON_INTERFACE_JSON_SIGN_TRANSACTION_

View File

@ -58,7 +58,7 @@ bool EmailManager::init(const Poco::Util::LayeredConfiguration& cfg)
void EmailManager::addEmail(model::Email* email) {
if (mDisableEmail) {
std::string dateTimeString = Poco::DateTimeFormatter::format(Poco::DateTime(), "%d.%m.%y %H:%M:%S");
std::string log_message = dateTimeString + " Email should be sended to: ";
std::string log_message = dateTimeString + " Email should have been sent to: ";
auto email_user = email->getUser();
Poco::AutoPtr<model::table::User> email_model;
if (email_user) {

View File

@ -148,7 +148,7 @@ Session* SessionManager::getNewSession(int* handle)
mWorkingMutex.tryLock(500);
}
catch (Poco::TimeoutException &ex) {
printf("[%s] exception timout mutex: %s\n", functionName, ex.displayText().data());
printf("[%s] exception timeout mutex: %s\n", functionName, ex.displayText().data());
return nullptr;
}
//mWorkingMutex.lock();

View File

@ -69,7 +69,7 @@ namespace controller {
using namespace Poco::Data::Keywords;
Poco::Data::Statement select(session);
select << "SELECT id, first_name, last_name, email, username, description, pubkey, created, email_checked, disabled, group_id FROM " << db->getTableName();
select << "SELECT id, first_name, last_name, email, username, description, pubkey, created, email_checked, disabled, group_id, publisher_id FROM " << db->getTableName();
select << " where email_checked = 0 ";
select, into(resultFromDB);
if (searchString != "") {
@ -439,6 +439,7 @@ namespace controller {
return 0;
auto cm = ConnectionManager::getInstance();
auto em = ErrorManager::getInstance();
auto db = new model::table::User();
static const char* function_name = "User::checkIfVerificationEmailsShouldBeResend";
auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
@ -446,8 +447,9 @@ namespace controller {
std::vector<Poco::Tuple<int,Poco::DateTime>> results;
int email_checked = 0;
int resend_count = 1;
select << "select u.id, v.created from users as u "
<< "LEFT JOIN email_opt_in as v ON(u.id = v.user_id) "
std::string table_name_email_opt_in = "login_email_opt_in";
select << "select u.id, v.created from " << db->getTableName() << " as u "
<< "LEFT JOIN " << table_name_email_opt_in << " as v ON(u.id = v.user_id) "
<< "where u.email_checked = ? "
<< "AND v.resend_count <= ? "
<< "ORDER BY u.id, v.created " ,
@ -519,14 +521,15 @@ namespace controller {
{
auto cm = ConnectionManager::getInstance();
auto em = ErrorManager::getInstance();
auto db = new model::table::User();
static const char* function_name = "User::addMissingEmailHashes";
auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
Poco::Data::Statement select(session);
std::vector<Poco::Tuple<int, std::string>> results;
select << "select id, email from users "
<< "where email_hash IS NULL "
select << "select id, email from " << db->getTableName()
<< " where email_hash IS NULL "
, Poco::Data::Keywords::into(results)
;
int result_count = 0;
@ -556,7 +559,7 @@ namespace controller {
// update db
// reuse connection, I hope it's working
Poco::Data::Statement update(session);
update << "UPDATE users set email_hash = ? where id = ?"
update << "UPDATE " << db->getTableName() << " set email_hash = ? where id = ?"
, Poco::Data::Keywords::use(updates);
int updated_count = 0;
try {

View File

@ -19,7 +19,7 @@ namespace model {
// generic db operations
const char* getTableName() const { return "app_access_tokens"; }
const char* getTableName() const { return "login_app_access_tokens"; }
std::string toString();
inline Poco::UInt64 getCode() const { return mAccessCode; }

View File

@ -34,7 +34,7 @@ namespace model {
ElopageBuy();
// generic db operations
const char* getTableName() const { return "elopage_buys"; }
const char* getTableName() const { return "login_elopage_buys"; }
std::string toString();

View File

@ -29,7 +29,7 @@ namespace model {
// generic db operations
const char* getTableName() const { return "email_opt_in"; }
const char* getTableName() const { return "login_email_opt_in"; }
std::string toString();
inline Poco::UInt64 getCode() const { return mEmailVerificationCode; }

View File

@ -17,7 +17,7 @@ namespace model {
Group(GroupTuple userTuple);
// generic db operations
const char* getTableName() const { return "groups"; }
const char* getTableName() const { return "login_groups"; }
std::string toString();
inline const std::string& getAlias() const { return mAlias; }

View File

@ -30,7 +30,7 @@ namespace model {
// generic db operations
const char* getTableName() const { return "pending_tasks"; }
const char* getTableName() const { return "login_pending_tasks"; }
std::string toString();
//! \brief update table row with current request

Some files were not shown because too many files have changed in this diff