Merge branch 'master' into 1036-register-page-breaks-without-community
This commit is contained in: commit 6a79ba35c4

.github/workflows/test.yml | 45 (vendored)
@@ -147,7 +147,7 @@ jobs:
 ##########################################################################
 - name: mariadb | Build `test` image
 run: |
-docker build --target mariadb_server_test -t "gradido/mariadb:test" -f ./mariadb/Dockerfile ./
+docker build --target mariadb_server -t "gradido/mariadb:test" -f ./mariadb/Dockerfile ./
 docker save "gradido/mariadb:test" > /tmp/mariadb.tar
 - name: Upload Artifact
 uses: actions/upload-artifact@v2
@@ -344,7 +344,7 @@ jobs:
 report_name: Coverage Frontend
 type: lcov
 result_path: ./coverage/lcov.info
-min_coverage: 83
+min_coverage: 85
 token: ${{ github.token }}

 ##############################################################################
@@ -353,7 +353,7 @@ jobs:
 unit_test_backend:
 name: Unit tests - Backend
 runs-on: ubuntu-latest
-needs: [build_test_backend]
+needs: [build_test_backend,build_test_mariadb]
 steps:
 ##########################################################################
 # CHECKOUT CODE ##########################################################
@@ -363,6 +363,13 @@ jobs:
 ##########################################################################
 # DOWNLOAD DOCKER IMAGES #################################################
 ##########################################################################
+- name: Download Docker Image (Mariadb)
+uses: actions/download-artifact@v2
+with:
+name: docker-mariadb-test
+path: /tmp
+- name: Load Docker Image
+run: docker load < /tmp/mariadb.tar
 - name: Download Docker Image (Backend)
 uses: actions/download-artifact@v2
 with:
@@ -373,10 +380,11 @@ jobs:
 ##########################################################################
 # UNIT TESTS BACKEND #####################################################
 ##########################################################################
-- name: backend | Unit tests
-run: |
-docker run -v ~/coverage:/app/coverage --rm gradido/backend:test yarn run test
-cp -r ~/coverage ./coverage
+- name: backend | docker-compose
+run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb database
+- name: backend Unit tests | test
+run: cd database && yarn && cd ../backend && yarn && yarn test
+# run: docker-compose -f docker-compose.yml -f docker-compose.test.yml exec -T backend yarn test
 ##########################################################################
 # COVERAGE CHECK BACKEND #################################################
 ##########################################################################
@@ -385,8 +393,8 @@ jobs:
 with:
 report_name: Coverage Backend
 type: lcov
-result_path: ./coverage/lcov.info
-min_coverage: 1
+result_path: ./backend/coverage/lcov.info
+min_coverage: 45
 token: ${{ github.token }}

 ##############################################################################
@@ -409,10 +417,16 @@ jobs:
 --health-timeout=3s
 --health-retries=4
 steps:
-- name: Debug service
-run: echo "$(docker ps)"
-- name: Debug container choosing script
-run: echo "$(docker container ls | grep mariadb | awk '{ print $1 }')"
+# - name: Debug service
+# run: echo "$(docker ps)"
+#- name: Debug container choosing script
+# run: echo "$(docker container ls | grep mariadb | awk '{ print $1 }')"
+- name: get mariadb container id
+run: echo "::set-output name=id::$(docker container ls | grep mariadb | awk '{ print $1 }')"
+id: mariadb_container
+- name: get automatic created network
+run: echo "::set-output name=id::$(docker network ls | grep github_network | awk '{ print $1 }')"
+id: network
 ##########################################################################
 # CHECKOUT CODE ##########################################################
 ##########################################################################
@@ -420,6 +434,11 @@ jobs:
 uses: actions/checkout@v2
 with:
 submodules: true
+# Database migration
+- name: Start database migration
+run: |
+docker build --target production_up -t "gradido/database:production_up" database/
+docker run --network ${{ steps.network.outputs.id }} --name=database --env NODE_ENV=production --env DB_HOST=mariadb --env DB_DATABASE=gradido_community_test -d gradido/database:production_up
 ##########################################################################
 # Build Login-Server Test Docker image ###################################
 ##########################################################################
.gitignore | 1 (vendored)
@@ -2,7 +2,6 @@
 /node_modules/*
 .vscode
 messages.pot
-.skeema
 nbproject
 .metadata
 /.env

.vscode/extensions.json | 7 (vendored, new file)
@@ -0,0 +1,7 @@
+{
+  "recommendations": [
+    "streetsidesoftware.code-spell-checker",
+    "dbaeumer.vscode-eslint",
+    "esbenp.prettier-vscode"
+  ]
+}
README.md | 66
@@ -8,33 +8,73 @@ The Gradido model can create global prosperity and peace
 The Corona crisis has fundamentally changed our world within a very short time.
 The dominant financial system threatens to fail around the globe, followed by mass insolvencies, record unemployment and abject poverty. Only with a sustainable new monetary system can humanity master these challenges of the 21st century. The Gradido Academy for Bionic Economy has developed such a system.

+Find out more about the Project on its [Website](https://gradido.net/). It is offering vast resources about the idea. The remaining document will discuss the gradido software only.
 ## Software requirements

-Currently we only support `docker` as environment to run all services, since many different programming languages and frameworks are used.
+Currently we only support `docker` install instructions to run all services, since many different programming languages and frameworks are used.

 - [docker](https://www.docker.com/)
+- [docker-compose]
+
+### For Arch Linux
+Install the required packages:
+```bash
+sudo pacman -S docker
+sudo pacman -S docker-compose
+```
+
+Add group `docker` and then your user to it in order to allow you to run docker without sudo
+```bash
+sudo groupadd docker # may already exist `groupadd: group 'docker' already exists`
+sudo usermod -aG docker $USER
+groups # verify you have the group (requires relog)
+```
+
+Start the docker service:
+```bash
+sudo systemctl start docker
+```
+
 ## How to run?

-1. Clone the repo and pull all submodules
+### 1. Clone Sources
+Clone the repo and pull all submodules
 ```bash
 git clone git@github.com:gradido/gradido.git
 git submodule update --recursive --init
 ```

-2. Run docker compose
-1. Run docker compose for the debug build
+### 2. Run docker-compose
+Run docker-compose to bring up the development environment
+```bash
+docker-compose up
+```
+### Additional Build options
+If you want to build for production you can do this as well:
+```bash
+docker-compose -f docker-compose.yml up
+```
+
-```bash
-docker-compose up
-```
+## Services defined in this package

-2. Or run docker compose in production build
+- [frontend](./frontend) Wallet frontend
+- [backend](./backend) GraphQL & Business logic backend
+- [mariadb](./mariadb) Database backend
+- [login_server](./login_server) User credential storage & business logic backend
+- [community_server](./community_server/) Business logic backend
+
-```bash
-docker-compose -f docker-compose.yml up
-```
+We are currently restructuring the service to reduce dependencies and unify business logic into one place. Furthermore the databases defined for each service will be unified into one.
+### Open the wallet
+
+Once you have `docker-compose` up and running, you can open [http://localhost/vue](http://localhost/vue) and create yourself a new wallet account.
+
+## Troubleshooting
+
+| Problem | Issue | Solution | Description |
+| ------- | ----- | -------- | ----------- |
+| docker-compose raises database connection errors | [#1062](https://github.com/gradido/gradido/issues/1062) | End `ctrl+c` and restart the `docker-compose up` after a successful build | Several Database connection related errors occur in the docker-compose log. |
+| Wallet page is empty | [#1063](https://github.com/gradido/gradido/issues/1063) | Accept Cookies and Local Storage in your Browser | The page stays empty when navigating to [http://localhost/vue](http://localhost/vue) |
+
 ## Useful Links

@@ -85,7 +85,7 @@ RUN cd ../database && yarn run build
 FROM build as test

 # Run command
-CMD /bin/sh -c "yarn run dev"
+CMD /bin/sh -c "yarn run start"

 ##################################################################################
 # PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
@@ -13,11 +13,12 @@
 "start": "node build/index.js",
 "dev": "nodemon -w src --ext ts --exec ts-node src/index.ts",
 "lint": "eslint . --ext .js,.ts",
-"test": "jest --coverage"
+"test": "jest --runInBand --coverage "
 },
 "dependencies": {
 "@types/jest": "^27.0.2",
 "apollo-server-express": "^2.25.2",
+"apollo-server-testing": "^2.25.2",
 "axios": "^0.21.1",
 "class-validator": "^0.13.1",
 "cors": "^2.8.5",
@@ -4,7 +4,4 @@ import { ArgsType, Field } from 'type-graphql'
 export default class CheckUsernameArgs {
 @Field(() => String)
 username: string
-
-@Field(() => Number, { nullable: true })
-groupId?: number
 }
@@ -1,21 +0,0 @@
-/* eslint-disable @typescript-eslint/no-explicit-any */
-/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
-import { ObjectType, Field } from 'type-graphql'
-
-@ObjectType()
-export class CheckUsernameResponse {
-  constructor(json: any) {
-    this.state = json.state
-    this.msg = json.msg
-    this.groupId = json.group_id
-  }
-
-  @Field(() => String)
-  state: string
-
-  @Field(() => String)
-  msg?: string
-
-  @Field(() => Number)
-  groupId?: number
-}
backend/src/graphql/resolver/CommunityResolver.test.ts | 123 (new file)
@@ -0,0 +1,123 @@
+/* eslint-disable @typescript-eslint/no-explicit-any */
+/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
+
+import { createTestClient } from 'apollo-server-testing'
+import createServer from '../../server/createServer'
+import CONFIG from '../../config'
+
+jest.mock('../../config')
+
+let query: any
+
+// to do: We need a setup for the tests that closes the connection
+let con: any
+
+beforeAll(async () => {
+  const server = await createServer({})
+  con = server.con
+  query = createTestClient(server.apollo).query
+})
+
+afterAll(async () => {
+  await con.close()
+})
+
+describe('CommunityResolver', () => {
+  const getCommunityInfoQuery = `
+    query {
+      getCommunityInfo {
+        name
+        description
+        url
+        registerUrl
+      }
+    }
+  `
+
+  const communities = `
+    query {
+      communities {
+        id
+        name
+        url
+        description
+        registerUrl
+      }
+    }
+  `
+
+  describe('getCommunityInfo', () => {
+    it('returns the default values', async () => {
+      expect(query({ query: getCommunityInfoQuery })).resolves.toMatchObject({
+        data: {
+          getCommunityInfo: {
+            name: 'Gradido Entwicklung',
+            description: 'Die lokale Entwicklungsumgebung von Gradido.',
+            url: 'http://localhost/vue/',
+            registerUrl: 'http://localhost/vue/register',
+          },
+        },
+      })
+    })
+  })
+
+  describe('communities', () => {
+    describe('PRODUCTION = false', () => {
+      beforeEach(() => {
+        CONFIG.PRODUCTION = false
+      })
+
+      it('returns three communities', async () => {
+        expect(query({ query: communities })).resolves.toMatchObject({
+          data: {
+            communities: [
+              {
+                id: 1,
+                name: 'Gradido Entwicklung',
+                description: 'Die lokale Entwicklungsumgebung von Gradido.',
+                url: 'http://localhost/vue/',
+                registerUrl: 'http://localhost/vue/register-community',
+              },
+              {
+                id: 2,
+                name: 'Gradido Staging',
+                description: 'Der Testserver der Gradido-Akademie.',
+                url: 'https://stage1.gradido.net/vue/',
+                registerUrl: 'https://stage1.gradido.net/vue/register-community',
+              },
+              {
+                id: 3,
+                name: 'Gradido-Akademie',
+                description: 'Freies Institut für Wirtschaftsbionik.',
+                url: 'https://gradido.net',
+                registerUrl: 'https://gdd1.gradido.com/vue/register-community',
+              },
+            ],
+          },
+        })
+      })
+    })
+
+    describe('PRODUCTION = true', () => {
+      beforeEach(() => {
+        CONFIG.PRODUCTION = true
+      })
+
+      it('returns one community', async () => {
+        expect(query({ query: communities })).resolves.toMatchObject({
+          data: {
+            communities: [
+              {
+                id: 3,
+                name: 'Gradido-Akademie',
+                description: 'Freies Institut für Wirtschaftsbionik.',
+                url: 'https://gradido.net',
+                registerUrl: 'https://gdd1.gradido.com/vue/register-community',
+              },
+            ],
+          },
+        })
+      })
+    })
+  })
+})
@@ -3,8 +3,8 @@

 import { Resolver, Query, Args, Arg, Authorized, Ctx, UseMiddleware, Mutation } from 'type-graphql'
 import { from_hex as fromHex } from 'libsodium-wrappers'
+import { getCustomRepository } from 'typeorm'
 import CONFIG from '../../config'
-import { CheckUsernameResponse } from '../model/CheckUsernameResponse'
 import { LoginViaVerificationCode } from '../model/LoginViaVerificationCode'
 import { SendPasswordResetEmailResponse } from '../model/SendPasswordResetEmailResponse'
 import { UpdateUserInfosResponse } from '../model/UpdateUserInfosResponse'
@@ -22,10 +22,10 @@ import {
 klicktippNewsletterStateMiddleware,
 } from '../../middleware/klicktippMiddleware'
 import { CheckEmailResponse } from '../model/CheckEmailResponse'
-import { getCustomRepository } from 'typeorm'
 import { UserSettingRepository } from '../../typeorm/repository/UserSettingRepository'
 import { Setting } from '../enum/Setting'
 import { UserRepository } from '../../typeorm/repository/User'
+import { LoginUser } from '@entity/LoginUser'

 @Resolver()
 export class UserResolver {
@@ -275,15 +275,27 @@ export class UserResolver {
 return response
 }

-@Query(() => CheckUsernameResponse)
-async checkUsername(
-@Args() { username, groupId = 1 }: CheckUsernameArgs,
-): Promise<CheckUsernameResponse> {
-const response = await apiGet(
-CONFIG.LOGIN_API_URL + `checkUsername?username=${username}&group_id=${groupId}`,
-)
-if (!response.success) throw new Error(response.data)
-return new CheckUsernameResponse(response.data)
+@Query(() => Boolean)
+async checkUsername(@Args() { username }: CheckUsernameArgs): Promise<boolean> {
+// Username empty?
+if (username === '') {
+throw new Error('Username must be set.')
+}
+
+// Do we fulfil the minimum character length?
+const MIN_CHARACTERS_USERNAME = 2
+if (username.length < MIN_CHARACTERS_USERNAME) {
+throw new Error(`Username must be at minimum ${MIN_CHARACTERS_USERNAME} characters long.`)
+}
+
+const usersFound = await LoginUser.count({ username })
+
+// Username already present?
+if (usersFound !== 0) {
+throw new Error(`Username "${username}" already taken.`)
+}
+
+return true
 }

 @Query(() => CheckEmailResponse)
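The rewritten `checkUsername` resolver above now returns a plain `Boolean` and throws on empty, too-short, or already-taken usernames, but this diff adds no test for it. Below is a minimal sketch of how it could be covered, reusing the `apollo-server-testing` setup from the `CommunityResolver.test.ts` file added in this commit; the test names and the assumption that the test database contains no user called `unused_name` are illustrative only, not part of the commit.

```ts
// Illustrative sketch only – not part of this commit. It mirrors the
// apollo-server-testing setup used in CommunityResolver.test.ts above.
import { createTestClient } from 'apollo-server-testing'
import createServer from '../../server/createServer'

let query: any
let con: any

beforeAll(async () => {
  const server = await createServer({})
  con = server.con
  query = createTestClient(server.apollo).query
})

afterAll(async () => {
  await con.close()
})

describe('checkUsername', () => {
  it('rejects usernames shorter than two characters', async () => {
    const { errors } = await query({
      query: `query { checkUsername(username: "a") }`,
    })
    // The resolver now throws instead of returning a state object.
    expect(errors).not.toBeUndefined()
  })

  it('returns true for an unused username', async () => {
    // Assumes no LoginUser row with this username exists in the test database.
    const { data } = await query({
      query: `query { checkUsername(username: "unused_name") }`,
    })
    expect(data).toMatchObject({ checkUsername: true })
  })
})
```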
@@ -6,7 +6,7 @@ import isAuthorized from './directive/isAuthorized'

 const schema = async (): Promise<GraphQLSchema> => {
 return buildSchema({
-resolvers: [path.join(__dirname, 'resolver', `*.{js,ts}`)],
+resolvers: [path.join(__dirname, 'resolver', `!(*.test).{js,ts}`)],
 authChecker: isAuthorized,
 })
 }
@@ -1,64 +1,14 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */

-import 'reflect-metadata'
-import 'module-alias/register'
-import express from 'express'
-import { ApolloServer } from 'apollo-server-express'
-
+import createServer from './server/createServer'
 // config
 import CONFIG from './config'
-
-// database
-import connection from './typeorm/connection'
-import getDBVersion from './typeorm/getDBVersion'
-
-// server
-import cors from './server/cors'
-import context from './server/context'
-import plugins from './server/plugins'
-
-// graphql
-import schema from './graphql/schema'
-
-// TODO implement
-// import queryComplexity, { simpleEstimator, fieldConfigEstimator } from "graphql-query-complexity";
-
-const DB_VERSION = '0002-add_settings'
-
 async function main() {
-  // open mysql connection
-  const con = await connection()
-  if (!con || !con.isConnected) {
-    throw new Error(`Couldn't open connection to database`)
-  }
-
-  // check for correct database version
-  const dbVersion = await getDBVersion()
-  if (!dbVersion || dbVersion.indexOf(DB_VERSION) === -1) {
-    throw new Error(
-      `Wrong database version - the backend requires '${DB_VERSION}' but found '${
-        dbVersion || 'None'
-      }'`,
-    )
-  }
-
-  // Express Server
-  const server = express()
-
-  // cors
-  server.use(cors)
-
-  // Apollo Server
-  const apollo = new ApolloServer({
-    schema: await schema(),
-    playground: CONFIG.GRAPHIQL,
-    context,
-    plugins,
-  })
-  apollo.applyMiddleware({ app: server })
-
-  // Start Server
-  server.listen(CONFIG.PORT, () => {
+  const { app } = await createServer()
+
+  app.listen(CONFIG.PORT, () => {
     // eslint-disable-next-line no-console
     console.log(`Server is running at http://localhost:${CONFIG.PORT}`)
     if (CONFIG.GRAPHIQL) {
backend/src/server/createServer.ts | 64 (new file)
@@ -0,0 +1,64 @@
+/* eslint-disable @typescript-eslint/no-explicit-any */
+/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
+
+import 'reflect-metadata'
+import 'module-alias/register'
+
+import { ApolloServer } from 'apollo-server-express'
+import express from 'express'
+
+// database
+import connection from '../typeorm/connection'
+import getDBVersion from '../typeorm/getDBVersion'
+
+// server
+import cors from './cors'
+import serverContext from './context'
+import plugins from './plugins'
+
+// config
+import CONFIG from '../config'
+
+// graphql
+import schema from '../graphql/schema'
+
+// TODO implement
+// import queryComplexity, { simpleEstimator, fieldConfigEstimator } from "graphql-query-complexity";
+
+const DB_VERSION = '0004-login_server_data'
+
+const createServer = async (context: any = serverContext): Promise<any> => {
+  // open mysql connection
+  const con = await connection()
+  if (!con || !con.isConnected) {
+    throw new Error(`Couldn't open connection to database`)
+  }
+
+  // check for correct database version
+  const dbVersion = await getDBVersion()
+  if (!dbVersion || dbVersion.indexOf(DB_VERSION) === -1) {
+    throw new Error(
+      `Wrong database version - the backend requires '${DB_VERSION}' but found '${
+        dbVersion || 'None'
+      }'`,
+    )
+  }
+
+  // Express Server
+  const app = express()
+
+  // cors
+  app.use(cors)
+
+  // Apollo Server
+  const apollo = new ApolloServer({
+    schema: await schema(),
+    playground: CONFIG.GRAPHIQL,
+    context,
+    plugins,
+  })
+  apollo.applyMiddleware({ app })
+  return { apollo, app, con }
+}
+
+export default createServer
@@ -1333,6 +1333,13 @@ apollo-server-plugin-base@^0.13.0:
 dependencies:
 apollo-server-types "^0.9.0"

+apollo-server-testing@^2.25.2:
+  version "2.25.2"
+  resolved "https://registry.yarnpkg.com/apollo-server-testing/-/apollo-server-testing-2.25.2.tgz#0043e98b1a03720352e94b409215fb4782ae2e50"
+  integrity sha512-HjQV9wPbi/ZqpRbyyhNwCbaDnfjDM0hTRec5TOoOjurEZ/vh4hTPHwGkDZx3kbcWowhGxe2qoHM6KANSB/SxuA==
+  dependencies:
+    apollo-server-core "^2.25.2"
+
 apollo-server-types@^0.9.0:
 version "0.9.0"
 resolved "https://registry.npmjs.org/apollo-server-types/-/apollo-server-types-0.9.0.tgz"
@@ -15,7 +15,8 @@ class ServerUsersController extends AppController
 public function initialize()
 {
 parent::initialize();
-$this->Auth->allow(['add', 'edit']);
+// uncomment in devmode to add new community server admin user, but don't!!! commit it
+//$this->Auth->allow(['add', 'edit']);
 $this->Auth->deny('index');
 }

@@ -17,7 +17,7 @@ phpServer.host = nginx
 loginServer.path = http://localhost/account
 loginServer.default_locale = de
 loginServer.db.host = mariadb
-loginServer.db.name = gradido_login
+loginServer.db.name = gradido_community
 loginServer.db.user = root
 loginServer.db.password =
 loginServer.db.port = 3306

@@ -17,7 +17,7 @@ phpServer.host = nginx
 loginServer.path = http://localhost/account
 loginServer.default_locale = de
 loginServer.db.host = mariadb
-loginServer.db.name = gradido_login_test
+loginServer.db.name = gradido_community_test
 loginServer.db.user = root
 loginServer.db.password =
 loginServer.db.port = 3306
@@ -1,6 +0,0 @@
-INSERT INTO `groups` (`id`, `alias`, `name`, `url`, `host`, `home`, `description`) VALUES
-(1, 'docker', 'docker gradido group', 'localhost', 'nginx', '/', 'gradido test group for docker and stage2 with blockchain db');
-
-
-
-
database/entity/0003-login_server_tables/LoginUser.ts | 56 (new file)
@@ -0,0 +1,56 @@
+import { BaseEntity, Entity, PrimaryGeneratedColumn, Column } from 'typeorm'
+
+// Moriz: I do not like the idea of having two user tables
+@Entity('login_users')
+export class LoginUser extends BaseEntity {
+  @PrimaryGeneratedColumn('increment', { unsigned: true })
+  id: number
+
+  @Column({ length: 191, unique: true })
+  email: string
+
+  @Column({ name: 'first_name', length: 150 })
+  firstName: string
+
+  @Column({ name: 'last_name', length: 255, default: '' })
+  lastName: string
+
+  @Column({ length: 255, default: '' })
+  username: string
+
+  @Column({ default: '' })
+  description: string
+
+  @Column({ type: 'bigint', default: 0, unsigned: true })
+  password: string
+
+  @Column({ name: 'pubkey', type: 'binary', length: 32, default: null, nullable: true })
+  pubKey: Buffer
+
+  @Column({ name: 'privkey', type: 'binary', length: 80, default: null, nullable: true })
+  privKey: Buffer
+
+  @Column({ name: 'email_hash', type: 'binary', length: 32, default: null, nullable: true })
+  emailHash: Buffer
+
+  @Column({ name: 'created', default: () => 'CURRENT_TIMESTAMP' })
+  createdAt: Date
+
+  @Column({ name: 'email_checked', default: 0 })
+  emailChecked: boolean
+
+  @Column({ name: 'passphrase_shown', default: 0 })
+  passphraseShown: boolean
+
+  @Column({ length: 4, default: 'de' })
+  language: string
+
+  @Column({ default: 0 })
+  disabled: boolean
+
+  @Column({ name: 'group_id', default: 0, unsigned: true })
+  groupId: number
+
+  @Column({ name: 'publisher_id', default: 0 })
+  publisherId: number
+}
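`LoginUser` extends typeorm's `BaseEntity`, so callers can use the ActiveRecord-style static methods directly; this is what the `checkUsername` resolver change above relies on. A small usage sketch (not part of this commit; it assumes only the standard typeorm `BaseEntity` API and the `@entity` path alias used elsewhere in the backend):

```ts
// Sketch only: ActiveRecord-style access to the LoginUser entity defined above.
import { LoginUser } from '@entity/LoginUser'

// Returns true when a row with this username already exists,
// mirroring the check done in UserResolver.checkUsername.
export async function usernameTaken(username: string): Promise<boolean> {
  return (await LoginUser.count({ username })) !== 0
}
```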
database/entity/LoginUser.ts | 1 (new file)
@@ -0,0 +1 @@
+export { LoginUser } from './0003-login_server_tables/LoginUser'

@@ -1,4 +1,5 @@
 import { Balance } from './Balance'
+import { LoginUser } from './LoginUser'
 import { Migration } from './Migration'
 import { Transaction } from './Transaction'
 import { TransactionCreation } from './TransactionCreation'
@@ -9,6 +10,7 @@ import { UserTransaction } from './UserTransaction'

 export const entities = [
 Balance,
+LoginUser,
 Migration,
 Transaction,
 TransactionCreation,
@@ -11,7 +11,7 @@

 export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
 await queryFn(`
-CREATE TABLE IF NOT EXISTS \`user_setting\` (
+CREATE TABLE \`user_setting\` (
 \`id\` int(10) unsigned NOT NULL AUTO_INCREMENT,
 \`userId\` int(11) NOT NULL,
 \`key\` varchar(255) NOT NULL,
@@ -22,5 +22,5 @@ export async function upgrade(queryFn: (query: string, values?: any[]) => Promis

 export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
 // write downgrade logic as parameter of queryFn
-await queryFn(`DROP TABLE IF EXISTS \`user_setting\`;`)
+await queryFn(`DROP TABLE \`user_setting\`;`)
 }
database/migrations/0003-login_server_tables.ts | 153 (new file)
@@ -0,0 +1,153 @@
+/* FIRST MIGRATION
+ *
+ * This migration is special since it takes into account that
+ * the database can be setup already but also may not be.
+ * Therefore you will find all `CREATE TABLE` statements with
+ * a `IF NOT EXISTS`, all `INSERT` with an `IGNORE` and in the
+ * downgrade function all `DROP TABLE` with a `IF EXISTS`.
+ * This ensures compatibility for existing or non-existing
+ * databases.
+ */
+
+export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
+  await queryFn(`
+    CREATE TABLE \`login_app_access_tokens\` (
+      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
+      \`user_id\` int NOT NULL,
+      \`access_code\` bigint unsigned NOT NULL,
+      \`created\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+      \`updated\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+      PRIMARY KEY (\`id\`),
+      UNIQUE KEY \`access_code\` (\`access_code\`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+  await queryFn(`
+    CREATE TABLE \`login_elopage_buys\` (
+      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
+      \`elopage_user_id\` int DEFAULT NULL,
+      \`affiliate_program_id\` int NOT NULL,
+      \`publisher_id\` int NOT NULL,
+      \`order_id\` int NOT NULL,
+      \`product_id\` int NOT NULL,
+      \`product_price\` int NOT NULL,
+      \`payer_email\` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
+      \`publisher_email\` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
+      \`payed\` tinyint NOT NULL,
+      \`success_date\` datetime NOT NULL,
+      \`event\` varchar(255) NOT NULL,
+      PRIMARY KEY (\`id\`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+  await queryFn(`
+    CREATE TABLE \`login_email_opt_in_types\` (
+      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
+      \`name\` varchar(255) NOT NULL,
+      \`description\` varchar(255) NOT NULL,
+      PRIMARY KEY (\`id\`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+  await queryFn(`
+    CREATE TABLE \`login_email_opt_in\` (
+      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
+      \`user_id\` int NOT NULL,
+      \`verification_code\` bigint unsigned NOT NULL,
+      \`email_opt_in_type_id\` int NOT NULL,
+      \`created\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+      \`resend_count\` int DEFAULT '0',
+      \`updated\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+      PRIMARY KEY (\`id\`),
+      UNIQUE KEY \`verification_code\` (\`verification_code\`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+  await queryFn(`
+    CREATE TABLE \`login_groups\` (
+      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
+      \`alias\` varchar(190) NOT NULL,
+      \`name\` varchar(255) NOT NULL,
+      \`url\` varchar(255) NOT NULL,
+      \`host\` varchar(255) DEFAULT "/",
+      \`home\` varchar(255) DEFAULT "/",
+      \`description\` text,
+      PRIMARY KEY (\`id\`),
+      UNIQUE KEY \`alias\` (\`alias\`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+  await queryFn(`
+    CREATE TABLE \`login_pending_tasks\` (
+      \`id\` int UNSIGNED NOT NULL AUTO_INCREMENT,
+      \`user_id\` int UNSIGNED DEFAULT 0,
+      \`request\` varbinary(2048) NOT NULL,
+      \`created\` datetime NOT NULL,
+      \`finished\` datetime DEFAULT '2000-01-01 000000',
+      \`result_json\` text DEFAULT NULL,
+      \`param_json\` text DEFAULT NULL,
+      \`task_type_id\` int UNSIGNED NOT NULL,
+      \`child_pending_task_id\` int UNSIGNED DEFAULT 0,
+      \`parent_pending_task_id\` int UNSIGNED DEFAULT 0,
+      PRIMARY KEY (\`id\`)
+    ) ENGINE = InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+  await queryFn(`
+    CREATE TABLE \`login_roles\` (
+      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
+      \`name\` varchar(255) NOT NULL,
+      \`description\` varchar(255) NOT NULL,
+      \`flags\` bigint NOT NULL DEFAULT '0',
+      PRIMARY KEY (\`id\`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+  await queryFn(`
+    CREATE TABLE \`login_user_backups\` (
+      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
+      \`user_id\` int NOT NULL,
+      \`passphrase\` text NOT NULL,
+      \`mnemonic_type\` int DEFAULT '-1',
+      PRIMARY KEY (\`id\`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+  await queryFn(`
+    CREATE TABLE \`login_user_roles\` (
+      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
+      \`user_id\` int NOT NULL,
+      \`role_id\` int NOT NULL,
+      PRIMARY KEY (\`id\`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+  await queryFn(`
+    CREATE TABLE \`login_users\` (
+      \`id\` int unsigned NOT NULL AUTO_INCREMENT,
+      \`email\` varchar(191) NOT NULL,
+      \`first_name\` varchar(150) NOT NULL,
+      \`last_name\` varchar(255) DEFAULT '',
+      \`username\` varchar(255) DEFAULT '',
+      \`description\` text DEFAULT '',
+      \`password\` bigint unsigned DEFAULT '0',
+      \`pubkey\` binary(32) DEFAULT NULL,
+      \`privkey\` binary(80) DEFAULT NULL,
+      \`email_hash\` binary(32) DEFAULT NULL,
+      \`created\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+      \`email_checked\` tinyint NOT NULL DEFAULT '0',
+      \`passphrase_shown\` tinyint NOT NULL DEFAULT '0',
+      \`language\` varchar(4) NOT NULL DEFAULT 'de',
+      \`disabled\` tinyint DEFAULT '0',
+      \`group_id\` int unsigned DEFAULT 0,
+      \`publisher_id\` int DEFAULT 0,
+      PRIMARY KEY (\`id\`),
+      UNIQUE KEY \`email\` (\`email\`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+  `)
+}
+
+export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
+  // write downgrade logic as parameter of queryFn
+  await queryFn(`DROP TABLE \`login_app_access_tokens\`;`)
+  await queryFn(`DROP TABLE \`login_elopage_buys\`;`)
+  await queryFn(`DROP TABLE \`login_email_opt_in_types\`;`)
+  await queryFn(`DROP TABLE \`login_email_opt_in\`;`)
+  await queryFn(`DROP TABLE \`login_groups\`;`)
+  await queryFn(`DROP TABLE \`login_pending_tasks\`;`)
+  await queryFn(`DROP TABLE \`login_roles\`;`)
+  await queryFn(`DROP TABLE \`login_user_backups\`;`)
+  await queryFn(`DROP TABLE \`login_user_roles\`;`)
+  await queryFn(`DROP TABLE \`login_users\`;`)
+}
database/migrations/0004-login_server_data.ts | 74 (new file)
@@ -0,0 +1,74 @@
+/* FIRST MIGRATION
+ *
+ * This migration is special since it takes into account that
+ * the database can be setup already but also may not be.
+ * Therefore you will find all `CREATE TABLE` statements with
+ * a `IF NOT EXISTS`, all `INSERT` with an `IGNORE` and in the
+ * downgrade function all `DROP TABLE` with a `IF EXISTS`.
+ * This ensures compatibility for existing or non-existing
+ * databases.
+ */
+
+const LOGIN_SERVER_DB = 'gradido_login'
+
+export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
+  const loginDatabaseExists = await queryFn(`
+    SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = '${LOGIN_SERVER_DB}'
+  `)
+  if (loginDatabaseExists.length === 0) {
+    // eslint-disable-next-line no-console
+    console.log(`Skipping Login Server Database migration - Database ${LOGIN_SERVER_DB} not found`)
+    return
+  }
+
+  await queryFn(`
+    INSERT INTO \`login_app_access_tokens\` SELECT * FROM ${LOGIN_SERVER_DB}.\`app_access_tokens\`;
+  `)
+  await queryFn(`
+    INSERT INTO \`login_elopage_buys\` SELECT * FROM ${LOGIN_SERVER_DB}.\`elopage_buys\`;
+  `)
+  await queryFn(`
+    INSERT INTO \`login_email_opt_in_types\` SELECT * FROM ${LOGIN_SERVER_DB}.\`email_opt_in_types\`;
+  `)
+  await queryFn(`
+    INSERT INTO \`login_email_opt_in\` SELECT * FROM ${LOGIN_SERVER_DB}.\`email_opt_in\`;
+  `)
+  await queryFn(`
+    INSERT INTO \`login_groups\` SELECT * FROM ${LOGIN_SERVER_DB}.\`groups\`;
+  `)
+  await queryFn(`
+    INSERT INTO \`login_pending_tasks\` SELECT * FROM ${LOGIN_SERVER_DB}.\`pending_tasks\`;
+  `)
+  await queryFn(`
+    INSERT INTO \`login_roles\` SELECT * FROM ${LOGIN_SERVER_DB}.\`roles\`;
+  `)
+  await queryFn(`
+    INSERT INTO \`login_user_backups\` SELECT * FROM ${LOGIN_SERVER_DB}.\`user_backups\`;
+  `)
+  await queryFn(`
+    INSERT INTO \`login_user_roles\` SELECT * FROM ${LOGIN_SERVER_DB}.\`user_roles\`;
+  `)
+  await queryFn(`
+    INSERT INTO \`login_users\` SELECT * FROM ${LOGIN_SERVER_DB}.\`users\`;
+  `)
+
+  // TODO clarify if we need this on non docker environment?
+  await queryFn(`
+    INSERT IGNORE INTO \`login_groups\` (\`id\`, \`alias\`, \`name\`, \`url\`, \`host\`, \`home\`, \`description\`) VALUES
+    (1, 'docker', 'docker gradido group', 'localhost', 'nginx', '/', 'gradido test group for docker and stage2 with blockchain db');
+  `)
+}
+
+export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
+  // write downgrade logic as parameter of queryFn
+  await queryFn(`DELETE FROM \`login_app_access_tokens\`;`)
+  await queryFn(`DELETE FROM \`login_elopage_buys\`;`)
+  await queryFn(`DELETE FROM \`login_email_opt_in_types\`;`)
+  await queryFn(`DELETE FROM \`login_email_opt_in\`;`)
+  await queryFn(`DELETE FROM \`login_groups\`;`)
+  await queryFn(`DELETE FROM \`login_pending_tasks\`;`)
+  await queryFn(`DELETE FROM \`login_roles\`;`)
+  await queryFn(`DELETE FROM \`login_user_backups\`;`)
+  await queryFn(`DELETE FROM \`login_user_roles\`;`)
+  await queryFn(`DELETE FROM \`login_users\`;`)
+}
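Both new migration modules only export `upgrade`/`downgrade` functions that receive a `queryFn`; the runner that supplies it is not part of this diff. Below is a hypothetical harness showing how the expected `queryFn` contract (raw SQL in, result rows out) could be satisfied with a plain typeorm connection; the real gradido migration runner may work differently.

```ts
// Hypothetical harness – the actual migration runner is not shown in this commit.
import { createConnection } from 'typeorm'
import * as migration from './0004-login_server_data'

async function runUpgrade() {
  const con = await createConnection() // uses ormconfig / environment settings
  // queryFn matches the signature expected by upgrade(): raw SQL in, rows out.
  const queryFn = (query: string, values?: any[]) => con.query(query, values)
  try {
    await migration.upgrade(queryFn)
  } finally {
    await con.close()
  }
}

runUpgrade()
```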
@@ -36,8 +36,6 @@ cd $PROJECT_PATH
 # git checkout -f master
 git pull
 cd deployment/bare_metal
-echo 'update schemas' >> $UPDATE_HTML
-./update_db_schemas.sh
 echo 'starting with rebuilding login-server<br>' >> $UPDATE_HTML
 ./build_and_start_login_server.sh
 echo 'starting with rebuilding frontend<br>' >> $UPDATE_HTML

@@ -1,7 +0,0 @@
-#!/bin/bash
-# For that to work skeema needed to be installed on system
-# in login_server/skeema skeema configuration files need to be there also in the subfolders
-# Update DB Schemas (only the schemas, not the data)
-
-cd ../../login_server/skeema
-skeema push --allow-unsafe
@@ -136,22 +136,7 @@ services:
 - external-net
 volumes:
 - /sessions
-
-#########################################################
-## skeema for updating dbs if changes happend ###########
-#########################################################
-skeema:
-build:
-context: .
-dockerfile: ./skeema/Dockerfile
-target: skeema_dev_run
-depends_on:
-- mariadb
-networks:
-- internal-net
-volumes:
-- ./login_server/skeema/gradido_login:/skeema/gradido_login

 volumes:
 frontend_node_modules:
 backend_node_modules:
@@ -2,6 +2,26 @@ version: "3.4"

 services:
+
+########################################################
+# BACKEND ##############################################
+########################################################
+backend:
+image: gradido/backend:test
+build:
+target: test
+networks:
+- external-net
+- internal-net
+environment:
+- NODE_ENV="test"
+- DB_HOST=mariadb
+
+########################################################
+# DATABASE #############################################
+########################################################
+database:
+restart: always # this is very dangerous, but worth a test for the delayed mariadb startup at first run

 #########################################################
 ## MARIADB ##############################################
 #########################################################
@@ -9,16 +29,17 @@ services:
 build:
 context: .
 dockerfile: ./mariadb/Dockerfile
-target: mariadb_server_test
+target: mariadb_server
 environment:
 - MARIADB_ALLOW_EMPTY_PASSWORD=1
 - MARIADB_USER=root
 networks:
 - internal-net
+- external-net
 ports:
 - 3306:3306
 volumes:
 - db_test_vol:/var/lib/mysql

 #########################################################
 ## LOGIN SERVER #########################################
@@ -75,19 +96,6 @@ services:
 - external-net
 volumes:
 - /sessions
-
-#########################################################
-## skeema for updating dbs if changes happend ###########
-#########################################################
-skeema:
-build:
-context: .
-dockerfile: ./skeema/Dockerfile
-target: skeema_run
-depends_on:
-- mariadb
-networks:
-- internal-net

 networks:
 external-net:
@@ -35,9 +35,8 @@ services:
 #########################################################
 mariadb:
 build:
-context: .
-dockerfile: ./mariadb/Dockerfile
-target: mariadb_server_test
+context: ./mariadb
+target: mariadb_server
 environment:
 - MARIADB_ALLOW_EMPTY_PASSWORD=1
 - MARIADB_USER=root
@@ -158,41 +157,6 @@ services:
 - internal-net
 volumes:
 - ./community_server/config/php-fpm/php-ini-overrides.ini:/etc/php/7.4/fpm/conf.d/99-overrides.ini
-
-#########################################################
-## skeema for updating dbs if changes happend ###########
-#########################################################
-skeema:
-build:
-context: .
-dockerfile: ./skeema/Dockerfile
-target: skeema_run
-depends_on:
-- mariadb
-networks:
-- internal-net
-
-#########################################################
-## GRADIDO NODE v1 ######################################
-#########################################################
-# gradido-node:
-# build:
-# context: .
-# dockerfile: ./gn/docker/deprecated-hedera-node/Dockerfile
-# volumes:
-# - ${GN_INSTANCE_FOLDER}:/opt/instance
-# container_name: ${GN_CONTAINER_NAME}
-
-#########################################################
-## GRADIDO NODE test ###################################
-#########################################################
-# gradido-node-test:
-# build:
-# context: .
-# dockerfile: ./gn/docker/deprecated-hedera-node/Dockerfile
-# container_name: gn-test
-# working_dir: /opt/gn/build
-# command: ["./unit_tests"]
-
 networks:
 external-net:
@@ -15,7 +15,7 @@ This document describes the technical overview for the Gradido infrastructur. Be

 

-### Database Skeema
+### Database Skeema (outdated)

 

docu/Gradido-Admin.epgz | BIN (new file; binary file not shown)

docu/create-coins-as-admin.md | 27 (new file)
@@ -0,0 +1,27 @@
+### User creation
+A user needs to be created on the login_server; we do this when we create a user in the client at https://$community_domain/vue/register.
+
+### Admin user
+To make a user an admin we need the following SQL query on the gradido_login database:
+```
+INSERT INTO user_roles (id, user_id, role_id) VALUES (NULL, '1', '1');
+```
+The user_id has to be looked up in the `users` table.
+Now when we log in on https://$community_domain/account/ we can create coins, but we are restricted because we can't sign the creations.
+
+### Signation account
+First we need to enable server user account creation by uncommenting line 19 in
+community_server/src/Controller/ServerUsersController.php
+```php
+$this->Auth->allow(['add', 'edit']);
+```
+This enables us to use this action without being logged in.
+To add a signation account we go to the following URL: http://$community_domain/server-users/add
+
+### Coin creation process
+The coin creation for work is done at the following URL: http://$community_domain/transaction-creations/create-multi
+There we can create coins for as many users as we want, except for ourselves.
+Furthermore we must sign the transactions we created. Normally after clicking on the left button (Transaktion abschließen) we should be automatically forwarded to http://$community_domain/account/checkTransactions where we can do this.
+If not, this page can also be reached by clicking on the shield-icon with the hook in it on the Dashboard, which is only shown if at least one transaction is waiting for signing.
+
+For debug purposes you can check the `pending_tasks` table, which is used to store the transactions which are not signed yet or had errors.
docu/graphics/gradido_admin.png | BIN (new file, 104 KiB; binary file not shown)

docu/graphics/userdetails.png | BIN (new file, 80 KiB; binary file not shown)

docu/presentation/adminarea-old-new.pdf | BIN (new file; binary file not shown)
@ -9,8 +9,9 @@ module.exports = {
|
|||||||
],
|
],
|
||||||
// coverageReporters: ['lcov', 'text'],
|
// coverageReporters: ['lcov', 'text'],
|
||||||
moduleNameMapper: {
|
moduleNameMapper: {
|
||||||
'^@/(.*)$': '<rootDir>/src/$1',
|
|
||||||
'\\.(css|less)$': 'identity-obj-proxy',
|
'\\.(css|less)$': 'identity-obj-proxy',
|
||||||
|
'\\.(scss)$': '<rootDir>/src/assets/mocks/styleMock.js',
|
||||||
|
'^@/(.*)$': '<rootDir>/src/$1',
|
||||||
},
|
},
|
||||||
transform: {
|
transform: {
|
||||||
'^.+\\.vue$': 'vue-jest',
|
'^.+\\.vue$': 'vue-jest',
|
||||||
|
|||||||
1
frontend/src/assets/mocks/styleMock.js
Normal file
1
frontend/src/assets/mocks/styleMock.js
Normal file
@ -0,0 +1 @@
|
|||||||
|
module.exports = {}
|
||||||
@ -75,9 +75,7 @@ export const sendResetPasswordEmail = gql`
|
|||||||
|
|
||||||
export const checkUsername = gql`
|
export const checkUsername = gql`
|
||||||
query($username: String!) {
|
query($username: String!) {
|
||||||
checkUsername(username: $username) {
|
checkUsername(username: $username)
|
||||||
state
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
`
|
`
|
||||||
|
|
||||||
|
|||||||
@ -96,7 +96,7 @@
|
|||||||
"conversion-gdt-euro": "Umrechnung Euro / Gradido Transform (GDT)",
|
"conversion-gdt-euro": "Umrechnung Euro / Gradido Transform (GDT)",
|
||||||
"credit": "Gutschrift",
|
"credit": "Gutschrift",
|
||||||
"factor": "Faktor",
|
"factor": "Faktor",
|
||||||
"formula": "Berechungsformel",
|
"formula": "Berechnungsformel",
|
||||||
"funding": "Zu den Förderbeiträgen",
|
"funding": "Zu den Förderbeiträgen",
|
||||||
"gdt-received": "Gradido Transform (GDT) erhalten",
|
"gdt-received": "Gradido Transform (GDT) erhalten",
|
||||||
"no-transactions": "Du hast noch keine Gradido Transform (GDT).",
|
"no-transactions": "Du hast noch keine Gradido Transform (GDT).",
|
||||||
|
|||||||
25
frontend/src/plugins/dashboard-plugin.test.js
Normal file
25
frontend/src/plugins/dashboard-plugin.test.js
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
import dashboardPlugin from './dashboard-plugin.js'
|
||||||
|
import Vue from 'vue'
|
||||||
|
|
||||||
|
import GlobalComponents from './globalComponents'
|
||||||
|
import GlobalDirectives from './globalDirectives'
|
||||||
|
|
||||||
|
jest.mock('./globalComponents')
|
||||||
|
jest.mock('./globalDirectives')
|
||||||
|
|
||||||
|
jest.mock('vue')
|
||||||
|
|
||||||
|
const vueUseMock = jest.fn()
|
||||||
|
Vue.use = vueUseMock
|
||||||
|
|
||||||
|
describe('dashboard plugin', () => {
|
||||||
|
dashboardPlugin.install(Vue)
|
||||||
|
|
||||||
|
it('installs the global components', () => {
|
||||||
|
expect(vueUseMock).toBeCalledWith(GlobalComponents)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('installs the global directives', () => {
|
||||||
|
expect(vueUseMock).toBeCalledWith(GlobalDirectives)
|
||||||
|
})
|
||||||
|
})
|
||||||
@ -59,7 +59,7 @@ export const loadAllRules = (i18nCallback) => {
|
|||||||
},
|
},
|
||||||
})
|
})
|
||||||
.then((result) => {
|
.then((result) => {
|
||||||
return result.data.checkUsername.state === 'success'
|
return result.data.checkUsername
|
||||||
})
|
})
|
||||||
.catch(() => {
|
.catch(() => {
|
||||||
return false
|
return false
|
||||||
|
|||||||
@ -5,10 +5,10 @@
|
|||||||
<div class="header-body text-center mb-7">
|
<div class="header-body text-center mb-7">
|
||||||
<b-row class="justify-content-center">
|
<b-row class="justify-content-center">
|
||||||
<b-col xl="5" lg="6" md="8" class="px-2">
|
<b-col xl="5" lg="6" md="8" class="px-2">
|
||||||
<h1>{{ $t('checkEmail.title') }}</h1>
|
<h1>{{ $t('site.checkEmail.title') }}</h1>
|
||||||
<div class="pb-4" v-if="!pending">
|
<div class="pb-4" v-if="!pending">
|
||||||
<span v-if="!authenticated">
|
<span v-if="!authenticated">
|
||||||
{{ $t('checkEmail.errorText') }}
|
{{ $t('site.checkEmail.errorText') }}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
</b-col>
|
</b-col>
|
||||||
|
|||||||
@ -207,12 +207,6 @@ export default {
|
|||||||
},
|
},
|
||||||
})
|
})
|
||||||
.then(() => {
|
.then(() => {
|
||||||
this.form.email = ''
|
|
||||||
this.form.firstname = ''
|
|
||||||
this.form.lastname = ''
|
|
||||||
this.form.password.password = ''
|
|
||||||
this.form.password.passwordRepeat = ''
|
|
||||||
this.language = ''
|
|
||||||
this.$router.push('/thx/register')
|
this.$router.push('/thx/register')
|
||||||
})
|
})
|
||||||
.catch((error) => {
|
.catch((error) => {
|
||||||
@ -228,7 +222,6 @@ export default {
|
|||||||
this.form.lastname = ''
|
this.form.lastname = ''
|
||||||
this.form.password.password = ''
|
this.form.password.password = ''
|
||||||
this.form.password.passwordRepeat = ''
|
this.form.password.passwordRepeat = ''
|
||||||
this.language = ''
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
computed: {
|
computed: {
|
||||||
|
|||||||
1
login_server/.gitignore
vendored
1
login_server/.gitignore
vendored
@ -5,6 +5,5 @@ src/cpsp/*.h
|
|||||||
src/cpsp/*.cpp
|
src/cpsp/*.cpp
|
||||||
src/cpp/proto/
|
src/cpp/proto/
|
||||||
build*/
|
build*/
|
||||||
/skeema/gradido_login/insert/crypto_key.sql
|
|
||||||
|
|
||||||
src/LOCALE/messages.pot
|
src/LOCALE/messages.pot
|
||||||
|
|||||||
@ -56,8 +56,7 @@ To update messages.pot run
|
|||||||
This will be also called by ./scripts/build_debug.sh
|
This will be also called by ./scripts/build_debug.sh
|
||||||
|
|
||||||
## database
|
## database
|
||||||
Login-Server needs a db to run, it is tested with mariadb
|
Login-Server needs a database to run; it is tested with MariaDB.
|
||||||
table definitions are found in folder ./skeema/gradido_login
|
|
||||||
Currently at least one group must be present in table groups.
|
Currently at least one group must be present in table groups.
|
||||||
For example:
|
For example:
|
||||||
```sql
|
```sql
|
||||||
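The README's example is cut off here by the diff context right after the opening sql code fence. A plausible sketch of such a seed row, based on the groups schema and the test data visible elsewhere in this diff (the table may be named `login_groups` on newer schemas), would be:

```sql
-- Sketch of a minimal seed row for the groups table; values mirror
-- the test data used elsewhere in this diff.
INSERT INTO `groups` (`alias`, `name`, `url`, `description`)
VALUES ('gdd1', 'Gradido1', 'gdd1.gradido.com', 'Der erste offizielle Gradido Server (zum Testen)');
```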
|
|||||||
@ -1,9 +0,0 @@
|
|||||||
CREATE TABLE `app_access_tokens` (
|
|
||||||
`id` int unsigned NOT NULL AUTO_INCREMENT,
|
|
||||||
`user_id` int NOT NULL,
|
|
||||||
`access_code` bigint unsigned NOT NULL,
|
|
||||||
`created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
`updated` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
|
||||||
PRIMARY KEY (`id`),
|
|
||||||
UNIQUE KEY `access_code` (`access_code`)
|
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -1,15 +0,0 @@
|
|||||||
CREATE TABLE `elopage_buys` (
|
|
||||||
`id` int unsigned NOT NULL AUTO_INCREMENT,
|
|
||||||
`elopage_user_id` int DEFAULT NULL,
|
|
||||||
`affiliate_program_id` int NOT NULL,
|
|
||||||
`publisher_id` int NOT NULL,
|
|
||||||
`order_id` int NOT NULL,
|
|
||||||
`product_id` int NOT NULL,
|
|
||||||
`product_price` int NOT NULL,
|
|
||||||
`payer_email` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
|
|
||||||
`publisher_email` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
|
|
||||||
`payed` tinyint NOT NULL,
|
|
||||||
`success_date` datetime NOT NULL,
|
|
||||||
`event` varchar(255) NOT NULL,
|
|
||||||
PRIMARY KEY (`id`)
|
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -1,11 +0,0 @@
|
|||||||
CREATE TABLE `email_opt_in` (
|
|
||||||
`id` int unsigned NOT NULL AUTO_INCREMENT,
|
|
||||||
`user_id` int NOT NULL,
|
|
||||||
`verification_code` bigint unsigned NOT NULL,
|
|
||||||
`email_opt_in_type_id` int NOT NULL,
|
|
||||||
`created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
`resend_count` int DEFAULT '0',
|
|
||||||
`updated` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
|
||||||
PRIMARY KEY (`id`),
|
|
||||||
UNIQUE KEY `verification_code` (`verification_code`)
|
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -1,6 +0,0 @@
|
|||||||
CREATE TABLE `email_opt_in_types` (
|
|
||||||
`id` int unsigned NOT NULL AUTO_INCREMENT,
|
|
||||||
`name` varchar(255) NOT NULL,
|
|
||||||
`description` varchar(255) NOT NULL,
|
|
||||||
PRIMARY KEY (`id`)
|
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -1,11 +0,0 @@
|
|||||||
CREATE TABLE `groups` (
|
|
||||||
`id` int unsigned NOT NULL AUTO_INCREMENT,
|
|
||||||
`alias` varchar(190) NOT NULL,
|
|
||||||
`name` varchar(255) NOT NULL,
|
|
||||||
`url` varchar(255) NOT NULL,
|
|
||||||
`host` varchar(255) DEFAULT "/",
|
|
||||||
`home` varchar(255) DEFAULT "/",
|
|
||||||
`description` text,
|
|
||||||
PRIMARY KEY (`id`),
|
|
||||||
UNIQUE KEY `alias` (`alias`)
|
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -1,13 +0,0 @@
|
|||||||
CREATE TABLE `pending_tasks` (
|
|
||||||
`id` int UNSIGNED NOT NULL AUTO_INCREMENT,
|
|
||||||
`user_id` int UNSIGNED DEFAULT 0,
|
|
||||||
`request` varbinary(2048) NOT NULL,
|
|
||||||
`created` datetime NOT NULL,
|
|
||||||
`finished` datetime DEFAULT '2000-01-01 000000',
|
|
||||||
`result_json` text DEFAULT NULL,
|
|
||||||
`param_json` text DEFAULT NULL,
|
|
||||||
`task_type_id` int UNSIGNED NOT NULL,
|
|
||||||
`child_pending_task_id` int UNSIGNED DEFAULT 0,
|
|
||||||
`parent_pending_task_id` int UNSIGNED DEFAULT 0,
|
|
||||||
PRIMARY KEY (`id`)
|
|
||||||
) ENGINE = InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -1,7 +0,0 @@
|
|||||||
CREATE TABLE `roles` (
|
|
||||||
`id` int unsigned NOT NULL AUTO_INCREMENT,
|
|
||||||
`name` varchar(255) NOT NULL,
|
|
||||||
`description` varchar(255) NOT NULL,
|
|
||||||
`flags` bigint NOT NULL DEFAULT '0',
|
|
||||||
PRIMARY KEY (`id`)
|
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -1,7 +0,0 @@
|
|||||||
CREATE TABLE `user_backups` (
|
|
||||||
`id` int unsigned NOT NULL AUTO_INCREMENT,
|
|
||||||
`user_id` int NOT NULL,
|
|
||||||
`passphrase` text NOT NULL,
|
|
||||||
`mnemonic_type` int DEFAULT '-1',
|
|
||||||
PRIMARY KEY (`id`)
|
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -1,6 +0,0 @@
|
|||||||
CREATE TABLE `user_roles` (
|
|
||||||
`id` int unsigned NOT NULL AUTO_INCREMENT,
|
|
||||||
`user_id` int NOT NULL,
|
|
||||||
`role_id` int NOT NULL,
|
|
||||||
PRIMARY KEY (`id`)
|
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -1,21 +0,0 @@
|
|||||||
CREATE TABLE `users` (
|
|
||||||
`id` int unsigned NOT NULL AUTO_INCREMENT,
|
|
||||||
`email` varchar(191) NOT NULL,
|
|
||||||
`first_name` varchar(150) NOT NULL,
|
|
||||||
`last_name` varchar(255) DEFAULT '',
|
|
||||||
`username` varchar(255) DEFAULT '',
|
|
||||||
`description` text DEFAULT '',
|
|
||||||
`password` bigint unsigned DEFAULT '0',
|
|
||||||
`pubkey` binary(32) DEFAULT NULL,
|
|
||||||
`privkey` binary(80) DEFAULT NULL,
|
|
||||||
`email_hash` binary(32) DEFAULT NULL,
|
|
||||||
`created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
`email_checked` tinyint NOT NULL DEFAULT '0',
|
|
||||||
`passphrase_shown` tinyint NOT NULL DEFAULT '0',
|
|
||||||
`language` varchar(4) NOT NULL DEFAULT 'de',
|
|
||||||
`disabled` tinyint DEFAULT '0',
|
|
||||||
`group_id` int unsigned DEFAULT 0,
|
|
||||||
`publisher_id` int DEFAULT 0,
|
|
||||||
PRIMARY KEY (`id`),
|
|
||||||
UNIQUE KEY `email` (`email`)
|
|
||||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
|
||||||
@ -58,7 +58,7 @@ bool EmailManager::init(const Poco::Util::LayeredConfiguration& cfg)
|
|||||||
void EmailManager::addEmail(model::Email* email) {
|
void EmailManager::addEmail(model::Email* email) {
|
||||||
if (mDisableEmail) {
|
if (mDisableEmail) {
|
||||||
std::string dateTimeString = Poco::DateTimeFormatter::format(Poco::DateTime(), "%d.%m.%y %H:%M:%S");
|
std::string dateTimeString = Poco::DateTimeFormatter::format(Poco::DateTime(), "%d.%m.%y %H:%M:%S");
|
||||||
std::string log_message = dateTimeString + " Email should be sended to: ";
|
std::string log_message = dateTimeString + " Email should have been sent to: ";
|
||||||
auto email_user = email->getUser();
|
auto email_user = email->getUser();
|
||||||
Poco::AutoPtr<model::table::User> email_model;
|
Poco::AutoPtr<model::table::User> email_model;
|
||||||
if (email_user) {
|
if (email_user) {
|
||||||
|
|||||||
@ -148,7 +148,7 @@ Session* SessionManager::getNewSession(int* handle)
|
|||||||
mWorkingMutex.tryLock(500);
|
mWorkingMutex.tryLock(500);
|
||||||
}
|
}
|
||||||
catch (Poco::TimeoutException &ex) {
|
catch (Poco::TimeoutException &ex) {
|
||||||
printf("[%s] exception timout mutex: %s\n", functionName, ex.displayText().data());
|
printf("[%s] exception timeout mutex: %s\n", functionName, ex.displayText().data());
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
//mWorkingMutex.lock();
|
//mWorkingMutex.lock();
|
||||||
|
|||||||
@ -69,7 +69,7 @@ namespace controller {
|
|||||||
|
|
||||||
using namespace Poco::Data::Keywords;
|
using namespace Poco::Data::Keywords;
|
||||||
Poco::Data::Statement select(session);
|
Poco::Data::Statement select(session);
|
||||||
select << "SELECT id, first_name, last_name, email, username, description, pubkey, created, email_checked, disabled, group_id FROM " << db->getTableName();
|
select << "SELECT id, first_name, last_name, email, username, description, pubkey, created, email_checked, disabled, group_id, publisher_id FROM " << db->getTableName();
|
||||||
select << " where email_checked = 0 ";
|
select << " where email_checked = 0 ";
|
||||||
select, into(resultFromDB);
|
select, into(resultFromDB);
|
||||||
if (searchString != "") {
|
if (searchString != "") {
|
||||||
@ -439,6 +439,7 @@ namespace controller {
|
|||||||
return 0;
|
return 0;
|
||||||
auto cm = ConnectionManager::getInstance();
|
auto cm = ConnectionManager::getInstance();
|
||||||
auto em = ErrorManager::getInstance();
|
auto em = ErrorManager::getInstance();
|
||||||
|
auto db = new model::table::User();
|
||||||
static const char* function_name = "User::checkIfVerificationEmailsShouldBeResend";
|
static const char* function_name = "User::checkIfVerificationEmailsShouldBeResend";
|
||||||
|
|
||||||
auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
|
auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
|
||||||
@ -446,8 +447,9 @@ namespace controller {
|
|||||||
std::vector<Poco::Tuple<int,Poco::DateTime>> results;
|
std::vector<Poco::Tuple<int,Poco::DateTime>> results;
|
||||||
int email_checked = 0;
|
int email_checked = 0;
|
||||||
int resend_count = 1;
|
int resend_count = 1;
|
||||||
select << "select u.id, v.created from users as u "
|
std::string table_name_email_opt_in = "login_email_opt_in";
|
||||||
<< "LEFT JOIN email_opt_in as v ON(u.id = v.user_id) "
|
select << "select u.id, v.created from " << db->getTableName() << " as u "
|
||||||
|
<< "LEFT JOIN " << table_name_email_opt_in << " as v ON(u.id = v.user_id) "
|
||||||
<< "where u.email_checked = ? "
|
<< "where u.email_checked = ? "
|
||||||
<< "AND v.resend_count <= ? "
|
<< "AND v.resend_count <= ? "
|
||||||
<< "ORDER BY u.id, v.created " ,
|
<< "ORDER BY u.id, v.created " ,
|
||||||
@ -519,14 +521,15 @@ namespace controller {
|
|||||||
{
|
{
|
||||||
auto cm = ConnectionManager::getInstance();
|
auto cm = ConnectionManager::getInstance();
|
||||||
auto em = ErrorManager::getInstance();
|
auto em = ErrorManager::getInstance();
|
||||||
|
auto db = new model::table::User();
|
||||||
static const char* function_name = "User::addMissingEmailHashes";
|
static const char* function_name = "User::addMissingEmailHashes";
|
||||||
|
|
||||||
auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
|
auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
|
||||||
Poco::Data::Statement select(session);
|
Poco::Data::Statement select(session);
|
||||||
std::vector<Poco::Tuple<int, std::string>> results;
|
std::vector<Poco::Tuple<int, std::string>> results;
|
||||||
|
|
||||||
select << "select id, email from users "
|
select << "select id, email from " << db->getTableName()
|
||||||
<< "where email_hash IS NULL "
|
<< " where email_hash IS NULL "
|
||||||
, Poco::Data::Keywords::into(results)
|
, Poco::Data::Keywords::into(results)
|
||||||
;
|
;
|
||||||
int result_count = 0;
|
int result_count = 0;
|
||||||
@ -556,7 +559,7 @@ namespace controller {
|
|||||||
// update db
|
// update db
|
||||||
// reuse connection, I hope it's working
|
// reuse connection, I hope it's working
|
||||||
Poco::Data::Statement update(session);
|
Poco::Data::Statement update(session);
|
||||||
update << "UPDATE users set email_hash = ? where id = ?"
|
update << "UPDATE " << db->getTableName() << " set email_hash = ? where id = ?"
|
||||||
, Poco::Data::Keywords::use(updates);
|
, Poco::Data::Keywords::use(updates);
|
||||||
int updated_count = 0;
|
int updated_count = 0;
|
||||||
try {
|
try {
|
||||||
|
|||||||
@ -19,7 +19,7 @@ namespace model {
|
|||||||
|
|
||||||
|
|
||||||
// generic db operations
|
// generic db operations
|
||||||
const char* getTableName() const { return "app_access_tokens"; }
|
const char* getTableName() const { return "login_app_access_tokens"; }
|
||||||
std::string toString();
|
std::string toString();
|
||||||
|
|
||||||
inline Poco::UInt64 getCode() const { return mAccessCode; }
|
inline Poco::UInt64 getCode() const { return mAccessCode; }
|
||||||
|
|||||||
@ -34,7 +34,7 @@ namespace model {
|
|||||||
ElopageBuy();
|
ElopageBuy();
|
||||||
|
|
||||||
// generic db operations
|
// generic db operations
|
||||||
const char* getTableName() const { return "elopage_buys"; }
|
const char* getTableName() const { return "login_elopage_buys"; }
|
||||||
|
|
||||||
std::string toString();
|
std::string toString();
|
||||||
|
|
||||||
|
|||||||
@ -29,7 +29,7 @@ namespace model {
|
|||||||
|
|
||||||
|
|
||||||
// generic db operations
|
// generic db operations
|
||||||
const char* getTableName() const { return "email_opt_in"; }
|
const char* getTableName() const { return "login_email_opt_in"; }
|
||||||
std::string toString();
|
std::string toString();
|
||||||
|
|
||||||
inline Poco::UInt64 getCode() const { return mEmailVerificationCode; }
|
inline Poco::UInt64 getCode() const { return mEmailVerificationCode; }
|
||||||
|
|||||||
@ -17,7 +17,7 @@ namespace model {
|
|||||||
Group(GroupTuple userTuple);
|
Group(GroupTuple userTuple);
|
||||||
|
|
||||||
// generic db operations
|
// generic db operations
|
||||||
const char* getTableName() const { return "groups"; }
|
const char* getTableName() const { return "login_groups"; }
|
||||||
std::string toString();
|
std::string toString();
|
||||||
|
|
||||||
inline const std::string& getAlias() const { return mAlias; }
|
inline const std::string& getAlias() const { return mAlias; }
|
||||||
|
|||||||
@ -30,7 +30,7 @@ namespace model {
|
|||||||
|
|
||||||
|
|
||||||
// generic db operations
|
// generic db operations
|
||||||
const char* getTableName() const { return "pending_tasks"; }
|
const char* getTableName() const { return "login_pending_tasks"; }
|
||||||
std::string toString();
|
std::string toString();
|
||||||
|
|
||||||
//! \brief update table row with current request
|
//! \brief update table row with current request
|
||||||
|
|||||||
@ -1,60 +0,0 @@
|
|||||||
#ifndef GRADIDO_LOGIN_SERVER_MODEL_TABLE_ROLES_INCLUDE
|
|
||||||
#define GRADIDO_LOGIN_SERVER_MODEL_TABLE_ROLES_INCLUDE
|
|
||||||
|
|
||||||
#include "ModelBase.h"
|
|
||||||
#include "Poco/Types.h"
|
|
||||||
#include "Poco/Tuple.h"
|
|
||||||
|
|
||||||
namespace model {
|
|
||||||
namespace table {
|
|
||||||
|
|
||||||
enum RoleType {
|
|
||||||
ROLE_ADMIN = 1
|
|
||||||
};
|
|
||||||
|
|
||||||
class Roles : public ModelBase
|
|
||||||
{
|
|
||||||
|
|
||||||
};
|
|
||||||
/*
|
|
||||||
typedef Poco::Tuple<int, int, Poco::UInt64, int> EmailOptInTuple;
|
|
||||||
|
|
||||||
class EmailOptIn : public ModelBase
|
|
||||||
{
|
|
||||||
public:
|
|
||||||
EmailOptIn(const Poco::UInt64& code, int user_id, EmailOptInType type);
|
|
||||||
EmailOptIn(const Poco::UInt64& code, EmailOptInType type);
|
|
||||||
EmailOptIn(const EmailOptInTuple& tuple);
|
|
||||||
EmailOptIn();
|
|
||||||
~EmailOptIn();
|
|
||||||
|
|
||||||
// generic db operations
|
|
||||||
const char* getTableName() { return "email_opt_in"; }
|
|
||||||
std::string toString();
|
|
||||||
|
|
||||||
inline Poco::UInt64 getCode() const { return mEmailVerificationCode; }
|
|
||||||
inline int getUserId() const { return mUserId; }
|
|
||||||
inline EmailOptInType getType() const { return static_cast<EmailOptInType>(mType); }
|
|
||||||
inline void setCode(Poco::UInt64 code) { mEmailVerificationCode = code; }
|
|
||||||
inline void setUserId(int user_Id) { mUserId = user_Id; }
|
|
||||||
|
|
||||||
static const char* typeToString(EmailOptInType type);
|
|
||||||
protected:
|
|
||||||
Poco::Data::Statement _loadFromDB(Poco::Data::Session session, const std::string& fieldName);
|
|
||||||
Poco::Data::Statement _loadIdFromDB(Poco::Data::Session session);
|
|
||||||
Poco::Data::Statement _loadMultipleFromDB(Poco::Data::Session session, const std::string& fieldName);
|
|
||||||
Poco::Data::Statement _loadFromDB(Poco::Data::Session session, const std::vector<std::string>& fieldNames, MysqlConditionType conditionType = MYSQL_CONDITION_AND);
|
|
||||||
Poco::Data::Statement _insertIntoDB(Poco::Data::Session session);
|
|
||||||
|
|
||||||
int mUserId;
|
|
||||||
// data type must be a multiple of 4
|
|
||||||
Poco::UInt64 mEmailVerificationCode;
|
|
||||||
int mType;
|
|
||||||
|
|
||||||
};
|
|
||||||
*/
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#endif //GRADIDO_LOGIN_SERVER_MODEL_TABLE_ROLES_INCLUDE
|
|
||||||
@ -83,11 +83,11 @@ namespace model {
|
|||||||
|
|
||||||
|
|
||||||
if (mPasswordHashed) {
|
if (mPasswordHashed) {
|
||||||
insert << "INSERT INTO users (email, first_name, last_name, username, description, password, email_hash, language, group_id, publisher_id) VALUES(?,?,?,?,?,?,?,?,?,?);",
|
insert << "INSERT INTO " << getTableName() << " (email, first_name, last_name, username, description, password, email_hash, language, group_id, publisher_id) VALUES(?,?,?,?,?,?,?,?,?,?);",
|
||||||
use(mEmail), use(mFirstName), use(mLastName), use(mUsername), use(mDescription), bind(mPasswordHashed), use(mEmailHash), use(mLanguageKey), use(mGroupId), use(mPublisherId);
|
use(mEmail), use(mFirstName), use(mLastName), use(mUsername), use(mDescription), bind(mPasswordHashed), use(mEmailHash), use(mLanguageKey), use(mGroupId), use(mPublisherId);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
insert << "INSERT INTO users (email, first_name, last_name, username, description, email_hash, language, group_id, publisher_id) VALUES(?,?,?,?,?,?,?,?,?);",
|
insert << "INSERT INTO " << getTableName() << " (email, first_name, last_name, username, description, email_hash, language, group_id, publisher_id) VALUES(?,?,?,?,?,?,?,?,?);",
|
||||||
use(mEmail), use(mFirstName), use(mLastName), use(mUsername), use(mDescription), use(mEmailHash), use(mLanguageKey), use(mGroupId), use(mPublisherId);
|
use(mEmail), use(mFirstName), use(mLastName), use(mUsername), use(mDescription), use(mEmailHash), use(mLanguageKey), use(mGroupId), use(mPublisherId);
|
||||||
|
|
||||||
}
|
}
|
||||||
@ -103,10 +103,13 @@ namespace model {
|
|||||||
}
|
}
|
||||||
Poco::Data::Statement select(session);
|
Poco::Data::Statement select(session);
|
||||||
|
|
||||||
select << "SELECT " << getTableName() << ".id, email, first_name, last_name, username, description, password, pubkey, privkey, email_hash, created, email_checked, language, disabled, group_id, publisher_id, user_roles.role_id "
|
std::string table_name_user_roles = "login_user_roles";
|
||||||
|
|
||||||
|
select << "SELECT " << getTableName() << ".id, email, first_name, last_name, username, description, password, pubkey, privkey, email_hash, created, email_checked, language, disabled, group_id, publisher_id, " << table_name_user_roles << ".role_id "
|
||||||
<< " FROM " << getTableName()
|
<< " FROM " << getTableName()
|
||||||
<< " LEFT JOIN user_roles ON " << getTableName() << ".id = user_roles.user_id "
|
<< " LEFT JOIN " << table_name_user_roles
|
||||||
<< " WHERE " << _fieldName << " = ?" ,
|
<< " ON " << getTableName() << ".id = " << table_name_user_roles << ".user_id "
|
||||||
|
<< " WHERE " << _fieldName << " = ?; " ,
|
||||||
into(mID), into(mEmail), into(mFirstName), into(mLastName), into(mUsername), into(mDescription), into(mPasswordHashed),
|
into(mID), into(mEmail), into(mFirstName), into(mLastName), into(mUsername), into(mDescription), into(mPasswordHashed),
|
||||||
into(mPublicKey), into(mPrivateKey), into(mEmailHash), into(mCreated), into(mEmailChecked),
|
into(mPublicKey), into(mPrivateKey), into(mEmailHash), into(mCreated), into(mEmailChecked),
|
||||||
into(mLanguageKey), into(mDisabled), into(mGroupId), into(mPublisherId), into(mRole);
|
into(mLanguageKey), into(mDisabled), into(mGroupId), into(mPublisherId), into(mRole);
|
||||||
@ -194,7 +197,7 @@ namespace model {
|
|||||||
|
|
||||||
Poco::Data::Statement update(session);
|
Poco::Data::Statement update(session);
|
||||||
|
|
||||||
update << "UPDATE users SET password = ?, privkey = ? where id = ?;",
|
update << "UPDATE " << getTableName() << " SET password = ?, privkey = ? where id = ?;",
|
||||||
bind(mPasswordHashed), use(mPrivateKey), use(mID);
|
bind(mPasswordHashed), use(mPrivateKey), use(mID);
|
||||||
|
|
||||||
|
|
||||||
@ -221,7 +224,7 @@ namespace model {
|
|||||||
|
|
||||||
Poco::Data::Statement update(session);
|
Poco::Data::Statement update(session);
|
||||||
|
|
||||||
update << "UPDATE users SET pubkey = ?, privkey = ? where id = ?;",
|
update << "UPDATE " << getTableName() << " SET pubkey = ?, privkey = ? where id = ?;",
|
||||||
use(mPublicKey), use(mPrivateKey), use(mID);
|
use(mPublicKey), use(mPrivateKey), use(mID);
|
||||||
|
|
||||||
|
|
||||||
@ -246,7 +249,7 @@ namespace model {
|
|||||||
auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
|
auto session = cm->getConnection(CONNECTION_MYSQL_LOGIN_SERVER);
|
||||||
|
|
||||||
Poco::Data::Statement update(session);
|
Poco::Data::Statement update(session);
|
||||||
update << "UPDATE users SET first_name = ?, last_name = ?, username = ?, description = ?, disabled = ?, language = ?, publisher_id = ? where id = ?;",
|
update << "UPDATE " << getTableName() << " SET first_name = ?, last_name = ?, username = ?, description = ?, disabled = ?, language = ?, publisher_id = ? where id = ?;",
|
||||||
use(mFirstName), use(mLastName), use(mUsername), use(mDescription), use(mDisabled), use(mLanguageKey), use(mPublisherId), use(mID);
|
use(mFirstName), use(mLastName), use(mUsername), use(mDescription), use(mDisabled), use(mLanguageKey), use(mPublisherId), use(mID);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|||||||
@ -54,7 +54,7 @@ namespace model {
|
|||||||
|
|
||||||
|
|
||||||
// generic db operations
|
// generic db operations
|
||||||
const char* getTableName() const { return "users"; }
|
const char* getTableName() const { return "login_users"; }
|
||||||
std::string toString();
|
std::string toString();
|
||||||
std::string toHTMLString();
|
std::string toHTMLString();
|
||||||
|
|
||||||
|
|||||||
@ -17,7 +17,7 @@ namespace model {
|
|||||||
UserBackup();
|
UserBackup();
|
||||||
|
|
||||||
// generic db operations
|
// generic db operations
|
||||||
const char* getTableName() const { return "user_backups"; }
|
const char* getTableName() const { return "login_user_backups"; }
|
||||||
std::string toString();
|
std::string toString();
|
||||||
|
|
||||||
inline int getUserId() const { return mUserId; }
|
inline int getUserId() const { return mUserId; }
|
||||||
|
|||||||
@ -4,7 +4,6 @@
|
|||||||
#include "ModelBase.h"
|
#include "ModelBase.h"
|
||||||
#include "Poco/Types.h"
|
#include "Poco/Types.h"
|
||||||
#include "Poco/Tuple.h"
|
#include "Poco/Tuple.h"
|
||||||
//#include "Roles.h"
|
|
||||||
|
|
||||||
namespace model {
|
namespace model {
|
||||||
namespace table {
|
namespace table {
|
||||||
@ -25,7 +24,7 @@ namespace model {
|
|||||||
UserRole();
|
UserRole();
|
||||||
|
|
||||||
// generic db operations
|
// generic db operations
|
||||||
const char* getTableName() const { return "user_roles"; }
|
const char* getTableName() const { return "login_user_roles"; }
|
||||||
std::string toString();
|
std::string toString();
|
||||||
|
|
||||||
inline int getUserId() const { return mUserId; }
|
inline int getUserId() const { return mUserId; }
|
||||||
|
|||||||
@ -176,9 +176,9 @@ int load(int argc, char* argv[]) {
|
|||||||
|
|
||||||
// clean up and fill db
|
// clean up and fill db
|
||||||
std::string tables[] = {
|
std::string tables[] = {
|
||||||
"groups",
|
"login_groups",
|
||||||
"users",
|
"login_users",
|
||||||
"user_roles"
|
"login_user_roles"
|
||||||
};
|
};
|
||||||
for (int i = 0; i < 3; i++) {
|
for (int i = 0; i < 3; i++) {
|
||||||
if (runMysql("TRUNCATE " + tables[i])) {
|
if (runMysql("TRUNCATE " + tables[i])) {
|
||||||
@ -191,7 +191,7 @@ int load(int argc, char* argv[]) {
|
|||||||
|
|
||||||
std::stringstream ss;
|
std::stringstream ss;
|
||||||
// password = TestP4ssword&H
|
// password = TestP4ssword&H
|
||||||
ss << "INSERT INTO `users` (`id`, `email`, `first_name`, `last_name`, `username`, `password`, `pubkey`, `privkey`, `created`, `email_checked`, `passphrase_shown`, `language`, `disabled`, `group_id`) VALUES "
|
ss << "INSERT INTO `login_users` (`id`, `email`, `first_name`, `last_name`, `username`, `password`, `pubkey`, `privkey`, `created`, `email_checked`, `passphrase_shown`, `language`, `disabled`, `group_id`) VALUES "
|
||||||
<< "(1, 'd_schultz32@gmx.de', 'DDD', 'Schultz', 'Diddel', 18242007140018938940, 0x69f2fefd6fa6947a370b9f8d3147f6617cf67416517ce25cb2d63901c666933c, 0x567f3e623a1899d1f8d69190c5799433c134ce0137c0c38cc0347874586d6234a19f2a0b484e6cc1863502e580ae6c17db1131f29a35eba45a46be29c7ee592940a3bd3ad519075fdeed6e368f0eb818, '2020-02-20 16:05:44', 1, 0, 'de', 0, 1), ";
|
<< "(1, 'd_schultz32@gmx.de', 'DDD', 'Schultz', 'Diddel', 18242007140018938940, 0x69f2fefd6fa6947a370b9f8d3147f6617cf67416517ce25cb2d63901c666933c, 0x567f3e623a1899d1f8d69190c5799433c134ce0137c0c38cc0347874586d6234a19f2a0b484e6cc1863502e580ae6c17db1131f29a35eba45a46be29c7ee592940a3bd3ad519075fdeed6e368f0eb818, '2020-02-20 16:05:44', 1, 0, 'de', 0, 1), ";
|
||||||
|
|
||||||
// if this isn't the same, some tests will fail, so we update the test data here.
|
// if this isn't the same, some tests will fail, so we update the test data here.
|
||||||
@ -211,7 +211,7 @@ int load(int argc, char* argv[]) {
|
|||||||
}
|
}
|
||||||
ss.str(std::string());
|
ss.str(std::string());
|
||||||
|
|
||||||
ss << "INSERT INTO `user_roles` (`id`, `user_id`, `role_id`) VALUES"
|
ss << "INSERT INTO `login_user_roles` (`id`, `user_id`, `role_id`) VALUES"
|
||||||
<< "(1, 3, 1);";
|
<< "(1, 3, 1);";
|
||||||
|
|
||||||
if (runMysql(ss.str())) {
|
if (runMysql(ss.str())) {
|
||||||
@ -219,7 +219,7 @@ int load(int argc, char* argv[]) {
|
|||||||
}
|
}
|
||||||
ss.str(std::string());
|
ss.str(std::string());
|
||||||
|
|
||||||
ss << "INSERT INTO `groups` (`id`, `alias`, `name`, `url`, `description`) VALUES"
|
ss << "INSERT INTO `login_groups` (`id`, `alias`, `name`, `url`, `description`) VALUES"
|
||||||
<< "(1, 'gdd1', 'Gradido1', 'gdd1.gradido.com', 'Der erste offizielle Gradido Server (zum Testen)'), "
|
<< "(1, 'gdd1', 'Gradido1', 'gdd1.gradido.com', 'Der erste offizielle Gradido Server (zum Testen)'), "
|
||||||
<< "(2, 'gdd_test', 'Gradido Test', 'gdd1.gradido.com', 'Testgroup (zum Testen)'); ";
|
<< "(2, 'gdd_test', 'Gradido Test', 'gdd1.gradido.com', 'Testgroup (zum Testen)'); ";
|
||||||
if (runMysql(ss.str())) {
|
if (runMysql(ss.str())) {
|
||||||
|
|||||||
@ -74,7 +74,8 @@ enum PageState {
|
|||||||
{
|
{
|
||||||
//mSession->finalizeTransaction(false, true);
|
//mSession->finalizeTransaction(false, true);
|
||||||
//
|
//
|
||||||
if(!transaction.isNull() && transaction->getModel()->getUserId() == user_model->getID())
|
if(!transaction.isNull() &&
|
||||||
|
(transaction_body->isCreation() || transaction->getModel()->getUserId() == user_model->getID()))
|
||||||
{
|
{
|
||||||
if(pt->removeTask(transaction)) {
|
if(pt->removeTask(transaction)) {
|
||||||
transaction->deleteFromDB();
|
transaction->deleteFromDB();
|
||||||
@ -150,7 +151,7 @@ enum PageState {
|
|||||||
transaction_body = transaction->getTransactionBody();
|
transaction_body = transaction->getTransactionBody();
|
||||||
// user can only delete their own transactions
|
// user can only delete their own transactions
|
||||||
// TODO: Auto timeout for community transactions
|
// TODO: Auto timeout for community transactions
|
||||||
if(transaction->getModel()->getUserId() == user_model->getID()) {
|
if(transaction_body->isCreation() || transaction->getModel()->getUserId() == user_model->getID()) {
|
||||||
transaction_removeable = true;
|
transaction_removeable = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -338,20 +339,19 @@ enum PageState {
|
|||||||
<%= gettext("Transaktion unterzeichnen") %>
|
<%= gettext("Transaktion unterzeichnen") %>
|
||||||
</button>
|
</button>
|
||||||
<% } %>
|
<% } %>
|
||||||
|
<button type="submit" class="form-button button-cancel" name="skip" value="skip">
|
||||||
|
<i class="material-icons-outlined">debug-step-over</i>
|
||||||
|
<%= gettext("Transaktion überspringen") %>
|
||||||
|
</button>
|
||||||
<% if(transaction_removeable) { %>
|
<% if(transaction_removeable) { %>
|
||||||
<button type="submit" class="form-button button-cancel" name="abort" value="abort">
|
<button type="submit" class="form-button button-cancel" name="abort" value="abort">
|
||||||
<i class="material-icons-outlined">delete</i>
|
<i class="material-icons-outlined">delete</i>
|
||||||
<%= gettext("Transaktion verwerfen") %>
|
<%= gettext("Transaktion verwerfen") %>
|
||||||
</button>
|
</button>
|
||||||
<% } else { %>
|
|
||||||
<button type="submit" class="form-button button-cancel" name="skip" value="skip">
|
|
||||||
<i class="material-icons-outlined">debug-step-over</i>
|
|
||||||
<%= gettext("Transaktion überspringen") %>
|
|
||||||
</button>
|
|
||||||
<% } %>
|
<% } %>
|
||||||
</form>
|
</form>
|
||||||
<% } %>
|
<% } %>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<%@ include file="include/footer_chr.cpsp" %>
|
<%@ include file="include/footer_chr.cpsp" %>
|
||||||
|
|||||||
@ -1,5 +0,0 @@
|
|||||||
[production]
|
|
||||||
flavor=mariadb:10.5
|
|
||||||
host=127.0.0.1
|
|
||||||
port=3306
|
|
||||||
user=root
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
default-character-set=utf8mb4
|
|
||||||
default-collation=utf8mb4_unicode_ci
|
|
||||||
schema=gradido_login
|
|
||||||
@ -3,29 +3,7 @@
|
|||||||
#########################################################################################################
|
#########################################################################################################
|
||||||
FROM mariadb/server:10.5 as mariadb_server
|
FROM mariadb/server:10.5 as mariadb_server
|
||||||
|
|
||||||
ENV DOCKER_WORKDIR="/docker-entrypoint-initdb.d"
|
# ENV DOCKER_WORKDIR="/docker-entrypoint-initdb.d"
|
||||||
|
|
||||||
RUN mkdir -p ${DOCKER_WORKDIR}
|
# RUN mkdir -p ${DOCKER_WORKDIR}
|
||||||
WORKDIR ${DOCKER_WORKDIR}
|
# WORKDIR ${DOCKER_WORKDIR}
|
||||||
|
|
||||||
# create databases
|
|
||||||
COPY ./mariadb/setup_dbs.sql a1_setup_dbs.sql
|
|
||||||
# login server db
|
|
||||||
COPY ./login_server/skeema/ .
|
|
||||||
RUN cd ./gradido_login/ && for f in *.sql; do cp -- "$f" "../b1_$f"; sed -i '1i use gradido_login;' "../b1_$f"; done
|
|
||||||
COPY ./configs/login_server/setup_db_tables ./gradido_login/insert
|
|
||||||
RUN cd ./gradido_login/insert && for f in *.sql; do cp -- "$f" "../../c1_$f"; sed -i '1i use gradido_login;' "../../c1_$f"; done
|
|
||||||
|
|
||||||
#########################################################################################################
|
|
||||||
# mariadb server with test dbs
|
|
||||||
#########################################################################################################
|
|
||||||
FROM mariadb_server as mariadb_server_test
|
|
||||||
|
|
||||||
# create test databases
|
|
||||||
COPY ./mariadb/setup_test_dbs.sql a2_setup_dbs.sql
|
|
||||||
|
|
||||||
# login server test db
|
|
||||||
COPY ./login_server/skeema/ .
|
|
||||||
RUN cd ./gradido_login/ && for f in *.sql; do cp -- "$f" "../b2_$f"; sed -i '1i use gradido_login_test;' "../b2_$f"; done
|
|
||||||
COPY ./configs/login_server/setup_db_tables ./gradido_login/insert
|
|
||||||
RUN cd ./gradido_login/insert && for f in *.sql; do cp -- "$f" "../../c2_$f"; sed -i '1i use gradido_login_test;' "../../c2_$f"; done
|
|
||||||
|
|||||||
@ -1,7 +0,0 @@
|
|||||||
create database gradido_login
|
|
||||||
DEFAULT CHARACTER SET utf8mb4
|
|
||||||
DEFAULT COLLATE utf8mb4_unicode_ci;
|
|
||||||
create database IF NOT EXISTS _skeema_tmp
|
|
||||||
DEFAULT CHARACTER SET utf8mb4
|
|
||||||
DEFAULT COLLATE utf8mb4_unicode_ci;
|
|
||||||
FLUSH PRIVILEGES;
|
|
||||||
@ -1,55 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
COLOR_GREEN="\033[0;32m"
|
|
||||||
COLOR_YELLOW="\e[33m"
|
|
||||||
COLOR_NONE="\033[0m"
|
|
||||||
|
|
||||||
LOGIN_DB_USER=gradido_login_live
|
|
||||||
LOGIN_DB_NAME=gradido_login_live
|
|
||||||
LOGIN_DB_PASSWD=$(< /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c${1:-32};echo);
|
|
||||||
|
|
||||||
COMMUNITY_DB_USER=gradido_community_live
|
|
||||||
COMMUNITY_DB_NAME=gradido_community_live
|
|
||||||
COMMUNITY_DB_PASSWD=$(< /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c${1:-32};echo);
|
|
||||||
|
|
||||||
# create table
|
|
||||||
mysql <<EOFMYSQL
|
|
||||||
create database $LOGIN_DB_NAME
|
|
||||||
DEFAULT CHARACTER SET utf8mb4
|
|
||||||
DEFAULT COLLATE utf8mb4_unicode_ci;
|
|
||||||
create database $COMMUNITY_DB_NAME
|
|
||||||
DEFAULT CHARACTER SET utf8mb4
|
|
||||||
DEFAULT COLLATE utf8mb4_unicode_ci;
|
|
||||||
create database IF NOT EXISTS _skeema_tmp
|
|
||||||
DEFAULT CHARACTER SET utf8mb4
|
|
||||||
DEFAULT COLLATE utf8mb4_unicode_ci;
|
|
||||||
CREATE USER '$LOGIN_DB_USER'@'localhost' IDENTIFIED BY '$LOGIN_DB_PASSWD';
|
|
||||||
GRANT ALL PRIVILEGES ON $LOGIN_DB_NAME.* TO '$LOGIN_DB_USER'@'localhost';
|
|
||||||
GRANT ALL PRIVILEGES ON _skeema_tmp.* TO '$LOGIN_DB_USER'@'localhost';
|
|
||||||
|
|
||||||
CREATE USER '$COMMUNITY_DB_USER'@'localhost' IDENTIFIED BY '$COMMUNITY_DB_PASSWD';
|
|
||||||
GRANT ALL PRIVILEGES ON $COMMUNITY_DB_NAME.* TO '$COMMUNITY_DB_USER'@'localhost';
|
|
||||||
GRANT ALL PRIVILEGES ON _skeema_tmp.* TO '$COMMUNITY_DB_USER'@'localhost';
|
|
||||||
FLUSH PRIVILEGES;
|
|
||||||
EOFMYSQL
|
|
||||||
|
|
||||||
# populate db of login-server
|
|
||||||
cd ../login_server/skeema
|
|
||||||
sudo cat << EOF > .skeema
|
|
||||||
[production]
|
|
||||||
flavor=mariadb:10.3.25
|
|
||||||
host=127.0.0.1
|
|
||||||
port=3306
|
|
||||||
user=$LOGIN_DB_USER
|
|
||||||
EOF
|
|
||||||
cd gradido_login
|
|
||||||
sudo cat << EOF > .skeema
|
|
||||||
default-character-set=utf8mb4
|
|
||||||
default-collation=utf8mb4_unicode_ci
|
|
||||||
schema=$LOGIN_DB_NAME
|
|
||||||
EOF
|
|
||||||
|
|
||||||
source $HOME/.gvm/scripts/gvm
|
|
||||||
gvm use go1.14.4
|
|
||||||
skeema push -p$LOGIN_DB_PASSWD
|
|
||||||
|
|
||||||
echo -e "${COLOR_YELLOW}Login-Server db password: $LOGIN_DB_PASSWD${COLOR_NONE}"
|
|
||||||
@ -1,10 +0,0 @@
|
|||||||
create database gradido_login_test
|
|
||||||
DEFAULT CHARACTER SET utf8mb4
|
|
||||||
DEFAULT COLLATE utf8mb4_unicode_ci;
|
|
||||||
create database gradido_community_test
|
|
||||||
DEFAULT CHARACTER SET utf8mb4
|
|
||||||
DEFAULT COLLATE utf8mb4_unicode_ci;
|
|
||||||
create database IF NOT EXISTS _skeema_tmp
|
|
||||||
DEFAULT CHARACTER SET utf8mb4
|
|
||||||
DEFAULT COLLATE utf8mb4_unicode_ci;
|
|
||||||
FLUSH PRIVILEGES;
|
|
||||||
@ -1,5 +0,0 @@
|
|||||||
[production]
|
|
||||||
flavor=mariadb:10.5
|
|
||||||
host=mariadb
|
|
||||||
port=3306
|
|
||||||
user=root
|
|
||||||
@ -1,39 +0,0 @@
|
|||||||
#########################################################################################################
|
|
||||||
# Build skeema
|
|
||||||
#########################################################################################################
|
|
||||||
FROM golang:1.17.1 as skeema_build
|
|
||||||
RUN go get -d -v github.com/skeema/skeema
|
|
||||||
WORKDIR /go/src/github.com/skeema/skeema
|
|
||||||
RUN go install github.com/skeema/skeema@v1.5.3
|
|
||||||
|
|
||||||
#########################################################################################################
|
|
||||||
# Run skeema for dev (dynamic)
|
|
||||||
#########################################################################################################
|
|
||||||
FROM skeema_build as skeema_dev_run
|
|
||||||
|
|
||||||
ENV DOCKER_WORKDIR="/skeema"
|
|
||||||
|
|
||||||
RUN mkdir -p ${DOCKER_WORKDIR}
|
|
||||||
WORKDIR ${DOCKER_WORKDIR}
|
|
||||||
|
|
||||||
COPY ./skeema/.skeema .
|
|
||||||
COPY ./mariadb/.skeema.login .
|
|
||||||
|
|
||||||
CMD cp .skeema.login ./gradido_login/.skeema && skeema push --allow-unsafe && rm ./gradido_login/.skeema
|
|
||||||
|
|
||||||
#########################################################################################################
|
|
||||||
# Run skeema
|
|
||||||
#########################################################################################################
|
|
||||||
FROM skeema_build as skeema_run
|
|
||||||
|
|
||||||
ENV DOCKER_WORKDIR="/skeema"
|
|
||||||
|
|
||||||
RUN mkdir -p ${DOCKER_WORKDIR}
|
|
||||||
WORKDIR ${DOCKER_WORKDIR}
|
|
||||||
|
|
||||||
COPY ./skeema/.skeema .
|
|
||||||
COPY ./login_server/skeema/ .
|
|
||||||
COPY ./mariadb/.skeema.login ./gradido_login/.skeema
|
|
||||||
|
|
||||||
CMD skeema push --allow-unsafe
|
|
||||||
|
|
||||||