mirror of
https://github.com/IT4Change/gradido.git
synced 2025-12-13 07:45:54 +00:00
Merge pull request #1324 from gradido/import_production_data
import_production_data
This commit is contained in:
commit
c595c47d90
14
.github/workflows/test.yml
vendored
14
.github/workflows/test.yml
vendored
@ -507,9 +507,17 @@ jobs:
|
||||
# UNIT TESTS BACKEND #####################################################
|
||||
##########################################################################
|
||||
- name: backend | docker-compose
|
||||
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb database
|
||||
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
|
||||
- name: Sleep for 30 seconds
|
||||
run: sleep 30s
|
||||
shell: bash
|
||||
- name: backend | docker-compose database
|
||||
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps database
|
||||
- name: Sleep for 30 seconds
|
||||
run: sleep 30s
|
||||
shell: bash
|
||||
- name: backend Unit tests | test
|
||||
run: cd database && yarn && yarn build && cd ../backend && yarn && yarn CI_worklfow_test
|
||||
run: cd database && yarn && yarn build && cd ../backend && yarn && yarn CI_workflow_test
|
||||
# run: docker-compose -f docker-compose.yml -f docker-compose.test.yml exec -T backend yarn test
|
||||
##########################################################################
|
||||
# COVERAGE CHECK BACKEND #################################################
|
||||
@ -520,7 +528,7 @@ jobs:
|
||||
report_name: Coverage Backend
|
||||
type: lcov
|
||||
result_path: ./backend/coverage/lcov.info
|
||||
min_coverage: 45
|
||||
min_coverage: 38
|
||||
token: ${{ github.token }}
|
||||
|
||||
##############################################################################
|
||||
|
||||
@ -2,7 +2,7 @@ PORT=4000
|
||||
JWT_SECRET=$JWT_SECRET
|
||||
JWT_EXPIRES_IN=10m
|
||||
GRAPHIQL=false
|
||||
GDT_API_URL=https://gdt.gradido.net
|
||||
GDT_API_URL=$GDT_API_URL
|
||||
DB_HOST=localhost
|
||||
DB_PORT=3306
|
||||
DB_USER=$DB_USER
|
||||
|
||||
@ -9,10 +9,13 @@ module.exports = async () => {
|
||||
moduleNameMapper: {
|
||||
'@entity/(.*)': '<rootDir>/../database/build/entity/$1',
|
||||
// This is hack to fix a problem with the library `ts-mysql-migrate` which does differentiate between its ts/js state
|
||||
'@dbTools/(.*)': '<rootDir>/../database/src/$1',
|
||||
/*
|
||||
'@dbTools/(.*)':
|
||||
process.env.NODE_ENV === 'development'
|
||||
? '<rootDir>/../database/src/$1'
|
||||
: '<rootDir>/../database/build/src/$1',
|
||||
*/
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -13,7 +13,7 @@
|
||||
"start": "node build/index.js",
|
||||
"dev": "nodemon -w src --ext ts --exec ts-node src/index.ts",
|
||||
"lint": "eslint . --ext .js,.ts",
|
||||
"CI_worklfow_test": "jest --runInBand --coverage ",
|
||||
"CI_workflow_test": "jest --runInBand --coverage ",
|
||||
"test": "NODE_ENV=development jest --runInBand --coverage "
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
1
backend/src/config/mnemonic.uncompressed_buffer13116.txt
Normal file
1
backend/src/config/mnemonic.uncompressed_buffer13116.txt
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -37,11 +37,11 @@ export class User {
|
||||
@Field(() => String)
|
||||
lastName: string
|
||||
|
||||
@Field(() => String)
|
||||
username: string
|
||||
@Field(() => String, { nullable: true })
|
||||
username?: string
|
||||
|
||||
@Field(() => String)
|
||||
description: string
|
||||
@Field(() => String, { nullable: true })
|
||||
description?: string
|
||||
|
||||
@Field(() => String)
|
||||
pubkey: string
|
||||
|
||||
@ -584,6 +584,7 @@ export class TransactionResolver {
|
||||
-centAmount,
|
||||
queryRunner,
|
||||
)
|
||||
|
||||
// Insert Transaction: recipient + amount
|
||||
const recipiantUserTransactionBalance = await addUserTransaction(
|
||||
recipiantUser,
|
||||
@ -599,6 +600,7 @@ export class TransactionResolver {
|
||||
transaction.received,
|
||||
queryRunner,
|
||||
)
|
||||
|
||||
// Update Balance: recipiant + amount
|
||||
const recipiantStateBalance = await updateStateBalance(
|
||||
recipiantUser,
|
||||
|
||||
@ -49,7 +49,10 @@ const isLanguage = (language: string): boolean => {
|
||||
}
|
||||
|
||||
const PHRASE_WORD_COUNT = 24
|
||||
const WORDS = fs.readFileSync('src/config/mnemonic.english.txt').toString().split('\n')
|
||||
const WORDS = fs
|
||||
.readFileSync('src/config/mnemonic.uncompressed_buffer13116.txt')
|
||||
.toString()
|
||||
.split(',')
|
||||
const PassphraseGenerate = (): string[] => {
|
||||
const result = []
|
||||
for (let i = 0; i < PHRASE_WORD_COUNT; i++) {
|
||||
@ -418,7 +421,7 @@ export class UserResolver {
|
||||
// Table: login_user_backups
|
||||
const loginUserBackup = new LoginUserBackup()
|
||||
loginUserBackup.userId = loginUserId
|
||||
loginUserBackup.passphrase = passphrase.join(' ') + ' ' // login server saves trailing space
|
||||
loginUserBackup.passphrase = passphrase.join(' ') // login server saves trailing space
|
||||
loginUserBackup.mnemonicType = 2 // ServerConfig::MNEMONIC_BIP0039_SORTED_ORDER;
|
||||
|
||||
await queryRunner.manager.save(loginUserBackup).catch((error) => {
|
||||
@ -585,15 +588,23 @@ export class UserResolver {
|
||||
})
|
||||
|
||||
const loginUserBackupRepository = await getRepository(LoginUserBackup)
|
||||
const loginUserBackup = await loginUserBackupRepository
|
||||
.findOneOrFail({ userId: loginUser.id })
|
||||
.catch(() => {
|
||||
throw new Error('Could not find corresponding BackupUser')
|
||||
})
|
||||
let loginUserBackup = await loginUserBackupRepository.findOne({ userId: loginUser.id })
|
||||
|
||||
const passphrase = loginUserBackup.passphrase.slice(0, -1).split(' ')
|
||||
// Generate Passphrase if needed
|
||||
if (!loginUserBackup) {
|
||||
const passphrase = PassphraseGenerate()
|
||||
loginUserBackup = new LoginUserBackup()
|
||||
loginUserBackup.userId = loginUser.id
|
||||
loginUserBackup.passphrase = passphrase.join(' ') // login server saves trailing space
|
||||
loginUserBackup.mnemonicType = 2 // ServerConfig::MNEMONIC_BIP0039_SORTED_ORDER;
|
||||
loginUserBackupRepository.save(loginUserBackup)
|
||||
}
|
||||
|
||||
const passphrase = loginUserBackup.passphrase.split(' ')
|
||||
if (passphrase.length < PHRASE_WORD_COUNT) {
|
||||
// TODO if this can happen we cannot recover from that
|
||||
// this seem to be good on production data, if we dont
|
||||
// make a coding mistake we do not have a problem here
|
||||
throw new Error('Could not load a correct passphrase')
|
||||
}
|
||||
|
||||
|
||||
@ -28,7 +28,7 @@ import { elopageWebhook } from '../webhook/elopage'
|
||||
// TODO implement
|
||||
// import queryComplexity, { simpleEstimator, fieldConfigEstimator } from "graphql-query-complexity";
|
||||
|
||||
const DB_VERSION = '0006-login_users_collation'
|
||||
const DB_VERSION = '0012-login_user_backups_unify_wordlist'
|
||||
|
||||
const createServer = async (context: any = serverContext): Promise<any> => {
|
||||
// open mysql connection
|
||||
|
||||
@ -37,8 +37,8 @@ export const elopageWebhook = async (req: any, res: any): Promise<void> => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log('Elopage Hook received', req.body)
|
||||
res.status(200).end() // Responding is important
|
||||
const loginElopgaeBuyRepository = await getCustomRepository(LoginElopageBuysRepository)
|
||||
const loginElopgaeBuy = new LoginElopageBuys()
|
||||
const loginElopageBuyRepository = await getCustomRepository(LoginElopageBuysRepository)
|
||||
const loginElopageBuy = new LoginElopageBuys()
|
||||
let firstName = ''
|
||||
let lastName = ''
|
||||
const entries = req.body.split('&')
|
||||
@ -51,39 +51,39 @@ export const elopageWebhook = async (req: any, res: any): Promise<void> => {
|
||||
const val = decodeURIComponent(keyVal[1]).replace('+', ' ').trim()
|
||||
switch (key) {
|
||||
case 'product[affiliate_program_id]':
|
||||
loginElopgaeBuy.affiliateProgramId = parseInt(val)
|
||||
loginElopageBuy.affiliateProgramId = parseInt(val)
|
||||
break
|
||||
case 'publisher[id]':
|
||||
loginElopgaeBuy.publisherId = parseInt(val)
|
||||
loginElopageBuy.publisherId = parseInt(val)
|
||||
break
|
||||
case 'order_id':
|
||||
loginElopgaeBuy.orderId = parseInt(val)
|
||||
loginElopageBuy.orderId = parseInt(val)
|
||||
break
|
||||
case 'product_id':
|
||||
loginElopgaeBuy.productId = parseInt(val)
|
||||
loginElopageBuy.productId = parseInt(val)
|
||||
break
|
||||
case 'product[price]':
|
||||
// TODO: WHAT THE ACTUAL FUK? Please save this as float in the future directly in the database
|
||||
loginElopgaeBuy.productPrice = Math.trunc(parseFloat(val) * 100)
|
||||
loginElopageBuy.productPrice = Math.trunc(parseFloat(val) * 100)
|
||||
break
|
||||
case 'payer[email]':
|
||||
loginElopgaeBuy.payerEmail = val
|
||||
loginElopageBuy.payerEmail = val
|
||||
break
|
||||
case 'publisher[email]':
|
||||
loginElopgaeBuy.publisherEmail = val
|
||||
loginElopageBuy.publisherEmail = val
|
||||
break
|
||||
case 'payment_state':
|
||||
loginElopgaeBuy.payed = val === 'paid'
|
||||
loginElopageBuy.payed = val === 'paid'
|
||||
break
|
||||
case 'success_date':
|
||||
loginElopgaeBuy.successDate = new Date(val)
|
||||
loginElopageBuy.successDate = new Date(val)
|
||||
break
|
||||
case 'event':
|
||||
loginElopgaeBuy.event = val
|
||||
loginElopageBuy.event = val
|
||||
break
|
||||
case 'membership[id]':
|
||||
// TODO this was never set on login_server - its unclear if this is the correct value
|
||||
loginElopgaeBuy.elopageUserId = parseInt(val)
|
||||
loginElopageBuy.elopageUserId = parseInt(val)
|
||||
break
|
||||
case 'payer[first_name]':
|
||||
firstName = val
|
||||
@ -100,14 +100,14 @@ export const elopageWebhook = async (req: any, res: any): Promise<void> => {
|
||||
})
|
||||
|
||||
// Do not process certain events
|
||||
if (['lesson.viewed', 'lesson.completed', 'lesson.commented'].includes(loginElopgaeBuy.event)) {
|
||||
if (['lesson.viewed', 'lesson.completed', 'lesson.commented'].includes(loginElopageBuy.event)) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log('User viewed, completed or commented - not saving hook')
|
||||
return
|
||||
}
|
||||
|
||||
// Save the hook data
|
||||
await loginElopgaeBuyRepository.save(loginElopgaeBuy)
|
||||
await loginElopageBuyRepository.save(loginElopageBuy)
|
||||
|
||||
// create user for certain products
|
||||
/*
|
||||
@ -118,8 +118,8 @@ export const elopageWebhook = async (req: any, res: any): Promise<void> => {
|
||||
Business-Mitgliedschaft, 43960
|
||||
Förderbeitrag: 49106
|
||||
*/
|
||||
if ([36001, 43741, 43870, 43944, 43960, 49106].includes(loginElopgaeBuy.productId)) {
|
||||
const email = loginElopgaeBuy.payerEmail
|
||||
if ([36001, 43741, 43870, 43944, 43960, 49106].includes(loginElopageBuy.productId)) {
|
||||
const email = loginElopageBuy.payerEmail
|
||||
|
||||
const VALIDATE_EMAIL = /^[a-zA-Z0-9.!#$%&?*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$/
|
||||
const VALIDATE_NAME = /^<>&;]{2,}$/
|
||||
@ -152,7 +152,7 @@ export const elopageWebhook = async (req: any, res: any): Promise<void> => {
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
publisherId: loginElopgaeBuy.publisherId,
|
||||
publisherId: loginElopageBuy.publisherId,
|
||||
})
|
||||
} catch (error) {
|
||||
// eslint-disable-next-line no-console
|
||||
|
||||
@ -98,6 +98,8 @@ COPY --from=build ${DOCKER_WORKDIR}/node_modules ./node_modules
|
||||
# COPY --from=build ${DOCKER_WORKDIR}/public ./public
|
||||
# Copy package.json for script definitions (lock file should not be needed)
|
||||
COPY --from=build ${DOCKER_WORKDIR}/package.json ./package.json
|
||||
# Copy Mnemonic files
|
||||
COPY --from=build ${DOCKER_WORKDIR}/src/config/*.txt ./src/config/
|
||||
# Copy run scripts run/
|
||||
# COPY --from=build ${DOCKER_WORKDIR}/run ./run
|
||||
|
||||
@ -112,7 +114,7 @@ CMD /bin/sh -c "yarn run up"
|
||||
##################################################################################
|
||||
# PRODUCTION RESET ###############################################################
|
||||
##################################################################################
|
||||
# FROM production as production_reset
|
||||
FROM production as production_reset
|
||||
|
||||
# Run command
|
||||
CMD /bin/sh -c "yarn run reset"
|
||||
|
||||
@ -19,7 +19,7 @@ export class LoginUser extends BaseEntity {
|
||||
@Column({ length: 255, default: '' })
|
||||
username: string
|
||||
|
||||
@Column({ default: '' })
|
||||
@Column({ default: '', nullable: true })
|
||||
description: string
|
||||
|
||||
@Column({ type: 'bigint', default: 0, unsigned: true })
|
||||
|
||||
@ -19,7 +19,7 @@ export class LoginUser extends BaseEntity {
|
||||
@Column({ length: 255, default: '', collation: 'utf8mb4_unicode_ci' })
|
||||
username: string
|
||||
|
||||
@Column({ default: '', collation: 'utf8mb4_unicode_ci' })
|
||||
@Column({ default: '', collation: 'utf8mb4_unicode_ci', nullable: true })
|
||||
description: string
|
||||
|
||||
@Column({ type: 'bigint', default: 0, unsigned: true })
|
||||
|
||||
86
database/integrity/0013-test.ts.keep
Normal file
86
database/integrity/0013-test.ts.keep
Normal file
@ -0,0 +1,86 @@
|
||||
/* MIGRATION TO CLEAN PRODUCTION DATA
|
||||
*
|
||||
* the way the passphrases are stored in login_user_backups is inconsistent.
|
||||
* we need to try to detect which word list was used and transform it accordingly
|
||||
*/
|
||||
|
||||
import fs from 'fs'
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const sodium = require('sodium-native')
|
||||
|
||||
const PHRASE_WORD_COUNT = 24
|
||||
const WORDS = fs
|
||||
.readFileSync('src/config/mnemonic.uncompressed_buffer13116.txt')
|
||||
.toString()
|
||||
.split(',')
|
||||
|
||||
const KeyPairEd25519Create = (passphrase: string[]): Buffer[] => {
|
||||
if (!passphrase.length || passphrase.length < PHRASE_WORD_COUNT) {
|
||||
throw new Error('passphrase empty or to short')
|
||||
}
|
||||
|
||||
const state = Buffer.alloc(sodium.crypto_hash_sha512_STATEBYTES)
|
||||
sodium.crypto_hash_sha512_init(state)
|
||||
|
||||
for (let i = 0; i < PHRASE_WORD_COUNT; i++) {
|
||||
const value = Buffer.alloc(8)
|
||||
const wordIndex = WORDS.indexOf(passphrase[i])
|
||||
value.writeBigInt64LE(BigInt(wordIndex))
|
||||
sodium.crypto_hash_sha512_update(state, value)
|
||||
}
|
||||
// trailing space is part of the login_server implementation
|
||||
const clearPassphrase = passphrase.slice(0, PHRASE_WORD_COUNT).join(' ') + ' '
|
||||
sodium.crypto_hash_sha512_update(state, Buffer.from(clearPassphrase))
|
||||
const outputHashBuffer = Buffer.alloc(sodium.crypto_hash_sha512_BYTES)
|
||||
sodium.crypto_hash_sha512_final(state, outputHashBuffer)
|
||||
|
||||
const pubKey = Buffer.alloc(sodium.crypto_sign_PUBLICKEYBYTES)
|
||||
const privKey = Buffer.alloc(sodium.crypto_sign_SECRETKEYBYTES)
|
||||
|
||||
sodium.crypto_sign_seed_keypair(
|
||||
pubKey,
|
||||
privKey,
|
||||
outputHashBuffer.slice(0, sodium.crypto_sign_SEEDBYTES),
|
||||
)
|
||||
|
||||
return [pubKey, privKey]
|
||||
}
|
||||
|
||||
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
// Delete data with no reference in login_users table
|
||||
// eslint-disable-next-line no-console
|
||||
// 663 affected rows
|
||||
const userBackups = await queryFn(
|
||||
`SELECT passphrase, LOWER(HEX(pubkey)) as pubkey, user_id
|
||||
FROM login_user_backups
|
||||
LEFT JOIN login_users ON login_user_backups.user_id = login_users.id
|
||||
WHERE user_id=1503`,
|
||||
// WHERE pubkey is not null`, // todo fix this condition and regenerate
|
||||
)
|
||||
let i = 0
|
||||
// eslint-disable-next-line no-console
|
||||
userBackups.forEach(async (userBackup) => {
|
||||
const passphrase = userBackup.passphrase.split(' ')
|
||||
const keyPair = KeyPairEd25519Create(passphrase)
|
||||
if (keyPair[0].toString('hex') !== userBackup.pubkey) {
|
||||
i++
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(
|
||||
'Missmatch Pubkey',
|
||||
i,
|
||||
userBackup.user_id,
|
||||
`"${userBackup.passphrase}"`,
|
||||
`"${keyPair[0].toString('hex')}`,
|
||||
`"${userBackup.pubkey}"`,
|
||||
)
|
||||
} else {
|
||||
// eslint-disable-next-line no-console
|
||||
// console.log('SUCCESS: ', `"${keyPair[0].toString('hex')}`, `"${userBackup.pubkey}"`)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
return [] // cannot transform things back
|
||||
}
|
||||
5
database/integrity/README.md
Normal file
5
database/integrity/README.md
Normal file
@ -0,0 +1,5 @@
|
||||
This is a test to find if all passphrases evaluate to the saved public key.
|
||||
|
||||
You need `yarn add sodium-native` in order to make it work.
|
||||
|
||||
This could be the start of database integrity tests in oder to evaluate the correctness of the database
|
||||
15
database/migrations/0007-login_pending_tasks_delete.ts
Normal file
15
database/migrations/0007-login_pending_tasks_delete.ts
Normal file
@ -0,0 +1,15 @@
|
||||
/* MIGRATION TO CLEAN PRODUCTION DATA
|
||||
*
|
||||
* delete the pending tasks to not have any dead entries.
|
||||
* the way we interact with the table is now differently
|
||||
* and therefore we should clear it to avoid conflicts
|
||||
* and dead entries
|
||||
*/
|
||||
|
||||
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
await queryFn('DELETE FROM `login_pending_tasks`;')
|
||||
}
|
||||
|
||||
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
return [] // cannot undelete things
|
||||
}
|
||||
23
database/migrations/0008-state_users_plug_holes.ts
Normal file
23
database/migrations/0008-state_users_plug_holes.ts
Normal file
@ -0,0 +1,23 @@
|
||||
/* MIGRATION TO CLEAN PRODUCTION DATA
|
||||
*
|
||||
* some entries in the state_users table do not have an email.
|
||||
* this is required tho to work with the new environment.
|
||||
* to mitigate 38 out of 50 emails could be restored from
|
||||
* login_users.
|
||||
*/
|
||||
|
||||
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
// Fill in missing emails from login_users
|
||||
await queryFn(
|
||||
`UPDATE state_users
|
||||
INNER JOIN login_users ON state_users.public_key = login_users.pubkey
|
||||
SET state_users.email = login_users.email
|
||||
WHERE state_users.email = '';`,
|
||||
)
|
||||
// Delete remaining ones
|
||||
await queryFn(`DELETE FROM state_users WHERE email = ''`)
|
||||
}
|
||||
|
||||
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
return [] // cannot undelete things
|
||||
}
|
||||
28
database/migrations/0009-login_users_plug_holes.ts
Normal file
28
database/migrations/0009-login_users_plug_holes.ts
Normal file
@ -0,0 +1,28 @@
|
||||
/* MIGRATION TO CLEAN PRODUCTION DATA
|
||||
*
|
||||
* some entries in the login_users table are inconsistent.
|
||||
* As solution the inconsistent data is purged and the corresponding
|
||||
* account is set as not yet activated
|
||||
*/
|
||||
|
||||
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
// Generate a random private key where the required data is present (pubkey + password + passphrase).
|
||||
// Furthermore the email needs to be confirmed
|
||||
await queryFn(
|
||||
`UPDATE login_users SET privkey = UNHEX(SHA1(RAND()))
|
||||
WHERE privkey IS NULL
|
||||
AND pubkey IS NOT NULL
|
||||
AND password != 0
|
||||
AND email_checked = 1
|
||||
AND id IN (SELECT user_id FROM login_user_backups);`,
|
||||
)
|
||||
|
||||
// Remove incomplete data and set account as not activated yet.
|
||||
await queryFn(
|
||||
`UPDATE login_users SET password = 0, pubkey = NULL, email_checked = 0 WHERE privkey IS NULL;`,
|
||||
)
|
||||
}
|
||||
|
||||
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
return [] // cannot undelete things
|
||||
}
|
||||
45
database/migrations/0010-login_users_state_users_sync.ts
Normal file
45
database/migrations/0010-login_users_state_users_sync.ts
Normal file
@ -0,0 +1,45 @@
|
||||
/* MIGRATION TO CLEAN PRODUCTION DATA
|
||||
*
|
||||
* login_users and state_users are not in sync.
|
||||
* Copy missing data from login_users to state_users.
|
||||
*/
|
||||
|
||||
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
// Copy data with intact private key
|
||||
await queryFn(
|
||||
`INSERT INTO state_users
|
||||
(public_key, email, first_name, last_name, username, disabled)
|
||||
(SELECT pubkey as public_key, email, first_name, last_name, username, disabled
|
||||
FROM login_users
|
||||
WHERE email NOT IN (SELECT email from state_users)
|
||||
AND privkey IS NOT NULL
|
||||
)`,
|
||||
)
|
||||
// Copy data without intact private key, generate random pubkey
|
||||
await queryFn(
|
||||
`INSERT INTO state_users
|
||||
(public_key, email, first_name, last_name, username, disabled)
|
||||
(SELECT UNHEX(SHA1(RAND())) as public_key, email, first_name, last_name, username, disabled
|
||||
FROM login_users
|
||||
WHERE email NOT IN (SELECT email from state_users)
|
||||
AND privkey IS NULL
|
||||
)`,
|
||||
)
|
||||
// Remove duplicate data from state_users with dead pubkeys
|
||||
// 18 entries
|
||||
await queryFn(
|
||||
`DELETE FROM state_users
|
||||
WHERE id IN
|
||||
(SELECT state_users.id FROM state_users
|
||||
WHERE public_key NOT IN
|
||||
(SELECT pubkey FROM login_users
|
||||
WHERE pubkey IS NOT NULL)
|
||||
AND email IN (SELECT email FROM state_users GROUP BY email HAVING COUNT(*) > 1
|
||||
)
|
||||
)`,
|
||||
)
|
||||
}
|
||||
|
||||
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
return [] // cannot undelete things
|
||||
}
|
||||
18
database/migrations/0011-login_user_backups_plug_holes.ts
Normal file
18
database/migrations/0011-login_user_backups_plug_holes.ts
Normal file
@ -0,0 +1,18 @@
|
||||
/* MIGRATION TO CLEAN PRODUCTION DATA
|
||||
*
|
||||
* cleanup the login_user_backups.
|
||||
* Delete data with no reference in login_users table and
|
||||
* delete the right one of the duplicate keys
|
||||
*/
|
||||
|
||||
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
// Delete data with no reference in login_users table
|
||||
await queryFn(`DELETE FROM login_user_backups WHERE user_id NOT IN (SELECT id FROM login_users)`)
|
||||
|
||||
// Delete duplicates which have changed for some reasons
|
||||
await queryFn(`DELETE FROM login_user_backups WHERE id IN (21, 103, 313, 325, 726, 750, 1098)`)
|
||||
}
|
||||
|
||||
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
return [] // cannot undelete things
|
||||
}
|
||||
@ -0,0 +1,66 @@
|
||||
/* MIGRATION TO CLEAN PRODUCTION DATA
|
||||
*
|
||||
* the way the passphrases are stored in login_user_backups is inconsistent.
|
||||
* we need to detect which word list was used and transform it accordingly.
|
||||
* This also removes the trailing space
|
||||
*/
|
||||
import fs from 'fs'
|
||||
|
||||
const TARGET_MNEMONIC_TYPE = 2
|
||||
const PHRASE_WORD_COUNT = 24
|
||||
const WORDS_MNEMONIC_0 = fs
|
||||
.readFileSync('src/config/mnemonic.uncompressed_buffer18112.txt')
|
||||
.toString()
|
||||
.split(',')
|
||||
const WORDS_MNEMONIC_1 = fs
|
||||
.readFileSync('src/config/mnemonic.uncompressed_buffer18113.txt')
|
||||
.toString()
|
||||
.split(',')
|
||||
const WORDS_MNEMONIC_2 = fs
|
||||
.readFileSync('src/config/mnemonic.uncompressed_buffer13116.txt')
|
||||
.toString()
|
||||
.split(',')
|
||||
const WORDS_MNEMONIC = [WORDS_MNEMONIC_0, WORDS_MNEMONIC_1, WORDS_MNEMONIC_2]
|
||||
|
||||
const detectMnemonic = (passphrase: string[]): string[] => {
|
||||
if (passphrase.length < PHRASE_WORD_COUNT) {
|
||||
throw new Error(
|
||||
`Passphrase is not long enough ${passphrase.length}/${PHRASE_WORD_COUNT}; passphrase: ${passphrase}`,
|
||||
)
|
||||
}
|
||||
|
||||
const passphraseSliced = passphrase.slice(0, PHRASE_WORD_COUNT)
|
||||
|
||||
// Loop through all word lists
|
||||
for (let i = 0; i < WORDS_MNEMONIC.length; i++) {
|
||||
// Does the wordlist contain all elements of the passphrase
|
||||
if (passphraseSliced.every((word) => WORDS_MNEMONIC[i].includes(word))) {
|
||||
if (i === TARGET_MNEMONIC_TYPE) {
|
||||
return passphraseSliced
|
||||
} else {
|
||||
return passphraseSliced.map((word) => WORDS_MNEMONIC_2[WORDS_MNEMONIC[i].indexOf(word)])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Could not find mnemonic type for passphrase: ${passphrase}`)
|
||||
}
|
||||
|
||||
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
// Loop through all user backups and update passphrase and mnemonic type if needed
|
||||
const userBackups = await queryFn(`SELECT * FROM login_user_backups`)
|
||||
userBackups.forEach(async (userBackup) => {
|
||||
const passphrase = userBackup.passphrase.split(' ')
|
||||
const newPassphrase = detectMnemonic(passphrase).join(' ')
|
||||
if (newPassphrase !== userBackup.passphrase) {
|
||||
await queryFn(
|
||||
`UPDATE login_user_backups SET passphrase = ?, mnemonic_type = ? WHERE id = ?`,
|
||||
[newPassphrase, TARGET_MNEMONIC_TYPE, userBackup.id],
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
|
||||
return [] // cannot transform things back
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -30,6 +30,8 @@ TYPEORM_LOGGING_RELATIVE_PATH=../deployment/bare_metal/log/typeorm.backend.log
|
||||
|
||||
WEBHOOK_ELOPAGE_SECRET=secret
|
||||
|
||||
GDT_API_URL=https://gdt.gradido.net
|
||||
|
||||
COMMUNITY_NAME=Gradido Development Stage1
|
||||
COMMUNITY_URL=https://stage1.gradido.net/
|
||||
COMMUNITY_REGISTER_URL=https://stage1.gradido.net/register
|
||||
|
||||
@ -22,7 +22,7 @@ fi
|
||||
pm2 stop gradido-backend
|
||||
|
||||
# Backup data
|
||||
mysqldump --databases --single-transaction --quick --lock-tables=false > ${SCRIPT_DIR}/backup/mariadb-backup-$(date +%d-%m-%Y_%H-%M-%S).sql -u ${DB_USER} -p${DB_PASSWORD} ${DB_DATABASE}
|
||||
mysqldump --databases --single-transaction --quick --hex-blob --lock-tables=false > ${SCRIPT_DIR}/backup/mariadb-backup-$(date +%d-%m-%Y_%H-%M-%S).sql -u ${DB_USER} -p${DB_PASSWORD} ${DB_DATABASE}
|
||||
|
||||
# Start Services
|
||||
pm2 start gradido-backend
|
||||
35
deployment/bare_metal/import_old_production.sh
Executable file
35
deployment/bare_metal/import_old_production.sh
Executable file
@ -0,0 +1,35 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -o allexport
|
||||
SCRIPT_PATH=$(realpath $0)
|
||||
SCRIPT_DIR=$(dirname $SCRIPT_PATH)
|
||||
PROJECT_ROOT=$SCRIPT_DIR/../..
|
||||
set +o allexport
|
||||
|
||||
BACKUP_FILE_LOGIN=$1 #gradido_login_22-01-24.sql
|
||||
BACKUP_FILE_COMMUNITY=$2 #gradido_node_22-01-24.sql
|
||||
|
||||
# Load backend .env for DB_USERNAME, DB_PASSWORD & DB_DATABASE
|
||||
# NOTE: all config values will be in process.env when starting
|
||||
# the services and will therefore take precedence over the .env
|
||||
if [ -f "$PROJECT_ROOT/backend/.env" ]; then
|
||||
export $(cat $PROJECT_ROOT/backend/.env | sed 's/#.*//g' | xargs)
|
||||
else
|
||||
export $(cat $PROJECT_ROOT/backend/.env.dist | sed 's/#.*//g' | xargs)
|
||||
fi
|
||||
|
||||
# Delete whole database
|
||||
sudo mysql -uroot -e "show databases" | grep -v Database | grep -v mysql| grep -v information_schema| gawk '{print "drop database `" $1 "`;select sleep(0.1);"}' | sudo mysql -uroot
|
||||
|
||||
BACKUP_LOGIN=$SCRIPT_DIR/backup/$BACKUP_FILE_LOGIN
|
||||
BACKUP_COMMUNITY=$SCRIPT_DIR/backup/$BACKUP_FILE_COMMUNITY
|
||||
|
||||
# import backup login
|
||||
mysql -u ${DB_USER} -p${DB_PASSWORD} <<EOFMYSQL
|
||||
source $BACKUP_LOGIN
|
||||
EOFMYSQL
|
||||
|
||||
# import backup community
|
||||
mysql -u ${DB_USER} -p${DB_PASSWORD} <<EOFMYSQL
|
||||
source $BACKUP_COMMUNITY
|
||||
EOFMYSQL
|
||||
@ -28,7 +28,7 @@ fi
|
||||
pm2 stop gradido-backend
|
||||
|
||||
# Backup data
|
||||
mysqldump --databases --single-transaction --quick --lock-tables=false > ${SCRIPT_DIR}/backup/mariadb-restore-backup-$(date +%d-%m-%Y_%H-%M-%S).sql -u ${DB_USER} -p${DB_PASSWORD} ${DB_DATABASE}
|
||||
mysqldump --databases --single-transaction --quick --hex-blob --lock-tables=false > ${SCRIPT_DIR}/backup/mariadb-restore-backup-$(date +%d-%m-%Y_%H-%M-%S).sql -u ${DB_USER} -p${DB_PASSWORD} ${DB_DATABASE}
|
||||
|
||||
# Restore Data
|
||||
mysql -u ${DB_USER} -p${DB_PASSWORD} <<EOFMYSQL
|
||||
|
||||
@ -77,6 +77,14 @@
|
||||
> git clone https://github.com/gradido/gradido.git
|
||||
> cd gradido/deployment/bare_metal
|
||||
|
||||
# Timezone
|
||||
# Note: This is not needed - UTC(default) is REQUIRED for production data
|
||||
# > sudo timedatectl set-timezone UTC
|
||||
# > sudo timedatectl set-ntp on
|
||||
# > sudo apt purge ntp
|
||||
# > sudo systemctl start systemd-timesyncd
|
||||
# >> timedatectl to verify
|
||||
|
||||
# Adjust .env
|
||||
# NOTE ';' can not be part of any value
|
||||
> cd gradido/deployment/bare_metal
|
||||
|
||||
@ -20,24 +20,18 @@ services:
|
||||
# DATABASE #############################################
|
||||
########################################################
|
||||
database:
|
||||
restart: always # this is very dangerous, but worth a test for the delayed mariadb startup at first run
|
||||
build:
|
||||
context: ./database
|
||||
target: test_up
|
||||
# restart: always # this is very dangerous, but worth a test for the delayed mariadb startup at first run
|
||||
|
||||
#########################################################
|
||||
## MARIADB ##############################################
|
||||
#########################################################
|
||||
mariadb:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./mariadb/Dockerfile
|
||||
target: mariadb_server
|
||||
environment:
|
||||
- MARIADB_ALLOW_EMPTY_PASSWORD=1
|
||||
- MARIADB_USER=root
|
||||
networks:
|
||||
- internal-net
|
||||
- external-net
|
||||
ports:
|
||||
- 3306:3306
|
||||
volumes:
|
||||
- db_test_vol:/var/lib/mysql
|
||||
|
||||
|
||||
@ -108,7 +108,7 @@ services:
|
||||
# DATABASE #############################################
|
||||
########################################################
|
||||
database:
|
||||
image: gradido/database:production_up
|
||||
#image: gradido/database:production_up
|
||||
build:
|
||||
context: ./database
|
||||
target: production_up
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user