Merge branch 'master' into authenticated_tests_for_TransactionLinkResolver

This commit is contained in:
Ulf Gebhardt 2023-02-17 12:09:12 +01:00
commit d26c0bd78a
Signed by: ulfgebhardt
GPG Key ID: DA6B843E748679C9
121 changed files with 5997 additions and 2322 deletions

View File

@ -163,7 +163,6 @@ jobs:
locales_frontend:
name: Locales - Frontend
runs-on: ubuntu-latest
needs: [build_test_frontend]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -171,20 +170,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGE ##################################################
##########################################################################
- name: Download Docker Image (Frontend)
uses: actions/download-artifact@v3
with:
name: docker-frontend-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/frontend.tar
##########################################################################
# LOCALES FRONTEND #######################################################
##########################################################################
- name: Frontend | Locales
run: docker run --rm gradido/frontend:test yarn run locales
run: cd frontend && yarn && yarn run locales
##############################################################################
# JOB: LINT FRONTEND #########################################################
@ -192,7 +181,6 @@ jobs:
lint_frontend:
name: Lint - Frontend
runs-on: ubuntu-latest
needs: [build_test_frontend]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -200,20 +188,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGE ##################################################
##########################################################################
- name: Download Docker Image (Frontend)
uses: actions/download-artifact@v3
with:
name: docker-frontend-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/frontend.tar
##########################################################################
# LINT FRONTEND ##########################################################
##########################################################################
- name: Frontend | Lint
run: docker run --rm gradido/frontend:test yarn run lint
run: cd frontend && yarn && yarn run lint
##############################################################################
# JOB: STYLELINT FRONTEND ####################################################
@ -221,7 +199,6 @@ jobs:
stylelint_frontend:
name: Stylelint - Frontend
runs-on: ubuntu-latest
needs: [build_test_frontend]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -229,20 +206,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGE ##################################################
##########################################################################
- name: Download Docker Image (Frontend)
uses: actions/download-artifact@v3
with:
name: docker-frontend-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/frontend.tar
##########################################################################
# STYLELINT FRONTEND #####################################################
##########################################################################
- name: Frontend | Stylelint
run: docker run --rm gradido/frontend:test yarn run stylelint
run: cd frontend && yarn && yarn run stylelint
##############################################################################
# JOB: LINT ADMIN INTERFACE ##################################################
@ -250,7 +217,6 @@ jobs:
lint_admin:
name: Lint - Admin Interface
runs-on: ubuntu-latest
needs: [build_test_admin]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -258,28 +224,17 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGE ##################################################
##########################################################################
- name: Download Docker Image (Admin Interface)
uses: actions/download-artifact@v3
with:
name: docker-admin-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/admin.tar
##########################################################################
# LINT ADMIN INTERFACE ###################################################
##########################################################################
- name: Admin Interface | Lint
run: docker run --rm gradido/admin:test yarn run lint
run: cd admin && yarn && yarn run lint
##############################################################################
# JOB: STYLELINT ADMIN INTERFACE ##############################################
# JOB: STYLELINT ADMIN INTERFACE #############################################
##############################################################################
stylelint_admin:
name: Stylelint - Admin Interface
runs-on: ubuntu-latest
needs: [build_test_admin]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -287,20 +242,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGE ##################################################
##########################################################################
- name: Download Docker Image (Admin Interface)
uses: actions/download-artifact@v3
with:
name: docker-admin-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/admin.tar
##########################################################################
# STYLELINT ADMIN INTERFACE ##############################################
##########################################################################
- name: Admin Interface | Stylelint
run: docker run --rm gradido/admin:test yarn run stylelint
run: cd admin && yarn && yarn run stylelint
##############################################################################
# JOB: LOCALES ADMIN #########################################################
@ -308,7 +253,6 @@ jobs:
locales_admin:
name: Locales - Admin Interface
runs-on: ubuntu-latest
needs: [build_test_admin]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -316,20 +260,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGE ##################################################
##########################################################################
- name: Download Docker Image (Admin Interface)
uses: actions/download-artifact@v3
with:
name: docker-admin-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/admin.tar
##########################################################################
# LOCALES ADMIN INTERFACE ################################################
##########################################################################
- name: admin | Locales
run: docker run --rm gradido/admin:test yarn run locales
- name: Admin | Locales
run: cd admin && yarn && yarn run locales
##############################################################################
# JOB: LINT BACKEND ##########################################################
@ -337,7 +271,6 @@ jobs:
lint_backend:
name: Lint - Backend
runs-on: ubuntu-latest
needs: [build_test_backend]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -345,20 +278,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGE ##################################################
##########################################################################
- name: Download Docker Image (Backend)
uses: actions/download-artifact@v3
with:
name: docker-backend-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/backend.tar
##########################################################################
# LINT BACKEND ###########################################################
##########################################################################
- name: backend | Lint
run: docker run --rm gradido/backend:test yarn run lint
run: cd backend && yarn && yarn run lint
##############################################################################
# JOB: LOCALES BACKEND #######################################################
@ -366,7 +289,6 @@ jobs:
locales_backend:
name: Locales - Backend
runs-on: ubuntu-latest
needs: [build_test_backend]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -385,7 +307,6 @@ jobs:
lint_database_up:
name: Lint - Database Up
runs-on: ubuntu-latest
needs: [build_test_database_up]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -393,20 +314,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGE ##################################################
##########################################################################
- name: Download Docker Image (Backend)
uses: actions/download-artifact@v3
with:
name: docker-database-test_up
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/database_up.tar
##########################################################################
# LINT DATABASE ##########################################################
##########################################################################
- name: database | Lint
run: docker run --rm gradido/database:test_up yarn run lint
- name: Database | Lint
run: cd database && yarn && yarn run lint
##############################################################################
# JOB: UNIT TEST FRONTEND ###################################################
@ -414,7 +325,6 @@ jobs:
unit_test_frontend:
name: Unit tests - Frontend
runs-on: ubuntu-latest
needs: [build_test_frontend]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -422,30 +332,12 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGES #################################################
##########################################################################
- name: Download Docker Image (Frontend)
uses: actions/download-artifact@v3
with:
name: docker-frontend-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/frontend.tar
##########################################################################
# UNIT TESTS FRONTEND ####################################################
##########################################################################
- name: frontend | Unit tests
- name: Frontend | Unit tests
run: |
docker run --env NODE_ENV=test -v ~/coverage:/app/coverage --rm gradido/frontend:test yarn run test
cp -r ~/coverage ./coverage
##########################################################################
# COVERAGE REPORT FRONTEND ###############################################
##########################################################################
#- name: frontend | Coverage report
# uses: romeovs/lcov-reporter-action@v0.2.21
# with:
# github-token: ${{ secrets.GITHUB_TOKEN }}
# lcov-file: ./coverage/lcov.info
cd frontend && yarn && yarn run test
cp -r ./coverage ../
##########################################################################
# COVERAGE CHECK FRONTEND ################################################
##########################################################################
@ -454,7 +346,7 @@ jobs:
with:
report_name: Coverage Frontend
type: lcov
result_path: ./coverage/lcov.info
result_path: ./frontend/coverage/lcov.info
min_coverage: 95
token: ${{ github.token }}
@ -464,7 +356,6 @@ jobs:
unit_test_admin:
name: Unit tests - Admin Interface
runs-on: ubuntu-latest
needs: [build_test_admin]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -472,22 +363,12 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3
##########################################################################
# DOWNLOAD DOCKER IMAGES #################################################
##########################################################################
- name: Download Docker Image (Admin Interface)
uses: actions/download-artifact@v3
with:
name: docker-admin-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/admin.tar
##########################################################################
# UNIT TESTS ADMIN INTERFACE #############################################
##########################################################################
- name: Admin Interface | Unit tests
run: |
docker run -v ~/coverage:/app/coverage --rm gradido/admin:test yarn run test
cp -r ~/coverage ./coverage
cd admin && yarn && yarn run test
cp -r ./coverage ../
##########################################################################
# COVERAGE CHECK ADMIN INTERFACE #########################################
##########################################################################
@ -496,8 +377,8 @@ jobs:
with:
report_name: Coverage Admin Interface
type: lcov
result_path: ./coverage/lcov.info
min_coverage: 96
result_path: ./admin/coverage/lcov.info
min_coverage: 97
token: ${{ github.token }}
##############################################################################
@ -534,8 +415,9 @@ jobs:
- name: backend | docker-compose database
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps database
- name: backend Unit tests | test
run: cd database && yarn && yarn build && cd ../backend && yarn && yarn test
# run: docker-compose -f docker-compose.yml -f docker-compose.test.yml exec -T backend yarn test
run: |
cd database && yarn && yarn build && cd ../backend && yarn && yarn test
cp -r ./coverage ../
##########################################################################
# COVERAGE CHECK BACKEND #################################################
##########################################################################
@ -577,7 +459,7 @@ jobs:
end-to-end-tests:
name: End-to-End Tests
runs-on: ubuntu-latest
needs: [build_test_mariadb, build_test_database_up, build_test_backend, build_test_admin, build_test_frontend, build_test_nginx]
needs: [build_test_mariadb, build_test_database_up, build_test_admin, build_test_frontend, build_test_nginx]
steps:
##########################################################################
# CHECKOUT CODE ##########################################################
@ -601,13 +483,6 @@ jobs:
path: /tmp
- name: Load Docker Image (Database Up)
run: docker load < /tmp/database_up.tar
- name: Download Docker Image (Backend)
uses: actions/download-artifact@v3
with:
name: docker-backend-test
path: /tmp
- name: Load Docker Image (Backend)
run: docker load < /tmp/backend.tar
- name: Download Docker Image (Frontend)
uses: actions/download-artifact@v3
with:
@ -640,7 +515,11 @@ jobs:
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps database
- name: Boot up test system | docker-compose backend
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps backend
run: |
cd backend
cp .env.test_e2e .env
cd ..
docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps backend
- name: Sleep for 10 seconds
run: sleep 10s
@ -657,6 +536,9 @@ jobs:
- name: Boot up test system | docker-compose frontends
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps frontend admin nginx
- name: Boot up test system | docker-compose mailserver
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mailserver
- name: Sleep for 15 seconds
run: sleep 15s
@ -666,12 +548,12 @@ jobs:
- name: End-to-end tests | run tests
id: e2e-tests
run: |
cd e2e-tests/cypress/tests/
cd e2e-tests/
yarn
yarn run cypress run --spec cypress/e2e/User.Authentication.feature
yarn run cypress run --spec cypress/e2e/User.Authentication.feature,cypress/e2e/User.Authentication.ResetPassword.feature
- name: End-to-end tests | if tests failed, upload screenshots
if: steps.e2e-tests.outcome == 'failure'
if: ${{ failure() && steps.e2e-tests.conclusion == 'failure' }}
uses: actions/upload-artifact@v3
with:
name: cypress-screenshots
path: /home/runner/work/gradido/gradido/e2e-tests/cypress/tests/cypress/screenshots/
path: /home/runner/work/gradido/gradido/e2e-tests/cypress/screenshots/

98
.github/workflows/test_federation.yml vendored Normal file
View File

@ -0,0 +1,98 @@
name: gradido test_federation CI
on: push
jobs:
##############################################################################
# JOB: DOCKER BUILD TEST #####################################################
##############################################################################
build:
name: Docker Build Test
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Build `test` image
run: |
docker build --target test -t "gradido/federation:test" -f federation/Dockerfile .
docker save "gradido/federation:test" > /tmp/federation.tar
- name: Upload Artifact
uses: actions/upload-artifact@v3
with:
name: docker-federation-test
path: /tmp/federation.tar
##############################################################################
# JOB: LINT ##################################################################
##############################################################################
lint:
name: Lint
runs-on: ubuntu-latest
needs: [build]
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Download Docker Image
uses: actions/download-artifact@v3
with:
name: docker-federation-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/federation.tar
- name: Lint
run: docker run --rm gradido/federation:test yarn run lint
##############################################################################
# JOB: UNIT TEST #############################################################
##############################################################################
unit_test:
name: Unit tests
runs-on: ubuntu-latest
needs: [build]
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Download Docker Image
uses: actions/download-artifact@v3
with:
name: docker-federation-test
path: /tmp
- name: Load Docker Image
run: docker load < /tmp/federation.tar
- name: docker-compose mariadb
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps mariadb
- name: Sleep for 30 seconds
run: sleep 30s
shell: bash
- name: docker-compose database
run: docker-compose -f docker-compose.yml -f docker-compose.test.yml up --detach --no-deps database
- name: Sleep for 30 seconds
run: sleep 30s
shell: bash
#- name: Unit tests
# run: cd database && yarn && yarn build && cd ../dht-node && yarn && yarn test
- name: Unit tests
run: |
docker run --env NODE_ENV=test --env DB_HOST=mariadb --network gradido_internal-net -v ~/coverage:/app/coverage --rm gradido/federation:test yarn run test
cp -r ~/coverage ./coverage
- name: Coverage check
uses: webcraftmedia/coverage-check-action@master
with:
report_name: Coverage federation
type: lcov
#result_path: ./federation/coverage/lcov.info
result_path: ./coverage/lcov.info
min_coverage: 72
token: ${{ github.token }}

View File

@ -42,29 +42,72 @@ describe('ContributionLink', () => {
expect(wrapper.find('div.contribution-link').exists()).toBe(true)
})
describe('function editContributionLinkData', () => {
beforeEach(() => {
wrapper.vm.editContributionLinkData()
it('has one contribution link in table', () => {
expect(wrapper.find('div.contribution-link-list').find('tbody').findAll('tr')).toHaveLength(1)
})
it('has contribution form not visible by default', () => {
expect(wrapper.find('#newContribution').isVisible()).toBe(false)
})
describe('click on create new contribution', () => {
beforeEach(async () => {
await wrapper.find('[data-test="new-contribution-link-button"]').trigger('click')
})
it('emits toggle::collapse new Contribution', async () => {
await expect(wrapper.vm.$root.$emit('bv::toggle::collapse', 'newContribution')).toBeTruthy()
it('shows the contribution form', () => {
expect(wrapper.find('#newContribution').isVisible()).toBe(true)
})
describe('click on create new contribution again', () => {
beforeEach(async () => {
await wrapper.find('[data-test="new-contribution-link-button"]').trigger('click')
})
it('closes the contribution form', () => {
expect(wrapper.find('#newContribution').isVisible()).toBe(false)
})
})
describe('click on close button', () => {
beforeEach(async () => {
await wrapper.find('button.btn-secondary').trigger('click')
})
it('closes the contribution form', () => {
expect(wrapper.find('#newContribution').isVisible()).toBe(false)
})
})
})
describe('function closeContributionForm', () => {
describe('edit contribution link', () => {
beforeEach(async () => {
await wrapper.setData({ visible: true })
wrapper.vm.closeContributionForm()
await wrapper
.find('div.contribution-link-list')
.find('tbody')
.findAll('tr')
.at(0)
.findAll('button')
.at(1)
.trigger('click')
})
it('emits toggle::collapse close Contribution-Form ', async () => {
await expect(wrapper.vm.$root.$emit('bv::toggle::collapse', 'newContribution')).toBeTruthy()
it('shows the contribution form', () => {
expect(wrapper.find('#newContribution').isVisible()).toBe(true)
})
it('editContributionLink is false', async () => {
await expect(wrapper.vm.editContributionLink).toBe(false)
it('does not show the new contribution button', () => {
expect(wrapper.find('[data-test="new-contribution-link-button"]').exists()).toBe(false)
})
it('contributionLinkData is empty', async () => {
await expect(wrapper.vm.contributionLinkData).toEqual({})
describe('click on close button', () => {
beforeEach(async () => {
await wrapper.find('button.btn-secondary').trigger('click')
})
it('closes the contribution form', () => {
expect(wrapper.find('#newContribution').isVisible()).toBe(false)
})
})
})
})

View File

@ -10,8 +10,9 @@
>
<b-button
v-if="!editContributionLink"
v-b-toggle.newContribution
@click="visible = !visible"
class="my-3 d-flex justify-content-left"
data-test="new-contribution-link-button"
>
{{ $t('math.plus') }} {{ $t('contributionLink.newContributionLink') }}
</b-button>

View File

@ -70,8 +70,6 @@ export default {
formatter: (value, key, item) => {
if (value) {
return this.$d(new Date(value))
} else {
return null
}
},
},
@ -81,8 +79,6 @@ export default {
formatter: (value, key, item) => {
if (value) {
return this.$d(new Date(value))
} else {
return null
}
},
},

View File

@ -68,13 +68,23 @@ describe('NavBar', () => {
})
describe('wallet', () => {
const assignLocationSpy = jest.fn()
const windowLocationMock = jest.fn()
const windowLocation = window.location
beforeEach(async () => {
delete window.location
window.location = {
assign: windowLocationMock,
}
await wrapper.findAll('.nav-item').at(5).find('a').trigger('click')
})
afterEach(() => {
delete window.location
window.location = windowLocation
})
it.skip('changes window location to wallet', () => {
expect(assignLocationSpy).toBeCalledWith('valid-token')
expect(windowLocationMock()).toBe('valid-token')
})
it('dispatches logout to store', () => {
@ -84,6 +94,7 @@ describe('NavBar', () => {
describe('logout', () => {
const windowLocationMock = jest.fn()
const windowLocation = window.location
beforeEach(async () => {
delete window.location
window.location = {
@ -92,6 +103,11 @@ describe('NavBar', () => {
await wrapper.findAll('.nav-item').at(5).find('a').trigger('click')
})
afterEach(() => {
delete window.location
window.location = windowLocation
})
it('redirects to /logout', () => {
expect(windowLocationMock).toBeCalledWith('http://localhost/login')
})

View File

@ -10,12 +10,11 @@
<b-collapse id="nav-collapse" is-nav>
<b-navbar-nav>
<b-nav-item to="/user">{{ $t('navbar.user_search') }}</b-nav-item>
<b-nav-item
v-show="$store.state.openCreations > 0"
class="bg-color-creation p-1"
to="/creation-confirm"
>
{{ $store.state.openCreations }} {{ $t('navbar.open_creation') }}
<b-nav-item class="bg-color-creation p-1" to="/creation-confirm">
{{ $t('creation') }}
<b-badge v-show="$store.state.openCreations > 0" variant="danger">
{{ $store.state.openCreations }}
</b-badge>
</b-nav-item>
<b-nav-item to="/contribution-links">
{{ $t('navbar.automaticContributions') }}
@ -55,7 +54,4 @@ export default {
height: 2rem;
padding-left: 10px;
}
.bg-color-creation {
background-color: #cf1010dc;
}
</style>

View File

@ -13,7 +13,8 @@
<b-row>
<b-col class="col-3">{{ $t('creation_for_month') }}</b-col>
<b-col class="h3">
{{ $d(new Date(item.date), 'month') }} {{ $d(new Date(item.date), 'year') }}
{{ $d(new Date(item.contributionDate), 'month') }}
{{ $d(new Date(item.contributionDate), 'year') }}
</b-col>
</b-row>
<b-row>

View File

@ -1,6 +1,17 @@
<template>
<div class="open-creations-table">
<b-table-lite :items="items" :fields="fields" caption-top striped hover stacked="md">
<b-table-lite
:items="items"
:fields="fields"
caption-top
striped
hover
stacked="md"
:tbody-tr-class="rowClass"
>
<template #cell(state)="row">
<b-icon :icon="getStatusIcon(row.item.state)"></b-icon>
</template>
<template #cell(bookmark)="row">
<b-button
variant="danger"
@ -37,6 +48,16 @@
</b-button>
</div>
</template>
<template #cell(reActive)>
<b-button variant="warning" size="md" class="mr-2">
<b-icon icon="arrow-up" variant="light"></b-icon>
</b-button>
</template>
<template #cell(chatCreation)="row">
<b-button v-if="row.item.messagesCount > 0" @click="rowToggleDetails(row, 0)">
<b-icon icon="chat-dots"></b-icon>
</b-button>
</template>
<template #cell(deny)="row">
<div v-if="$store.state.moderator.id !== row.item.userId">
<b-button
@ -100,6 +121,14 @@ import RowDetails from '../RowDetails.vue'
import EditCreationFormular from '../EditCreationFormular.vue'
import ContributionMessagesList from '../ContributionMessages/ContributionMessagesList.vue'
const iconMap = {
IN_PROGRESS: 'question-square',
PENDING: 'bell-fill',
CONFIRMED: 'check',
DELETED: 'trash',
DENIED: 'x-circle',
}
export default {
name: 'OpenCreationsTable',
mixins: [toggleRowDetails],
@ -129,6 +158,14 @@ export default {
}
},
methods: {
getStatusIcon(status) {
return iconMap[status] ? iconMap[status] : 'default-icon'
},
rowClass(item, type) {
if (!item || type !== 'row') return
if (item.state === 'CONFIRMED') return 'table-success'
if (item.state === 'DENIED') return 'table-info'
},
updateCreationData(data) {
const row = data.row
this.$emit('update-contributions', data)

View File

@ -0,0 +1,34 @@
import gql from 'graphql-tag'
export const listAllContributions = gql`
query (
$currentPage: Int = 1
$pageSize: Int = 25
$order: Order = DESC
$statusFilter: [ContributionStatus!]
) {
listAllContributions(
currentPage: $currentPage
pageSize: $pageSize
order: $order
statusFilter: $statusFilter
) {
contributionCount
contributionList {
id
firstName
lastName
amount
memo
createdAt
contributionDate
confirmedAt
confirmedBy
state
messagesCount
deniedAt
deniedBy
}
}
}
`
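
A minimal usage sketch (hypothetical call site, not part of this commit) showing how the query above can be issued with explicit paging and status-filter variables through vue-apollo; the concrete values are illustrative only:

// hypothetical component method, assuming this.$apollo is available
const { data } = await this.$apollo.query({
  query: listAllContributions,
  variables: {
    currentPage: 1,
    pageSize: 25,
    order: 'DESC',
    statusFilter: ['IN_PROGRESS', 'PENDING'],
  },
  fetchPolicy: 'network-only',
})
// data.listAllContributions.contributionCount -> total row count for pagination
// data.listAllContributions.contributionList  -> rows rendered in the table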

View File

@ -1,20 +0,0 @@
import gql from 'graphql-tag'
export const listUnconfirmedContributions = gql`
query {
listUnconfirmedContributions {
id
firstName
lastName
userId
email
amount
memo
date
moderator
creation
state
messageCount
}
}
`

View File

@ -1,6 +1,7 @@
{
"all_emails": "Alle Nutzer",
"back": "zurück",
"chat": "Chat",
"contributionLink": {
"amount": "Betrag",
"changeSaved": "Änderungen gespeichert",
@ -29,6 +30,15 @@
"validFrom": "Startdatum",
"validTo": "Enddatum"
},
"contributions": {
"all": "Alle",
"confirms": "Bestätigt",
"deleted": "Gelöscht",
"denied": "Abgelehnt",
"open": "Offen"
},
"created": "Geschöpft",
"createdAt": "Angelegt",
"creation": "Schöpfung",
"creationList": "Schöpfungsliste",
"creation_form": {
@ -48,7 +58,6 @@
"update_creation": "Schöpfung aktualisieren"
},
"creation_for_month": "Schöpfung für Monat",
"date": "Datum",
"delete": "Löschen",
"deleted": "gelöscht",
"deleted_user": "Alle gelöschten Nutzer",
@ -92,13 +101,13 @@
"message": {
"request": "Die Anfrage wurde gesendet."
},
"mod": "Mod",
"moderator": "Moderator",
"name": "Name",
"navbar": {
"automaticContributions": "Automatische Beiträge",
"logout": "Abmelden",
"my-account": "Mein Konto",
"open_creation": "Offene Schöpfungen",
"statistic": "Statistik",
"user_search": "Nutzersuche"
},

View File

@ -1,6 +1,7 @@
{
"all_emails": "All users",
"back": "back",
"chat": "Chat",
"contributionLink": {
"amount": "Amount",
"changeSaved": "Changes saved",
@ -29,6 +30,15 @@
"validFrom": "Start-date",
"validTo": "End-Date"
},
"contributions": {
"all": "All",
"confirms": "Confirmed",
"deleted": "Deleted",
"denied": "Denied",
"open": "Open"
},
"created": "Confirmed",
"createdAt": "Created",
"creation": "Creation",
"creationList": "Creation list",
"creation_form": {
@ -48,7 +58,6 @@
"update_creation": "Creation update"
},
"creation_for_month": "Creation for month",
"date": "Date",
"delete": "Delete",
"deleted": "deleted",
"deleted_user": "All deleted user",
@ -92,13 +101,13 @@
"message": {
"request": "Request has been sent."
},
"mod": "Mod",
"moderator": "Moderator",
"name": "Name",
"navbar": {
"automaticContributions": "Automatic Contributions",
"logout": "Logout",
"my-account": "My Account",
"open_creation": "Open creations",
"statistic": "Statistic",
"user_search": "User search"
},

View File

@ -5,7 +5,7 @@ import { toastErrorSpy } from '../../test/testSetup'
const localVue = global.localVue
const apolloQueryMock = jest.fn().mockResolvedValueOnce({
const apolloQueryMock = jest.fn().mockResolvedValue({
data: {
listContributionLinks: {
links: [
@ -47,6 +47,7 @@ describe('ContributionLinks', () => {
beforeEach(() => {
wrapper = Wrapper()
})
describe('apollo returns', () => {
it('calls listContributionLinks', () => {
expect(apolloQueryMock).toBeCalledWith(
@ -57,7 +58,7 @@ describe('ContributionLinks', () => {
})
})
describe.skip('query transaction with error', () => {
describe('query transaction with error', () => {
beforeEach(() => {
apolloQueryMock.mockRejectedValue({ message: 'OUCH!' })
wrapper = Wrapper()

View File

@ -2,7 +2,7 @@ import { mount } from '@vue/test-utils'
import CreationConfirm from './CreationConfirm.vue'
import { adminDeleteContribution } from '../graphql/adminDeleteContribution'
import { denyContribution } from '../graphql/denyContribution'
import { listUnconfirmedContributions } from '../graphql/listUnconfirmedContributions'
import { listAllContributions } from '../graphql/listAllContributions'
import { confirmContribution } from '../graphql/confirmContribution'
import { toastErrorSpy, toastSuccessSpy } from '../../test/testSetup'
import VueApollo from 'vue-apollo'
@ -38,50 +38,68 @@ const mocks = {
const defaultData = () => {
return {
listUnconfirmedContributions: [
{
id: 1,
firstName: 'Bibi',
lastName: 'Bloxberg',
userId: 99,
email: 'bibi@bloxberg.de',
amount: 500,
memo: 'Danke für alles',
date: new Date(),
moderator: 1,
state: 'PENDING',
creation: [500, 500, 500],
messageCount: 0,
},
{
id: 2,
firstName: 'Räuber',
lastName: 'Hotzenplotz',
userId: 100,
email: 'raeuber@hotzenplotz.de',
amount: 1000000,
memo: 'Gut Ergattert',
date: new Date(),
moderator: 1,
state: 'PENDING',
creation: [500, 500, 500],
messageCount: 0,
},
],
listAllContributions: {
contributionCount: 2,
contributionList: [
{
id: 1,
firstName: 'Bibi',
lastName: 'Bloxberg',
userId: 99,
email: 'bibi@bloxberg.de',
amount: 500,
memo: 'Danke für alles',
date: new Date(),
moderator: 1,
state: 'PENDING',
creation: [500, 500, 500],
messagesCount: 0,
deniedBy: null,
deniedAt: null,
confirmedBy: null,
confirmedAt: null,
contributionDate: new Date(),
deletedBy: null,
deletedAt: null,
createdAt: new Date(),
},
{
id: 2,
firstName: 'Räuber',
lastName: 'Hotzenplotz',
userId: 100,
email: 'raeuber@hotzenplotz.de',
amount: 1000000,
memo: 'Gut Ergattert',
date: new Date(),
moderator: 1,
state: 'PENDING',
creation: [500, 500, 500],
messagesCount: 0,
deniedBy: null,
deniedAt: null,
confirmedBy: null,
confirmedAt: null,
contributionDate: new Date(),
deletedBy: null,
deletedAt: null,
createdAt: new Date(),
},
],
},
}
}
describe('CreationConfirm', () => {
let wrapper
const listUnconfirmedContributionsMock = jest.fn()
const adminDeleteContributionMock = jest.fn()
const adminDenyContributionMock = jest.fn()
const confirmContributionMock = jest.fn()
mockClient.setRequestHandler(
listUnconfirmedContributions,
listUnconfirmedContributionsMock
listAllContributions,
jest
.fn()
.mockRejectedValueOnce({ message: 'Ouch!' })
.mockResolvedValue({ data: defaultData() }),
)
@ -117,6 +135,10 @@ describe('CreationConfirm', () => {
it('toast an error message', () => {
expect(toastErrorSpy).toBeCalledWith('Ouch!')
})
it('has statusFilter ["IN_PROGRESS", "PENDING"]', () => {
expect(wrapper.vm.statusFilter).toEqual(['IN_PROGRESS', 'PENDING'])
})
})
describe('server response is success', () => {
@ -125,17 +147,7 @@ describe('CreationConfirm', () => {
})
it('has two pending creations', () => {
expect(wrapper.vm.pendingCreations).toHaveLength(2)
})
})
describe('store', () => {
it('commits resetOpenCreations to store', () => {
expect(storeCommitMock).toBeCalledWith('resetOpenCreations')
})
it('commits setOpenCreations to store', () => {
expect(storeCommitMock).toBeCalledWith('setOpenCreations', 2)
expect(wrapper.find('tbody').findAll('tr')).toHaveLength(2)
})
})
@ -316,5 +328,94 @@ describe('CreationConfirm', () => {
})
})
})
describe('filter tabs', () => {
describe('click tab "confirmed"', () => {
let refetchSpy
beforeEach(async () => {
jest.clearAllMocks()
refetchSpy = jest.spyOn(wrapper.vm.$apollo.queries.ListAllContributions, 'refetch')
await wrapper.find('a[data-test="confirmed"]').trigger('click')
})
it('has statusFilter set to ["CONFIRMED"]', () => {
expect(
wrapper.vm.$apollo.queries.ListAllContributions.observer.options.variables,
).toMatchObject({ statusFilter: ['CONFIRMED'] })
})
it('refetches contributions', () => {
expect(refetchSpy).toBeCalled()
})
describe('click tab "open"', () => {
beforeEach(async () => {
jest.clearAllMocks()
refetchSpy = jest.spyOn(wrapper.vm.$apollo.queries.ListAllContributions, 'refetch')
await wrapper.find('a[data-test="open"]').trigger('click')
})
it('has statusFilter set to ["IN_PROGRESS", "PENDING"]', () => {
expect(
wrapper.vm.$apollo.queries.ListAllContributions.observer.options.variables,
).toMatchObject({ statusFilter: ['IN_PROGRESS', 'PENDING'] })
})
it('refetches contributions', () => {
expect(refetchSpy).toBeCalled()
})
})
describe('click tab "denied"', () => {
beforeEach(async () => {
jest.clearAllMocks()
refetchSpy = jest.spyOn(wrapper.vm.$apollo.queries.ListAllContributions, 'refetch')
await wrapper.find('a[data-test="denied"]').trigger('click')
})
it('has statusFilter set to ["DENIED"]', () => {
expect(
wrapper.vm.$apollo.queries.ListAllContributions.observer.options.variables,
).toMatchObject({ statusFilter: ['DENIED'] })
})
it('refetches contributions', () => {
expect(refetchSpy).toBeCalled()
})
})
describe('click tab "all"', () => {
beforeEach(async () => {
jest.clearAllMocks()
refetchSpy = jest.spyOn(wrapper.vm.$apollo.queries.ListAllContributions, 'refetch')
await wrapper.find('a[data-test="all"]').trigger('click')
})
it('has statusFilter set to ["IN_PROGRESS", "PENDING", "CONFIRMED", "DENIED", "DELETED"]', () => {
expect(
wrapper.vm.$apollo.queries.ListAllContributions.observer.options.variables,
).toMatchObject({
statusFilter: ['IN_PROGRESS', 'PENDING', 'CONFIRMED', 'DENIED', 'DELETED'],
})
})
it('refetches contributions', () => {
expect(refetchSpy).toBeCalled()
})
})
})
})
describe('update status', () => {
beforeEach(async () => {
await wrapper.findComponent({ name: 'OpenCreationsTable' }).vm.$emit('update-state', 2)
})
it.skip('updates the status', () => {
expect(wrapper.vm.items.find((obj) => obj.id === 2).messagesCount).toBe(1)
expect(wrapper.vm.items.find((obj) => obj.id === 2).state).toBe('IN_PROGRESS')
})
})
})
})

View File

@ -1,6 +1,50 @@
<!-- eslint-disable @intlify/vue-i18n/no-dynamic-keys -->
<template>
<div class="creation-confirm">
<div>
<b-tabs v-model="tabIndex" content-class="mt-3" fill>
<b-tab active :title-link-attributes="{ 'data-test': 'open' }">
<template #title>
{{ $t('contributions.open') }}
<b-badge v-if="$store.state.openCreations > 0" variant="danger">
{{ $store.state.openCreations }}
</b-badge>
</template>
</b-tab>
<b-tab
:title="$t('contributions.confirms')"
:title-link-attributes="{ 'data-test': 'confirmed' }"
/>
<b-tab
:title="$t('contributions.denied')"
:title-link-attributes="{ 'data-test': 'denied' }"
/>
<b-tab
:title="$t('contributions.deleted')"
:title-link-attributes="{ 'data-test': 'deleted' }"
/>
<b-tab :title="$t('contributions.all')" :title-link-attributes="{ 'data-test': 'all' }" />
</b-tabs>
</div>
<open-creations-table
class="mt-4"
:items="items"
:fields="fields"
@show-overlay="showOverlay"
@update-state="updateStatus"
@update-contributions="$apollo.queries.AllContributions.refetch()"
/>
<b-pagination
pills
size="lg"
v-model="currentPage"
:per-page="pageSize"
:total-rows="rows"
align="center"
:hide-ellipsis="true"
></b-pagination>
<div v-if="overlay" id="overlay" @dblclick="overlay = false">
<overlay :item="item" @overlay-cancel="overlay = false">
<template #title>
@ -24,24 +68,24 @@
</template>
</overlay>
</div>
<open-creations-table
class="mt-4"
:items="pendingCreations"
:fields="fields"
@show-overlay="showOverlay"
@update-state="updateState"
@update-contributions="$apollo.queries.PendingContributions.refetch()"
/>
</div>
</template>
<script>
import Overlay from '../components/Overlay.vue'
import OpenCreationsTable from '../components/Tables/OpenCreationsTable.vue'
import { listUnconfirmedContributions } from '../graphql/listUnconfirmedContributions'
import { listAllContributions } from '../graphql/listAllContributions'
import { adminDeleteContribution } from '../graphql/adminDeleteContribution'
import { confirmContribution } from '../graphql/confirmContribution'
import { denyContribution } from '../graphql/denyContribution'
const FILTER_TAB_MAP = [
['IN_PROGRESS', 'PENDING'],
['CONFIRMED'],
['DENIED'],
['DELETED'],
['IN_PROGRESS', 'PENDING', 'CONFIRMED', 'DENIED', 'DELETED'],
]
export default {
name: 'CreationConfirm',
components: {
@ -50,10 +94,14 @@ export default {
},
data() {
return {
pendingCreations: [],
tabIndex: 0,
items: [],
overlay: false,
item: {},
variant: 'confirm',
rows: 0,
currentPage: 1,
pageSize: 25,
}
},
methods: {
@ -112,7 +160,7 @@ export default {
})
},
updatePendingCreations(id) {
this.pendingCreations = this.pendingCreations.filter((obj) => obj.id !== id)
this.items = this.items.filter((obj) => obj.id !== id)
this.$store.commit('openCreationsMinus', 1)
},
showOverlay(item, variant) {
@ -120,38 +168,155 @@ export default {
this.item = item
this.variant = variant
},
updateState(id) {
this.pendingCreations.find((obj) => obj.id === id).messagesCount++
this.pendingCreations.find((obj) => obj.id === id).state = 'IN_PROGRESS'
updateStatus(id) {
this.items.find((obj) => obj.id === id).messagesCount++
this.items.find((obj) => obj.id === id).state = 'IN_PROGRESS'
},
},
watch: {
statusFilter() {
this.$apollo.queries.ListAllContributions.refetch()
},
},
computed: {
fields() {
return [
{ key: 'bookmark', label: this.$t('delete') },
{ key: 'deny', label: this.$t('deny') },
{ key: 'email', label: this.$t('e_mail') },
{ key: 'firstName', label: this.$t('firstname') },
{ key: 'lastName', label: this.$t('lastname') },
{
key: 'amount',
label: this.$t('creation'),
formatter: (value) => {
return value + ' GDD'
[
{ key: 'bookmark', label: this.$t('delete') },
{ key: 'deny', label: this.$t('deny') },
{ key: 'email', label: this.$t('e_mail') },
{ key: 'firstName', label: this.$t('firstname') },
{ key: 'lastName', label: this.$t('lastname') },
{
key: 'amount',
label: this.$t('creation'),
formatter: (value) => {
return value + ' GDD'
},
},
},
{ key: 'memo', label: this.$t('text'), class: 'text-break' },
{
key: 'date',
label: this.$t('date'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
{ key: 'memo', label: this.$t('text'), class: 'text-break' },
{
key: 'contributionDate',
label: this.$t('created'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
},
{ key: 'moderator', label: this.$t('moderator') },
{ key: 'editCreation', label: this.$t('edit') },
{ key: 'confirm', label: this.$t('save') },
]
{ key: 'moderator', label: this.$t('moderator') },
{ key: 'editCreation', label: this.$t('edit') },
{ key: 'confirm', label: this.$t('save') },
],
[
{ key: 'firstName', label: this.$t('firstname') },
{ key: 'lastName', label: this.$t('lastname') },
{
key: 'amount',
label: this.$t('creation'),
formatter: (value) => {
return value + ' GDD'
},
},
{ key: 'memo', label: this.$t('text'), class: 'text-break' },
{
key: 'contributionDate',
label: this.$t('created'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
{
key: 'createdAt',
label: this.$t('createdAt'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
{
key: 'confirmedAt',
label: this.$t('contributions.confirms'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
{ key: 'chatCreation', label: this.$t('chat') },
],
[
{ key: 'reActive', label: 'reActive' },
{ key: 'firstName', label: this.$t('firstname') },
{ key: 'lastName', label: this.$t('lastname') },
{
key: 'amount',
label: this.$t('creation'),
formatter: (value) => {
return value + ' GDD'
},
},
{ key: 'memo', label: this.$t('text'), class: 'text-break' },
{
key: 'contributionDate',
label: this.$t('created'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
{
key: 'createdAt',
label: this.$t('createdAt'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
{
key: 'deniedAt',
label: this.$t('contributions.denied'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
{ key: 'deniedBy', label: this.$t('mod') },
{ key: 'chatCreation', label: this.$t('chat') },
],
[],
[
{ key: 'state', label: 'state' },
{ key: 'firstName', label: this.$t('firstname') },
{ key: 'lastName', label: this.$t('lastname') },
{
key: 'amount',
label: this.$t('creation'),
formatter: (value) => {
return value + ' GDD'
},
},
{ key: 'memo', label: this.$t('text'), class: 'text-break' },
{
key: 'contributionDate',
label: this.$t('created'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
{
key: 'createdAt',
label: this.$t('createdAt'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
{
key: 'confirmedAt',
label: this.$t('contributions.confirms'),
formatter: (value) => {
return this.$d(new Date(value), 'short')
},
},
{ key: 'confirmedBy', label: this.$t('mod') },
{ key: 'chatCreation', label: this.$t('chat') },
],
][this.tabIndex]
},
statusFilter() {
return FILTER_TAB_MAP[this.tabIndex]
},
overlayTitle() {
return `overlay.${this.variant}.title`
@ -182,18 +347,21 @@ export default {
},
},
apollo: {
PendingContributions: {
ListAllContributions: {
query() {
return listUnconfirmedContributions
return listAllContributions
},
variables() {
// may be at some point we need a pagination here
return {}
return {
currentPage: this.currentPage,
pageSize: this.pageSize,
statusFilter: this.statusFilter,
}
},
update({ listUnconfirmedContributions }) {
this.$store.commit('resetOpenCreations')
this.pendingCreations = listUnconfirmedContributions
this.$store.commit('setOpenCreations', listUnconfirmedContributions.length)
update({ listAllContributions }) {
this.rows = listAllContributions.contributionCount
this.items = listAllContributions.contributionList
},
error({ message }) {
this.toastError(message)

View File

@ -1,41 +1,18 @@
import { mount } from '@vue/test-utils'
import Overview from './Overview.vue'
import { listUnconfirmedContributions } from '@/graphql/listUnconfirmedContributions.js'
import { listAllContributions } from '../graphql/listAllContributions'
import VueApollo from 'vue-apollo'
import { createMockClient } from 'mock-apollo-client'
import { toastErrorSpy } from '../../test/testSetup'
const mockClient = createMockClient()
const apolloProvider = new VueApollo({
defaultClient: mockClient,
})
const localVue = global.localVue
const apolloQueryMock = jest
.fn()
.mockResolvedValueOnce({
data: {
listUnconfirmedContributions: [
{
pending: true,
},
{
pending: true,
},
{
pending: true,
},
],
},
})
.mockResolvedValue({
data: {
listUnconfirmedContributions: [
{
pending: true,
},
{
pending: true,
},
{
pending: true,
},
],
},
})
localVue.use(VueApollo)
const storeCommitMock = jest.fn()
@ -43,44 +20,114 @@ const mocks = {
$t: jest.fn((t) => t),
$n: jest.fn((n) => n),
$d: jest.fn((d) => d),
$apollo: {
query: apolloQueryMock,
},
$store: {
commit: storeCommitMock,
state: {
openCreations: 2,
openCreations: 1,
},
},
}
const defaultData = () => {
return {
listAllContributions: {
contributionCount: 2,
contributionList: [
{
id: 1,
firstName: 'Bibi',
lastName: 'Bloxberg',
userId: 99,
email: 'bibi@bloxberg.de',
amount: 500,
memo: 'Danke für alles',
date: new Date(),
moderator: 1,
state: 'PENDING',
creation: [500, 500, 500],
messagesCount: 0,
deniedBy: null,
deniedAt: null,
confirmedBy: null,
confirmedAt: null,
contributionDate: new Date(),
deletedBy: null,
deletedAt: null,
createdAt: new Date(),
},
{
id: 2,
firstName: 'Räuber',
lastName: 'Hotzenplotz',
userId: 100,
email: 'raeuber@hotzenplotz.de',
amount: 1000000,
memo: 'Gut Ergattert',
date: new Date(),
moderator: 1,
state: 'PENDING',
creation: [500, 500, 500],
messagesCount: 0,
deniedBy: null,
deniedAt: null,
confirmedBy: null,
confirmedAt: null,
contributionDate: new Date(),
deletedBy: null,
deletedAt: null,
createdAt: new Date(),
},
],
},
}
}
describe('Overview', () => {
let wrapper
const listAllContributionsMock = jest.fn()
mockClient.setRequestHandler(
listAllContributions,
listAllContributionsMock
.mockRejectedValueOnce({ message: 'Ouch!' })
.mockResolvedValue({ data: defaultData() }),
)
const Wrapper = () => {
return mount(Overview, { localVue, mocks })
return mount(Overview, { localVue, mocks, apolloProvider })
}
describe('mount', () => {
beforeEach(() => {
jest.clearAllMocks()
wrapper = Wrapper()
})
it('calls listUnconfirmedContributions', () => {
expect(apolloQueryMock).toBeCalledWith(
expect.objectContaining({
query: listUnconfirmedContributions,
}),
)
describe('server response for get pending creations is error', () => {
it('toast an error message', () => {
expect(toastErrorSpy).toBeCalledWith('Ouch!')
})
})
it('calls the listAllContributions query', () => {
expect(listAllContributionsMock).toBeCalledWith({
currentPage: 1,
order: 'DESC',
pageSize: 25,
statusFilter: ['IN_PROGRESS', 'PENDING'],
})
})
it('commits three pending creations to store', () => {
expect(storeCommitMock).toBeCalledWith('setOpenCreations', 3)
expect(storeCommitMock).toBeCalledWith('setOpenCreations', 2)
})
describe('with open creations', () => {
it('renders a link to confirm creations', () => {
expect(wrapper.find('a[href="creation-confirm"]').text()).toContain('2')
beforeEach(() => {
mocks.$store.state.openCreations = 2
})
it('renders a link to confirm 2 creations', () => {
expect(wrapper.find('[data-test="open-creation"]').text()).toContain('2')
expect(wrapper.find('a[href="creation-confirm"]').exists()).toBeTruthy()
})
})
@ -91,7 +138,7 @@ describe('Overview', () => {
})
it('renders a link to confirm creations', () => {
expect(wrapper.find('a[href="creation-confirm"]').text()).toContain('0')
expect(wrapper.find('[data-test="open-creation"]').text()).toContain('0')
expect(wrapper.find('a[href="creation-confirm"]').exists()).toBeTruthy()
})
})

View File

@ -24,31 +24,40 @@
>
<b-card-text>
<b-link to="creation-confirm">
<h1>{{ $store.state.openCreations }}</h1>
<h1 data-test="open-creation">{{ $store.state.openCreations }}</h1>
</b-link>
</b-card-text>
</b-card>
</div>
</template>
<script>
import { listUnconfirmedContributions } from '@/graphql/listUnconfirmedContributions.js'
import { listAllContributions } from '../graphql/listAllContributions'
export default {
name: 'overview',
methods: {
getPendingCreations() {
this.$apollo
.query({
query: listUnconfirmedContributions,
fetchPolicy: 'network-only',
})
.then((result) => {
this.$store.commit('setOpenCreations', result.data.listUnconfirmedContributions.length)
})
},
data() {
return {
statusFilter: ['IN_PROGRESS', 'PENDING'],
}
},
created() {
this.getPendingCreations()
apollo: {
AllContributions: {
query() {
return listAllContributions
},
variables() {
// maybe at some point we need pagination here
return {
statusFilter: this.statusFilter,
}
},
update({ listAllContributions }) {
this.$store.commit('setOpenCreations', listAllContributions.contributionCount)
},
error({ message }) {
this.toastError(message)
},
},
},
}
</script>

View File

@ -58,8 +58,4 @@ WEBHOOK_ELOPAGE_SECRET=secret
# LOG_LEVEL=info
# Federation
# if you set the value of FEDERATION_DHT_TOPIC, the DHT hyperswarm will start to announce and listen
# on an hash created from this topic
# FEDERATION_DHT_TOPIC=GRADIDO_HUB
# FEDERATION_DHT_SEED=64ebcb0e3ad547848fef4197c6e2332f
# FEDERATION_COMMUNITY_URL=http://localhost:4000/api
FEDERATION_VALIDATE_COMMUNITY_TIMER=60000

View File

@ -55,6 +55,4 @@ EMAIL_CODE_REQUEST_TIME=$EMAIL_CODE_REQUEST_TIME
WEBHOOK_ELOPAGE_SECRET=$WEBHOOK_ELOPAGE_SECRET
# Federation
FEDERATION_DHT_TOPIC=$FEDERATION_DHT_TOPIC
FEDERATION_DHT_SEED=$FEDERATION_DHT_SEED
FEDERATION_COMMUNITY_URL=$FEDERATION_COMMUNITY_URL
FEDERATION_VALIDATE_COMMUNITY_TIMER=$FEDERATION_VALIDATE_COMMUNITY_TIMER

9
backend/.env.test_e2e Normal file
View File

@ -0,0 +1,9 @@
# Server
JWT_EXPIRES_IN=1m
# Email
EMAIL=true
EMAIL_TEST_MODUS=false
EMAIL_TLS=false
# for testing password reset
EMAIL_CODE_REQUEST_TIME=1

View File

@ -19,7 +19,6 @@
"locales": "scripts/sort.sh"
},
"dependencies": {
"@hyperswarm/dht": "^6.2.0",
"apollo-server-express": "^2.25.2",
"await-semaphore": "^0.1.3",
"axios": "^0.21.1",
@ -31,6 +30,7 @@
"email-templates": "^10.0.1",
"express": "^4.17.1",
"graphql": "^15.5.1",
"graphql-request": "5.0.0",
"i18n": "^0.15.1",
"jsonwebtoken": "^8.5.1",
"lodash.clonedeep": "^4.5.0",

View File

@ -10,7 +10,7 @@ Decimal.set({
})
const constants = {
DB_VERSION: '0059-add_hide_amount_to_users',
DB_VERSION: '0060-update_communities_table',
DECAY_START_TIME: new Date('2021-05-13 17:46:31-0000'), // GMT+0
LOG4JS_CONFIG: 'log4js-config.json',
// default log level on production should be info
@ -115,14 +115,8 @@ if (
}
const federation = {
FEDERATION_DHT_TOPIC: process.env.FEDERATION_DHT_TOPIC || null,
FEDERATION_DHT_SEED: process.env.FEDERATION_DHT_SEED || null,
FEDERATION_COMMUNITY_URL:
process.env.FEDERATION_COMMUNITY_URL === undefined
? null
: process.env.FEDERATION_COMMUNITY_URL.endsWith('/')
? process.env.FEDERATION_COMMUNITY_URL
: process.env.FEDERATION_COMMUNITY_URL + '/',
FEDERATION_VALIDATE_COMMUNITY_TIMER:
Number(process.env.FEDERATION_VALIDATE_COMMUNITY_TIMER) || 60000,
}
const CONFIG = {

View File

@ -0,0 +1,34 @@
import { gql } from 'graphql-request'
import { backendLogger as logger } from '@/server/logger'
import { Community as DbCommunity } from '@entity/Community'
import { GraphQLGetClient } from '../GraphQLGetClient'
import LogError from '@/server/LogError'
export async function requestGetPublicKey(dbCom: DbCommunity): Promise<string | undefined> {
let endpoint = dbCom.endPoint.endsWith('/') ? dbCom.endPoint : dbCom.endPoint + '/'
endpoint = `${endpoint}${dbCom.apiVersion}/`
logger.info(`requestGetPublicKey with endpoint='${endpoint}'...`)
const graphQLClient = GraphQLGetClient.getInstance(endpoint)
logger.debug(`graphQLClient=${JSON.stringify(graphQLClient)}`)
const query = gql`
query {
getPublicKey {
publicKey
}
}
`
try {
const { data, errors, extensions, headers, status } = await graphQLClient.rawRequest(query)
logger.debug(`Response-Data:`, data, errors, extensions, headers, status)
if (data) {
logger.debug(`Response-PublicKey:`, data.getPublicKey.publicKey)
logger.info(`requestGetPublicKey processed successfully`)
return data.getPublicKey.publicKey
}
logger.warn(`requestGetPublicKey processed without response data`)
} catch (err) {
throw new LogError(`Request-Error:`, err)
}
}

View File

@ -0,0 +1,34 @@
import { gql } from 'graphql-request'
import { backendLogger as logger } from '@/server/logger'
import { Community as DbCommunity } from '@entity/Community'
import { GraphQLGetClient } from '../GraphQLGetClient'
import LogError from '@/server/LogError'
export async function requestGetPublicKey(dbCom: DbCommunity): Promise<string | undefined> {
let endpoint = dbCom.endPoint.endsWith('/') ? dbCom.endPoint : dbCom.endPoint + '/'
endpoint = `${endpoint}${dbCom.apiVersion}/`
logger.info(`requestGetPublicKey with endpoint='${endpoint}'...`)
const graphQLClient = GraphQLGetClient.getInstance(endpoint)
logger.debug(`graphQLClient=${JSON.stringify(graphQLClient)}`)
const query = gql`
query {
getPublicKey {
publicKey
}
}
`
try {
const { data, errors, extensions, headers, status } = await graphQLClient.rawRequest(query)
logger.debug(`Response-Data:`, data, errors, extensions, headers, status)
if (data) {
logger.debug(`Response-PublicKey:`, data.getPublicKey.publicKey)
logger.info(`requestGetPublicKey processed successfully`)
return data.getPublicKey.publicKey
}
logger.warn(`requestGetPublicKey processed without response data`)
} catch (err) {
throw new LogError(`Request-Error:`, err)
}
}
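
The 1_0 and 1_1 clients above expose the same requestGetPublicKey function. A minimal usage sketch (hypothetical caller, assuming a Community row has already been stored; not part of this commit):

// hypothetical: pick one announced community and ask it for its public key
const dbCom = await DbCommunity.findOneOrFail({ where: { apiVersion: '1_0' } })
const publicKey = await requestGetPublicKey(dbCom)
if (publicKey) {
  // how the key is persisted or compared is up to the caller, e.g. validateCommunities
  logger.info(`community ${dbCom.id} answered with publicKey=${publicKey}`)
}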

View File

@ -0,0 +1,35 @@
import { GraphQLClient } from 'graphql-request'
import { PatchedRequestInit } from 'graphql-request/dist/types'
export class GraphQLGetClient extends GraphQLClient {
private static instance: GraphQLGetClient
/**
* The Singleton's constructor should always be private to prevent direct
* construction calls with the `new` operator.
*/
// eslint-disable-next-line no-useless-constructor
private constructor(url: string, options?: PatchedRequestInit) {
super(url, options)
}
/**
* The static method that controls the access to the singleton instance.
*
* This implementation lets you subclass the Singleton class while keeping
* just one instance of each subclass around.
*/
public static getInstance(url: string): GraphQLGetClient {
if (!GraphQLGetClient.instance) {
GraphQLGetClient.instance = new GraphQLGetClient(url, {
method: 'GET',
jsonSerializer: {
parse: JSON.parse,
stringify: JSON.stringify,
},
})
}
return GraphQLGetClient.instance
}
}
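
A minimal usage sketch (hypothetical, not part of this commit) for the singleton above; the endpoint URL is an assumption:

// getInstance creates the client on first use and returns the same instance afterwards
const client = GraphQLGetClient.getInstance('http://localhost:5001/api/1_0/')
// rawRequest resolves to { data, errors, extensions, headers, status }, as destructured in FederationClient
const { data, status } = await client.rawRequest(`
  query {
    getPublicKey {
      publicKey
    }
  }
`)
// Because the client is constructed with method: 'GET', the query is sent as an HTTP GET request.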

View File

@ -0,0 +1,4 @@
export enum ApiVersionType {
V1_0 = '1_0',
V1_1 = '1_1',
}

View File

@ -0,0 +1,158 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import { logger } from '@test/testSetup'
import { Community as DbCommunity } from '@entity/Community'
import { testEnvironment, cleanDB } from '@test/helpers'
import { validateCommunities } from './validateCommunities'
let con: any
let testEnv: any
beforeAll(async () => {
testEnv = await testEnvironment(logger)
con = testEnv.con
await cleanDB()
})
afterAll(async () => {
// await cleanDB()
await con.close()
})
describe('validate Communities', () => {
/*
describe('start validation loop', () => {
beforeEach(async () => {
jest.clearAllMocks()
startValidateCommunities(0)
})
it('logs loop started', () => {
expect(logger.info).toBeCalledWith(
`Federation: startValidateCommunities loop with an interval of 0 ms...`,
)
})
})
*/
describe('start validation logic without loop', () => {
beforeEach(async () => {
jest.clearAllMocks()
await validateCommunities()
})
it('logs zero communities found', () => {
expect(logger.debug).toBeCalledWith(`Federation: found 0 dbCommunities`)
})
describe('with one Community of api 1_0', () => {
beforeEach(async () => {
const variables1 = {
publicKey: Buffer.from('11111111111111111111111111111111'),
apiVersion: '1_0',
endPoint: 'http//localhost:5001/api/',
lastAnnouncedAt: new Date(),
}
await DbCommunity.createQueryBuilder()
.insert()
.into(DbCommunity)
.values(variables1)
.orUpdate({
conflict_target: ['id', 'publicKey', 'apiVersion'],
overwrite: ['end_point', 'last_announced_at'],
})
.execute()
jest.clearAllMocks()
await validateCommunities()
})
it('logs one community found', () => {
expect(logger.debug).toBeCalledWith(`Federation: found 1 dbCommunities`)
})
it('logs requestGetPublicKey for community api 1_0 ', () => {
expect(logger.info).toBeCalledWith(
`requestGetPublicKey with endpoint='http//localhost:5001/api/1_0/'...`,
)
})
})
describe('with two Communities of api 1_0 and 1_1', () => {
beforeEach(async () => {
const variables2 = {
publicKey: Buffer.from('11111111111111111111111111111111'),
apiVersion: '1_1',
endPoint: 'http//localhost:5001/api/',
lastAnnouncedAt: new Date(),
}
await DbCommunity.createQueryBuilder()
.insert()
.into(DbCommunity)
.values(variables2)
.orUpdate({
conflict_target: ['id', 'publicKey', 'apiVersion'],
overwrite: ['end_point', 'last_announced_at'],
})
.execute()
jest.clearAllMocks()
await validateCommunities()
})
it('logs two communities found', () => {
expect(logger.debug).toBeCalledWith(`Federation: found 2 dbCommunities`)
})
it('logs requestGetPublicKey for community api 1_0 ', () => {
expect(logger.info).toBeCalledWith(
`requestGetPublicKey with endpoint='http//localhost:5001/api/1_0/'...`,
)
})
it('logs requestGetPublicKey for community api 1_1 ', () => {
expect(logger.info).toBeCalledWith(
`requestGetPublicKey with endpoint='http//localhost:5001/api/1_1/'...`,
)
})
})
describe('with three Communities of api 1_0, 1_1 and 2_0', () => {
let dbCom: DbCommunity
beforeEach(async () => {
const variables3 = {
publicKey: Buffer.from('11111111111111111111111111111111'),
apiVersion: '2_0',
endPoint: 'http//localhost:5001/api/',
lastAnnouncedAt: new Date(),
}
await DbCommunity.createQueryBuilder()
.insert()
.into(DbCommunity)
.values(variables3)
.orUpdate({
conflict_target: ['id', 'publicKey', 'apiVersion'],
overwrite: ['end_point', 'last_announced_at'],
})
.execute()
dbCom = await DbCommunity.findOneOrFail({
where: { publicKey: variables3.publicKey, apiVersion: variables3.apiVersion },
})
jest.clearAllMocks()
await validateCommunities()
})
it('logs three communities found', () => {
expect(logger.debug).toBeCalledWith(`Federation: found 3 dbCommunities`)
})
it('logs requestGetPublicKey for community api 1_0 ', () => {
expect(logger.info).toBeCalledWith(
`requestGetPublicKey with endpoint='http//localhost:5001/api/1_0/'...`,
)
})
it('logs requestGetPublicKey for community api 1_1 ', () => {
expect(logger.info).toBeCalledWith(
`requestGetPublicKey with endpoint='http//localhost:5001/api/1_1/'...`,
)
})
it('logs unsupported api for community with api 2_0 ', () => {
expect(logger.warn).toBeCalledWith(
`Federation: dbCom: ${dbCom.id} with unsupported apiVersion=2_0; supported versions=1_0,1_1`,
)
})
})
})
})

View File

@ -0,0 +1,80 @@
import { Community as DbCommunity } from '@entity/Community'
import { IsNull } from '@dbTools/typeorm'
// eslint-disable-next-line camelcase
import { requestGetPublicKey as v1_0_requestGetPublicKey } from './client/1_0/FederationClient'
// eslint-disable-next-line camelcase
import { requestGetPublicKey as v1_1_requestGetPublicKey } from './client/1_1/FederationClient'
import { backendLogger as logger } from '@/server/logger'
import { ApiVersionType } from './enum/apiVersionType'
import LogError from '@/server/LogError'
export async function startValidateCommunities(timerInterval: number): Promise<void> {
logger.info(
`Federation: startValidateCommunities loop with an interval of ${timerInterval} ms...`,
)
// TODO: replace the timer-loop by an event-based communication to verify announced foreign communities
// better to use setTimeout twice than setInterval once -> see https://javascript.info/settimeout-setinterval
setTimeout(function run() {
validateCommunities()
setTimeout(run, timerInterval)
}, timerInterval)
}
export async function validateCommunities(): Promise<void> {
const dbCommunities: DbCommunity[] = await DbCommunity.createQueryBuilder()
.where({ foreign: true, verifiedAt: IsNull() })
.orWhere('verified_at < last_announced_at')
.getMany()
logger.debug(`Federation: found ${dbCommunities.length} dbCommunities`)
dbCommunities.forEach(async function (dbCom) {
logger.debug(`Federation: dbCom: ${JSON.stringify(dbCom)}`)
const apiValueStrings: string[] = Object.values(ApiVersionType)
logger.debug(`supported ApiVersions=`, apiValueStrings)
if (apiValueStrings.includes(dbCom.apiVersion)) {
logger.debug(
`Federation: validate publicKey for dbCom: ${dbCom.id} with apiVersion=${dbCom.apiVersion}`,
)
try {
const pubKey = await invokeVersionedRequestGetPublicKey(dbCom)
logger.info(
`Federation: received publicKey=${pubKey} from endpoint=${dbCom.endPoint}/${dbCom.apiVersion}`,
)
if (pubKey && pubKey === dbCom.publicKey.toString('hex')) {
logger.info(`Federation: matching publicKey: ${pubKey}`)
DbCommunity.update({ id: dbCom.id }, { verifiedAt: new Date() })
logger.debug(`Federation: updated dbCom: ${JSON.stringify(dbCom)}`)
}
/*
else {
logger.warn(`Federation: received unknown publicKey -> delete dbCom with id=${dbCom.id} `)
DbCommunity.delete({ id: dbCom.id })
}
*/
} catch (err) {
if (!isLogError(err)) {
logger.error(`Error:`, err)
}
}
} else {
logger.warn(
`Federation: dbCom: ${dbCom.id} with unsupported apiVersion=${dbCom.apiVersion}; supported versions=${apiValueStrings}`,
)
}
})
}
function isLogError(err: unknown) {
return err instanceof LogError
}
async function invokeVersionedRequestGetPublicKey(dbCom: DbCommunity): Promise<string | undefined> {
switch (dbCom.apiVersion) {
case ApiVersionType.V1_0:
return v1_0_requestGetPublicKey(dbCom)
case ApiVersionType.V1_1:
return v1_1_requestGetPublicKey(dbCom)
default:
return undefined
}
}

View File

@ -257,17 +257,13 @@ describe('Contribution Links', () => {
}),
).resolves.toEqual(
expect.objectContaining({
errors: [
new GraphQLError('Start-Date is not initialized. A Start-Date must be set!'),
],
errors: [new GraphQLError('A Start-Date must be set')],
}),
)
})
it('logs the error thrown', () => {
expect(logger.error).toBeCalledWith(
'Start-Date is not initialized. A Start-Date must be set!',
)
expect(logger.error).toBeCalledWith('A Start-Date must be set')
})
it('returns an error if missing endDate', async () => {
@ -282,15 +278,13 @@ describe('Contribution Links', () => {
}),
).resolves.toEqual(
expect.objectContaining({
errors: [new GraphQLError('End-Date is not initialized. An End-Date must be set!')],
errors: [new GraphQLError('An End-Date must be set')],
}),
)
})
it('logs the error thrown', () => {
expect(logger.error).toBeCalledWith(
'End-Date is not initialized. An End-Date must be set!',
)
expect(logger.error).toBeCalledWith('An End-Date must be set')
})
it('returns an error if endDate is before startDate', async () => {
@ -307,7 +301,7 @@ describe('Contribution Links', () => {
).resolves.toEqual(
expect.objectContaining({
errors: [
new GraphQLError(`The value of validFrom must before or equals the validTo!`),
new GraphQLError(`The value of validFrom must before or equals the validTo`),
],
}),
)
@ -315,7 +309,7 @@ describe('Contribution Links', () => {
it('logs the error thrown', () => {
expect(logger.error).toBeCalledWith(
`The value of validFrom must before or equals the validTo!`,
`The value of validFrom must before or equals the validTo`,
)
})

File diff suppressed because it is too large

View File

@ -181,6 +181,7 @@ export class ContributionResolver {
.select('c')
.from(DbContribution, 'c')
.innerJoinAndSelect('c.user', 'u')
.leftJoinAndSelect('c.messages', 'm')
.where(where)
.orderBy('c.createdAt', order)
.limit(pageSize)

View File

@ -1,3 +1,4 @@
import LogError from '@/server/LogError'
import { backendLogger as logger } from '@/server/logger'
import { getConnection } from '@dbTools/typeorm'
import { Contribution } from '@entity/Contribution'
@ -19,19 +20,14 @@ export const validateContribution = (
const index = getCreationIndex(creationDate.getMonth(), timezoneOffset)
if (index < 0) {
logger.error(
'No information for available creations with the given creationDate=',
creationDate.toString(),
)
throw new Error('No information for available creations for the given date')
throw new LogError('No information for available creations for the given date', creationDate)
}
if (amount.greaterThan(creations[index].toString())) {
logger.error(
`The amount (${amount} GDD) to be created exceeds the amount (${creations[index]} GDD) still available for this month.`,
)
throw new Error(
`The amount (${amount} GDD) to be created exceeds the amount (${creations[index]} GDD) still available for this month.`,
throw new LogError(
'The amount to be created exceeds the amount still available for this month',
amount,
creations[index],
)
}
}
@ -126,19 +122,16 @@ export const isStartEndDateValid = (
endDate: string | null | undefined,
): void => {
if (!startDate) {
logger.error('Start-Date is not initialized. A Start-Date must be set!')
throw new Error('Start-Date is not initialized. A Start-Date must be set!')
throw new LogError('A Start-Date must be set')
}
if (!endDate) {
logger.error('End-Date is not initialized. An End-Date must be set!')
throw new Error('End-Date is not initialized. An End-Date must be set!')
throw new LogError('An End-Date must be set')
}
// check if endDate is before startDate
if (new Date(endDate).getTime() - new Date(startDate).getTime() < 0) {
logger.error(`The value of validFrom must before or equals the validTo!`)
throw new Error(`The value of validFrom must before or equals the validTo!`)
throw new LogError(`The value of validFrom must before or equals the validTo`)
}
}

View File

@ -4,6 +4,7 @@ import createServer from './server/createServer'
// config
import CONFIG from './config'
import { startValidateCommunities } from './federation/validateCommunities'
async function main() {
const { app } = await createServer()
@ -16,6 +17,7 @@ async function main() {
console.log(`GraphIQL available at http://localhost:${CONFIG.PORT}`)
}
})
startValidateCommunities(Number(CONFIG.FEDERATION_VALIDATE_COMMUNITY_TIMER))
}
main().catch((e) => {

View File

@ -16,7 +16,7 @@ export const nMonthsBefore = (date: Date, months = 1): string => {
export const creationFactory = async (
client: ApolloServerTestClient,
creation: CreationInterface,
): Promise<Contribution | void> => {
): Promise<Contribution> => {
const { mutate } = client
await mutate({ mutation: login, variables: { email: creation.email, password: 'Aa12345_' } })
@ -51,6 +51,7 @@ export const creationFactory = async (
await confirmedContribution.save()
}
}
return confirmedContribution
} else {
return contribution
}

View File

@ -272,6 +272,12 @@ export const deleteContribution = gql`
}
`
export const denyContribution = gql`
mutation ($id: Int!) {
denyContribution(id: $id)
}
`
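A hedged example of calling the new mutation from an Apollo test client; the helper shape and the import path are assumptions following the surrounding test setup, not part of this file.

```typescript
import { ApolloServerTestClient } from 'apollo-server-testing'
import { denyContribution } from '@/seeds/graphql/mutations' // assumed path

// deny a contribution by id with an authenticated (admin) test client
const denyById = async (client: ApolloServerTestClient, id: number) => {
  const { mutate } = client
  return mutate({ mutation: denyContribution, variables: { id } })
}
```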
export const createContributionMessage = gql`
mutation ($contributionId: Float!, $message: String!) {
createContributionMessage(contributionId: $contributionId, message: $message) {

View File

@ -166,6 +166,15 @@ export const listContributions = gql`
id
amount
memo
createdAt
contributionDate
confirmedAt
confirmedBy
deletedAt
state
messagesCount
deniedAt
deniedBy
}
}
}

View File

@ -404,6 +404,11 @@
minimatch "^3.0.4"
strip-json-comments "^3.1.1"
"@graphql-typed-document-node/core@^3.1.1":
version "3.1.1"
resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.1.tgz#076d78ce99822258cf813ecc1e7fa460fa74d052"
integrity sha512-NQ17ii0rK1b34VZonlmT2QMJFI70m0TRwbknO/ihlbatXyaktDhN/98vBiUU6kNBPljqGqyIrl2T4nY2RpFANg==
"@hapi/boom@^10.0.0":
version "10.0.0"
resolved "https://registry.yarnpkg.com/@hapi/boom/-/boom-10.0.0.tgz#3624831d0a26b3378423b246f50eacea16e04a08"
@ -430,42 +435,6 @@
resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz#87de7af9c231826fdd68ac7258f77c429e0e5fcf"
integrity sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==
"@hyperswarm/dht@^6.2.0":
version "6.2.0"
resolved "https://registry.yarnpkg.com/@hyperswarm/dht/-/dht-6.2.0.tgz#b2cb1218752b52fabb66f304e73448a108d1effd"
integrity sha512-AeyfRdAkfCz/J3vTC4rdpzEpT7xQ+tls87Zpzw9Py3VGUZD8hMT7pr43OOdkCBNvcln6K/5/Lxhnq5lBkzH3yw==
dependencies:
"@hyperswarm/secret-stream" "^6.0.0"
b4a "^1.3.1"
bogon "^1.0.0"
compact-encoding "^2.4.1"
compact-encoding-net "^1.0.1"
debugging-stream "^2.0.0"
dht-rpc "^6.0.0"
events "^3.3.0"
hypercore-crypto "^3.3.0"
noise-curve-ed "^1.0.2"
noise-handshake "^2.1.0"
record-cache "^1.1.1"
safety-catch "^1.0.1"
sodium-universal "^3.0.4"
udx-native "^1.1.0"
xache "^1.1.0"
"@hyperswarm/secret-stream@^6.0.0":
version "6.0.0"
resolved "https://registry.yarnpkg.com/@hyperswarm/secret-stream/-/secret-stream-6.0.0.tgz#67db820308cc9fed899cb8f5e9f47ae819d5a4e3"
integrity sha512-0xuyJIJDe8JYk4uWUx25qJvWqybdjKU2ZIfP1GTqd7dQxwdR0bpYrQKdLkrn5txWSK4a28ySC2AjH0G3I0gXTA==
dependencies:
b4a "^1.1.0"
hypercore-crypto "^3.3.0"
noise-curve-ed "^1.0.2"
noise-handshake "^2.1.0"
sodium-secretstream "^1.0.0"
sodium-universal "^3.0.4"
streamx "^2.10.2"
timeout-refresh "^2.0.0"
"@istanbuljs/load-nyc-config@^1.0.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced"
@ -1655,11 +1624,6 @@ axios@^0.21.1:
dependencies:
follow-redirects "^1.14.0"
b4a@^1.0.1, b4a@^1.1.0, b4a@^1.1.1, b4a@^1.3.0, b4a@^1.3.1, b4a@^1.5.0:
version "1.5.3"
resolved "https://registry.yarnpkg.com/b4a/-/b4a-1.5.3.tgz#56293b5607aeda3fd81c481e516e9f103fc88341"
integrity sha512-1aCQIzQJK7G0z1Una75tWMlwVAR8o+QHoAlnWc5XAxRVBESY9WsitfBgM5nPyDBP5HrhPU1Np4Pq2Y7CJQ+tVw==
babel-jest@^27.2.5:
version "27.2.5"
resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.2.5.tgz#6bbbc1bb4200fe0bfd1b1fbcbe02fc62ebed16aa"
@ -1743,22 +1707,6 @@ binary-extensions@^2.0.0:
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==
blake2b-wasm@^2.4.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/blake2b-wasm/-/blake2b-wasm-2.4.0.tgz#9115649111edbbd87eb24ce7c04b427e4e2be5be"
integrity sha512-S1kwmW2ZhZFFFOghcx73+ZajEfKBqhP82JMssxtLVMxlaPea1p9uoLiUZ5WYyHn0KddwbLc+0vh4wR0KBNoT5w==
dependencies:
b4a "^1.0.1"
nanoassert "^2.0.0"
blake2b@^2.1.1:
version "2.1.4"
resolved "https://registry.yarnpkg.com/blake2b/-/blake2b-2.1.4.tgz#817d278526ddb4cd673bfb1af16d1ad61e393ba3"
integrity sha512-AyBuuJNI64gIvwx13qiICz6H6hpmjvYS5DGkG6jbXMOT8Z3WUJ3V1X0FlhIoT1b/5JtHE3ki+xjtMvu1nn+t9A==
dependencies:
blake2b-wasm "^2.4.0"
nanoassert "^2.0.0"
bluebird@^3.7.2:
version "3.7.2"
resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f"
@ -1780,11 +1728,6 @@ body-parser@1.19.0, body-parser@^1.18.3:
raw-body "2.4.0"
type-is "~1.6.17"
bogon@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/bogon/-/bogon-1.0.0.tgz#66b8cdd269f790e3aa988e157bb34d4ba75ee586"
integrity sha512-mXxtlBtnW8koqFWPUBtKJm97vBSKZRpOvxvMRVun33qQXwMNfQzq9eTcQzKzqEoNUhNqF9t8rDc/wakKCcHMTg==
boolbase@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e"
@ -1917,13 +1860,6 @@ caniuse-lite@^1.0.30001264:
resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001442.tgz"
integrity sha512-239m03Pqy0hwxYPYR5JwOIxRJfLTWtle9FV8zosfV5pHg+/51uD4nxcUlM8+mWWGfwKtt8lJNHnD3cWw9VZ6ow==
chacha20-universal@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/chacha20-universal/-/chacha20-universal-1.0.4.tgz#e8a33a386500b1ce5361b811ec5e81f1797883f5"
integrity sha512-/IOxdWWNa7nRabfe7+oF+jVkGjlr2xUL4J8l/OvzZhj+c9RpMqoo3Dq+5nU1j/BflRV4BKnaQ4+4oH1yBpQG1Q==
dependencies:
nanoassert "^2.0.0"
chalk@^2.0.0:
version "2.4.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
@ -2093,20 +2029,6 @@ commander@^6.1.0:
resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c"
integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==
compact-encoding-net@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/compact-encoding-net/-/compact-encoding-net-1.0.1.tgz#4da743d52721f5d0cc73a6d00556a96bc9b9fa1b"
integrity sha512-N9k1Qwg9b1ENk+TZsZhthzkuMtn3rn4ZinN75gf3/LplE+uaTCKjyaau5sK0m2NEUa/MmR77VxiGfD/Qz1ar0g==
dependencies:
compact-encoding "^2.4.1"
compact-encoding@^2.1.0, compact-encoding@^2.4.1, compact-encoding@^2.5.1:
version "2.7.0"
resolved "https://registry.yarnpkg.com/compact-encoding/-/compact-encoding-2.7.0.tgz#e6a0df408c25cbcdf7d619c97527074478cafd06"
integrity sha512-2I0A+pYKXYwxewbLxj26tU4pJyKlFNjadzjZ+36xJ5HwTrnhD9KcMQk3McEQRl1at6jrwA8E7UjmBdsGhEAPMw==
dependencies:
b4a "^1.3.0"
concat-map@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
@ -2193,6 +2115,13 @@ cross-env@^7.0.3:
dependencies:
cross-spawn "^7.0.1"
cross-fetch@^3.1.5:
version "3.1.5"
resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f"
integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==
dependencies:
node-fetch "2.6.7"
cross-spawn@^6.0.0:
version "6.0.5"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
@ -2305,13 +2234,6 @@ debug@^4.3.3, debug@^4.3.4:
dependencies:
ms "2.1.2"
debugging-stream@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/debugging-stream/-/debugging-stream-2.0.0.tgz#515cad5a35299cf4b4bc0afcbd69d52c809c84ce"
integrity sha512-xwfl6wB/3xc553uwtGnSa94jFxnGOc02C0WU2Nmzwr80gzeqn1FX4VcbvoKIhe8L/lPq4BTQttAbrTN94uN8rA==
dependencies:
streamx "^2.12.4"
decimal.js-light@^2.5.1:
version "2.5.1"
resolved "https://registry.yarnpkg.com/decimal.js-light/-/decimal.js-light-2.5.1.tgz#134fd32508f19e208f4fb2f8dac0d2626a867934"
@ -2391,23 +2313,6 @@ detect-newline@^3.0.0:
resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651"
integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==
dht-rpc@^6.0.0:
version "6.1.1"
resolved "https://registry.yarnpkg.com/dht-rpc/-/dht-rpc-6.1.1.tgz#a292a22aa19b05136978d33528cb571d6e32502f"
integrity sha512-wo0nMXwn/rhxVz62V0d+l/0HuikxLQh6lkwlUIdoaUzGl9DobFj4epSScD3/lTMwKts+Ih0DFNqP+j0tYwdajQ==
dependencies:
b4a "^1.3.1"
compact-encoding "^2.1.0"
compact-encoding-net "^1.0.1"
events "^3.3.0"
fast-fifo "^1.0.0"
kademlia-routing-table "^1.0.0"
nat-sampler "^1.0.1"
sodium-universal "^3.0.4"
streamx "^2.10.3"
time-ordered-set "^1.0.2"
udx-native "^1.1.0"
dicer@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/dicer/-/dicer-0.3.0.tgz#eacd98b3bfbf92e8ab5c2fdb71aaac44bb06b872"
@ -2899,11 +2804,6 @@ eventemitter3@^3.1.0:
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7"
integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q==
events@^3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400"
integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
execa@^0.10.0:
version "0.10.0"
resolved "https://registry.yarnpkg.com/execa/-/execa-0.10.0.tgz#ff456a8f53f90f8eccc71a96d11bdfc7f082cb50"
@ -2985,6 +2885,11 @@ express@^4.17.1:
utils-merge "1.0.1"
vary "~1.1.2"
extract-files@^9.0.0:
version "9.0.0"
resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-9.0.0.tgz#8a7744f2437f81f5ed3250ed9f1550de902fe54a"
integrity sha512-CvdFfHkC95B4bBBk36hcEmvdR2awOdhhVUYH6S/zrVj3477zven/fJMYg7121h4T1xHZC+tetUpubpAhxwI7hQ==
faker@^5.5.3:
version "5.5.3"
resolved "https://registry.yarnpkg.com/faker/-/faker-5.5.3.tgz#c57974ee484431b25205c2c8dc09fda861e51e0e"
@ -3000,11 +2905,6 @@ fast-diff@^1.1.2:
resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03"
integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==
fast-fifo@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.1.0.tgz#17d1a3646880b9891dfa0c54e69c5fef33cad779"
integrity sha512-Kl29QoNbNvn4nhDsLYjyIAaIqaJB6rBx5p3sL9VjaefJ+eMFBWVZiaoguaoZfzEKr5RhAti0UgM8703akGPJ6g==
fast-glob@^3.1.1:
version "3.2.7"
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.7.tgz#fd6cb7a2d7e9aa7a7846111e85a196d6b2f766a1"
@ -3340,6 +3240,16 @@ graphql-query-complexity@^0.7.0:
dependencies:
lodash.get "^4.4.2"
graphql-request@5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-5.0.0.tgz#7504a807d0e11be11a3c448e900f0cc316aa18ef"
integrity sha512-SpVEnIo2J5k2+Zf76cUkdvIRaq5FMZvGQYnA4lUWYbc99m+fHh4CZYRRO/Ff4tCLQ613fzCm3SiDT64ubW5Gyw==
dependencies:
"@graphql-typed-document-node/core" "^3.1.1"
cross-fetch "^3.1.5"
extract-files "^9.0.0"
form-data "^3.0.0"
graphql-subscriptions@^1.0.0, graphql-subscriptions@^1.1.0:
version "1.2.1"
resolved "https://registry.yarnpkg.com/graphql-subscriptions/-/graphql-subscriptions-1.2.1.tgz#2142b2d729661ddf967b7388f7cf1dd4cf2e061d"
@ -3414,15 +3324,6 @@ he@1.2.0, he@^1.2.0:
resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
hmac-blake2b@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/hmac-blake2b/-/hmac-blake2b-2.0.0.tgz#09494e5d245d7afe45d157093080b159f7bacf15"
integrity sha512-JbGNtM1YRd8EQH/2vNTAP1oy5lJVPlBFYZfCJTu3k8sqOUm0rRIf/3+MCd5noVykETwTbun6jEOc+4Tu78ubHA==
dependencies:
nanoassert "^1.1.0"
sodium-native "^3.1.1"
sodium-universal "^3.0.0"
hosted-git-info@^2.1.4:
version "2.8.9"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9"
@ -3544,15 +3445,6 @@ human-signals@^2.1.0:
resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==
hypercore-crypto@^3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/hypercore-crypto/-/hypercore-crypto-3.3.0.tgz#03ab5b44608a563e131f629f671c6f90a83c52e6"
integrity sha512-zAWbDqG7kWwS6rCxxTUeB/OeFAz3PoOmouKaoMubtDJYJsLHqXtA3wE2mLsw+E2+iYyom5zrFyBTFVYxmgwW6g==
dependencies:
b4a "^1.1.0"
compact-encoding "^2.5.1"
sodium-universal "^3.0.0"
i18n-locales@^0.0.5:
version "0.0.5"
resolved "https://registry.yarnpkg.com/i18n-locales/-/i18n-locales-0.0.5.tgz#8f587e598ab982511d7c7db910cb45b8d93cd96a"
@ -4517,11 +4409,6 @@ jws@^3.2.2:
jwa "^1.4.1"
safe-buffer "^5.0.1"
kademlia-routing-table@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/kademlia-routing-table/-/kademlia-routing-table-1.0.1.tgz#6f18416f612e885a8d4df128f04c490a90d772f6"
integrity sha512-dKk19sC3/+kWhBIvOKCthxVV+JH0NrswSBq4sA4eOkkPMqQM1rRuOWte1WSKXeP8r9Nx4NuiH2gny3lMddJTpw==
keyv@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9"
@ -4932,26 +4819,6 @@ named-placeholders@^1.1.2:
dependencies:
lru-cache "^4.1.3"
nanoassert@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/nanoassert/-/nanoassert-1.1.0.tgz#4f3152e09540fde28c76f44b19bbcd1d5a42478d"
integrity sha512-C40jQ3NzfkP53NsO8kEOFd79p4b9kDXQMwgiY1z8ZwrDZgUyom0AHwGegF4Dm99L+YoYhuaB0ceerUcXmqr1rQ==
nanoassert@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/nanoassert/-/nanoassert-2.0.0.tgz#a05f86de6c7a51618038a620f88878ed1e490c09"
integrity sha512-7vO7n28+aYO4J+8w96AzhmU8G+Y/xpPDJz/se19ICsqj/momRbb9mh9ZUtkoJ5X3nTnPdhEJyc0qnM6yAsHBaA==
napi-macros@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b"
integrity sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg==
nat-sampler@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/nat-sampler/-/nat-sampler-1.0.1.tgz#2b68338ea6d4c139450cd971fd00a4ac1b33d923"
integrity sha512-yQvyNN7xbqR8crTKk3U8gRgpcV1Az+vfCEijiHu9oHHsnIl8n3x+yXNHl42M6L3czGynAVoOT9TqBfS87gDdcw==
natural-compare@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
@ -4977,7 +4844,7 @@ nice-try@^1.0.4:
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
node-fetch@^2.6.0:
node-fetch@2.6.7, node-fetch@^2.6.0:
version "2.6.7"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
@ -4991,7 +4858,7 @@ node-fetch@^2.6.1:
dependencies:
whatwg-url "^5.0.0"
node-gyp-build@^4.3.0, node-gyp-build@^4.4.0:
node-gyp-build@^4.3.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.5.0.tgz#7a64eefa0b21112f89f58379da128ac177f20e40"
integrity sha512-2iGbaQBV+ITgCz76ZEjmhUKAKVf7xfY1sRl4UiKQspfZMH2h06SyhNsnSVy50cwkFQDGLyif6m/6uFXHkOZ6rg==
@ -5042,25 +4909,6 @@ nodemon@^2.0.7:
undefsafe "^2.0.3"
update-notifier "^5.1.0"
noise-curve-ed@^1.0.2:
version "1.0.4"
resolved "https://registry.yarnpkg.com/noise-curve-ed/-/noise-curve-ed-1.0.4.tgz#8ae83f5d2d2e31d0c9c069271ca6e462d31cd884"
integrity sha512-plUUSEOU66FZ9TaBKpk4+fgQeeS+OLlThS2o8a1TxVpMWV2v1izvEnjSpFV9gEPZl4/1yN+S5KqLubFjogqQOw==
dependencies:
b4a "^1.1.0"
nanoassert "^2.0.0"
sodium-universal "^3.0.4"
noise-handshake@^2.1.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/noise-handshake/-/noise-handshake-2.2.0.tgz#24c98f502d49118770e1ec2af2894b8789f0ac7c"
integrity sha512-+0mFUc5YSnOPI+4K/7nr6XDGduITaUasPVurzrH03sk6yW+udKxP/qjEwEekRwIpnvcCKYnjiZ9HJenJv9ljZg==
dependencies:
b4a "^1.1.0"
hmac-blake2b "^2.0.0"
nanoassert "^2.0.0"
sodium-universal "^3.0.4"
nopt@~1.0.10:
version "1.0.10"
resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee"
@ -5666,11 +5514,6 @@ queue-microtask@^1.2.2:
resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
queue-tick@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/queue-tick/-/queue-tick-1.0.0.tgz#011104793a3309ae86bfeddd54e251dc94a36725"
integrity sha512-ULWhjjE8BmiICGn3G8+1L9wFpERNxkf8ysxkAer4+TFdRefDaXOCV5m92aMB9FtBVmn/8sETXLXY6BfW7hyaWQ==
railroad-diagrams@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/railroad-diagrams/-/railroad-diagrams-1.0.0.tgz#eb7e6267548ddedfb899c1b90e57374559cddb7e"
@ -5743,13 +5586,6 @@ readdirp@~3.6.0:
dependencies:
picomatch "^2.2.1"
record-cache@^1.1.1:
version "1.2.0"
resolved "https://registry.yarnpkg.com/record-cache/-/record-cache-1.2.0.tgz#e601bc4f164d58330cc00055e27aa4682291c882"
integrity sha512-kyy3HWCez2WrotaL3O4fTn0rsIdfRKOdQQcEJ9KpvmKmbffKVvwsloX063EgRUlpJIXHiDQFhJcTbZequ2uTZw==
dependencies:
b4a "^1.3.1"
reflect-metadata@^0.1.13:
version "0.1.13"
resolved "https://registry.yarnpkg.com/reflect-metadata/-/reflect-metadata-0.1.13.tgz#67ae3ca57c972a2aa1642b10fe363fe32d49dc08"
@ -5809,7 +5645,7 @@ resolve@^1.10.0, resolve@^1.10.1, resolve@^1.20.0:
is-core-module "^2.2.0"
path-parse "^1.0.6"
resolve@^1.15.1, resolve@^1.17.0:
resolve@^1.15.1:
version "1.22.1"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177"
integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==
@ -5886,11 +5722,6 @@ safe-identifier@^0.4.1:
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
safety-catch@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/safety-catch/-/safety-catch-1.0.2.tgz#d64cbd57fd601da91c356b6ab8902f3e449a7a4b"
integrity sha512-C1UYVZ4dtbBxEtvOcpjBaaD27nP8MlvyAQEp2fOTOEe6pfUpk1cDUxij6BR1jZup6rSyUTaBBplK7LanskrULA==
saxes@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d"
@ -5981,38 +5812,6 @@ sha.js@^2.4.11:
inherits "^2.0.1"
safe-buffer "^5.0.1"
sha256-universal@^1.1.0:
version "1.2.1"
resolved "https://registry.yarnpkg.com/sha256-universal/-/sha256-universal-1.2.1.tgz#051d92decce280cd6137d42d496eac88da942c0e"
integrity sha512-ghn3muhdn1ailCQqqceNxRgkOeZSVfSE13RQWEg6njB+itsFzGVSJv+O//2hvNXZuxVIRyNzrgsZ37SPDdGJJw==
dependencies:
b4a "^1.0.1"
sha256-wasm "^2.2.1"
sha256-wasm@^2.2.1:
version "2.2.2"
resolved "https://registry.yarnpkg.com/sha256-wasm/-/sha256-wasm-2.2.2.tgz#4940b6c9ba28f3f08b700efce587ef36d4d516d4"
integrity sha512-qKSGARvao+JQlFiA+sjJZhJ/61gmW/3aNLblB2rsgIxDlDxsJPHo8a1seXj12oKtuHVgJSJJ7QEGBUYQN741lQ==
dependencies:
b4a "^1.0.1"
nanoassert "^2.0.0"
sha512-universal@^1.1.0:
version "1.2.1"
resolved "https://registry.yarnpkg.com/sha512-universal/-/sha512-universal-1.2.1.tgz#829505a7586530515cc1a10b78815c99722c4df0"
integrity sha512-kehYuigMoRkIngCv7rhgruLJNNHDnitGTBdkcYbCbooL8Cidj/bS78MDxByIjcc69M915WxcQTgZetZ1JbeQTQ==
dependencies:
b4a "^1.0.1"
sha512-wasm "^2.3.1"
sha512-wasm@^2.3.1:
version "2.3.4"
resolved "https://registry.yarnpkg.com/sha512-wasm/-/sha512-wasm-2.3.4.tgz#b86b37112ff6d1fc3740f2484a6855f17a6e1300"
integrity sha512-akWoxJPGCB3aZCrZ+fm6VIFhJ/p8idBv7AWGFng/CZIrQo51oQNsvDbTSRXWAzIiZJvpy16oIDiCCPqTe21sKg==
dependencies:
b4a "^1.0.1"
nanoassert "^2.0.0"
shebang-command@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
@ -6056,13 +5855,6 @@ signal-exit@^3.0.2, signal-exit@^3.0.3:
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.5.tgz#9e3e8cc0c75a99472b44321033a7702e7738252f"
integrity sha512-KWcOiKeQj6ZyXx7zq4YxSMgHRlod4czeBQZrPb8OKcohcqAXShm7E20kEMle9WBt26hFcAf0qLOcp5zmY7kOqQ==
siphash24@^1.0.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/siphash24/-/siphash24-1.3.1.tgz#7f87fd2c5db88d8d46335a68f780f281641c8b22"
integrity sha512-moemC3ZKiTzH29nbFo3Iw8fbemWWod4vNs/WgKbQ54oEs6mE6XVlguxvinYjB+UmaE0PThgyED9fUkWvirT8hA==
dependencies:
nanoassert "^2.0.0"
sisteransi@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed"
@ -6087,50 +5879,13 @@ slick@^1.12.2:
resolved "https://registry.yarnpkg.com/slick/-/slick-1.12.2.tgz#bd048ddb74de7d1ca6915faa4a57570b3550c2d7"
integrity sha512-4qdtOGcBjral6YIBCWJ0ljFSKNLz9KkhbWtuGvUyRowl1kxfuE1x/Z/aJcaiilpb3do9bl5K7/1h9XC5wWpY/A==
sodium-javascript@~0.8.0:
version "0.8.0"
resolved "https://registry.yarnpkg.com/sodium-javascript/-/sodium-javascript-0.8.0.tgz#0a94d7bb58ab17be82255f3949259af59778fdbc"
integrity sha512-rEBzR5mPxPES+UjyMDvKPIXy9ImF17KOJ32nJNi9uIquWpS/nfj+h6m05J5yLJaGXjgM72LmQoUbWZVxh/rmGg==
dependencies:
blake2b "^2.1.1"
chacha20-universal "^1.0.4"
nanoassert "^2.0.0"
sha256-universal "^1.1.0"
sha512-universal "^1.1.0"
siphash24 "^1.0.1"
xsalsa20 "^1.0.0"
sodium-native@^3.1.1, sodium-native@^3.2.0, sodium-native@^3.3.0:
sodium-native@^3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/sodium-native/-/sodium-native-3.3.0.tgz#50ee52ac843315866cce3d0c08ab03eb78f22361"
integrity sha512-rg6lCDM/qa3p07YGqaVD+ciAbUqm6SoO4xmlcfkbU5r1zIGrguXztLiEtaLYTV5U6k8KSIUFmnU3yQUSKmf6DA==
dependencies:
node-gyp-build "^4.3.0"
sodium-secretstream@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/sodium-secretstream/-/sodium-secretstream-1.0.2.tgz#ae6fec16555f1a1d9fd2460b41256736d5044e13"
integrity sha512-AsWztbBHhHid+w5g28ftXA0mTrS52Dup7FYI0GR7ri1TQTlVsw0z//FNlhIqWsgtBctO/DxQosacbElCpmdcZw==
dependencies:
b4a "^1.1.1"
sodium-universal "^3.0.4"
sodium-universal@^3.0.0, sodium-universal@^3.0.4:
version "3.1.0"
resolved "https://registry.yarnpkg.com/sodium-universal/-/sodium-universal-3.1.0.tgz#f2fa0384d16b7cb99b1c8551a39cc05391a3ed41"
integrity sha512-N2gxk68Kg2qZLSJ4h0NffEhp4BjgWHCHXVlDi1aG1hA3y+ZeWEmHqnpml8Hy47QzfL1xLy5nwr9LcsWAg2Ep0A==
dependencies:
blake2b "^2.1.1"
chacha20-universal "^1.0.4"
nanoassert "^2.0.0"
resolve "^1.17.0"
sha256-universal "^1.1.0"
sha512-universal "^1.1.0"
siphash24 "^1.0.1"
sodium-javascript "~0.8.0"
sodium-native "^3.2.0"
xsalsa20 "^1.0.0"
source-map-support@^0.5.6:
version "0.5.20"
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.20.tgz#12166089f8f5e5e8c56926b377633392dd2cb6c9"
@ -6216,14 +5971,6 @@ streamsearch@0.1.2:
resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a"
integrity sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=
streamx@^2.10.2, streamx@^2.10.3, streamx@^2.12.0, streamx@^2.12.4:
version "2.12.4"
resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.12.4.tgz#0369848b20b8f79c65320735372df17cafcd9aff"
integrity sha512-K3xdIp8YSkvbdI0PrCcP0JkniN8cPCyeKlcZgRFSl1o1xKINCYM93FryvTSOY57x73pz5/AjO5B8b9BYf21wWw==
dependencies:
fast-fifo "^1.0.0"
queue-tick "^1.0.0"
string-length@^4.0.1:
version "4.0.2"
resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a"
@ -6388,16 +6135,6 @@ throat@^6.0.1:
resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375"
integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w==
time-ordered-set@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/time-ordered-set/-/time-ordered-set-1.0.2.tgz#3bd931fc048234147f8c2b8b1ebbebb0a3ecb96f"
integrity sha512-vGO99JkxvgX+u+LtOKQEpYf31Kj3i/GNwVstfnh4dyINakMgeZCpew1e3Aj+06hEslhtHEd52g7m5IV+o1K8Mw==
timeout-refresh@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/timeout-refresh/-/timeout-refresh-2.0.1.tgz#f8ec7cf1f9d93b2635b7d4388cb820c5f6c16f98"
integrity sha512-SVqEcMZBsZF9mA78rjzCrYrUs37LMJk3ShZ851ygZYW1cMeIjs9mL57KO6Iv5mmjSQnOe/29/VAfGXo+oRCiVw==
titleize@2:
version "2.1.0"
resolved "https://registry.yarnpkg.com/titleize/-/titleize-2.1.0.tgz#5530de07c22147a0488887172b5bd94f5b30a48f"
@ -6622,16 +6359,6 @@ uc.micro@^1.0.1:
resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-1.0.6.tgz#9c411a802a409a91fc6cf74081baba34b24499ac"
integrity sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==
udx-native@^1.1.0:
version "1.2.1"
resolved "https://registry.yarnpkg.com/udx-native/-/udx-native-1.2.1.tgz#a229b8bfab8c9c9eea05c7e0d68e671ab70d562d"
integrity sha512-hLoJ3rE1PuqO/A1YENG8oYNuAGltdwXofzavYwXbg2yk/qQgGBDpUQd/qtdENxkawad5cEEdJEdwvchslDl7OA==
dependencies:
b4a "^1.5.0"
napi-macros "^2.0.0"
node-gyp-build "^4.4.0"
streamx "^2.12.0"
unbox-primitive@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471"
@ -6936,11 +6663,6 @@ write-file-atomic@^3.0.0:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.5.tgz#8b4bc4af518cfabd0473ae4f99144287b33eb881"
integrity sha512-BAkMFcAzl8as1G/hArkxOxq3G7pjUqQ3gzYbLL0/5zNkph70e+lCoxBGnm6AW1+/aiNeV4fnKqZ8m4GZewmH2w==
xache@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/xache/-/xache-1.1.0.tgz#afc20dec9ff8b2260eea03f5ad9422dc0200c6e9"
integrity sha512-RQGZDHLy/uCvnIrAvaorZH/e6Dfrtxj16iVlGjkj4KD2/G/dNXNqhk5IdSucv5nSSnDK00y8Y/2csyRdHveJ+Q==
xdg-basedir@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
@ -6956,11 +6678,6 @@ xmlchars@^2.2.0:
resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb"
integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==
xsalsa20@^1.0.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/xsalsa20/-/xsalsa20-1.2.0.tgz#e5a05cb26f8cef723f94a559102ed50c1b44c25c"
integrity sha512-FIr/DEeoHfj7ftfylnoFt3rAIRoWXpx2AoDfrT2qD2wtp7Dp+COajvs/Icb7uHqRW9m60f5iXZwdsJJO3kvb7w==
xss@^1.0.8:
version "1.0.10"
resolved "https://registry.yarnpkg.com/xss/-/xss-1.0.10.tgz#5cd63a9b147a755a14cb0455c7db8866120eb4d2"

View File

@ -0,0 +1,51 @@
import {
BaseEntity,
Entity,
PrimaryGeneratedColumn,
Column,
CreateDateColumn,
UpdateDateColumn,
} from 'typeorm'
@Entity('communities')
export class Community extends BaseEntity {
@PrimaryGeneratedColumn('increment', { unsigned: true })
id: number
@Column({ name: 'foreign', type: 'bool', nullable: false, default: true })
foreign: boolean
@Column({ name: 'public_key', type: 'binary', length: 64, default: null, nullable: true })
publicKey: Buffer
@Column({ name: 'api_version', length: 10, nullable: false })
apiVersion: string
@Column({ name: 'end_point', length: 255, nullable: false })
endPoint: string
@Column({ name: 'last_announced_at', type: 'datetime', nullable: true })
lastAnnouncedAt: Date
@Column({ name: 'verified_at', type: 'datetime', nullable: true })
verifiedAt: Date
@Column({ name: 'last_error_at', type: 'datetime', nullable: true })
lastErrorAt: Date
@CreateDateColumn({
name: 'created_at',
type: 'datetime',
default: () => 'CURRENT_TIMESTAMP(3)',
nullable: false,
})
createdAt: Date
@UpdateDateColumn({
name: 'updated_at',
type: 'datetime',
onUpdate: 'CURRENT_TIMESTAMP(3)',
nullable: true,
})
updatedAt: Date | null
}

View File

@ -1 +1 @@
export { Community } from './0058-add_communities_table/Community'
export { Community } from './0060-update_communities_table/Community'

View File

@ -0,0 +1,32 @@
/* MIGRATION TO UPDATE THE FEDERATION COMMUNITIES TABLE
*
* This migration alters the existing `communities` table in the `apollo` database (`gradido_community`):
* it adds the `foreign`, `verified_at` and `last_error_at` columns and makes `last_announced_at` nullable.
*/
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
/* eslint-disable @typescript-eslint/no-explicit-any */
export async function upgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
await queryFn(
'ALTER TABLE `communities` MODIFY COLUMN `last_announced_at` datetime(3) AFTER `end_point`;',
)
await queryFn(
'ALTER TABLE `communities` ADD COLUMN `foreign` tinyint(4) NOT NULL DEFAULT 1 AFTER `id`;',
)
await queryFn(
'ALTER TABLE `communities` ADD COLUMN `verified_at` datetime(3) AFTER `last_announced_at`;',
)
await queryFn(
'ALTER TABLE `communities` ADD COLUMN `last_error_at` datetime(3) AFTER `verified_at`;',
)
}
export async function downgrade(queryFn: (query: string, values?: any[]) => Promise<Array<any>>) {
// write downgrade logic as parameter of queryFn
await queryFn(
'ALTER TABLE `communities` MODIFY COLUMN `last_announced_at` datetime(3) NOT NULL AFTER `end_point`;',
)
await queryFn('ALTER TABLE `communities` DROP COLUMN `foreign`;')
await queryFn('ALTER TABLE `communities` DROP COLUMN `verified_at`;')
await queryFn('ALTER TABLE `communities` DROP COLUMN `last_error_at`;')
}

View File

@ -15,3 +15,5 @@ TYPEORM_LOGGING_RELATIVE_PATH=typeorm.dht-node.log
# on an hash created from this topic
FEDERATION_DHT_TOPIC=GRADIDO_HUB
# FEDERATION_DHT_SEED=64ebcb0e3ad547848fef4197c6e2332f
# FEDERATION_COMMUNITY_URL=http://localhost
# FEDERATION_COMMUNITY_API_PORT=5000

View File

@ -12,3 +12,4 @@ TYPEORM_LOGGING_RELATIVE_PATH=$TYPEORM_LOGGING_RELATIVE_PATH
FEDERATION_DHT_TOPIC=$FEDERATION_DHT_TOPIC
FEDERATION_DHT_SEED=$FEDERATION_DHT_SEED
FEDERATION_COMMUNITY_URL=$FEDERATION_COMMUNITY_URL
FEDERATION_COMMUNITY_API_PORT=$FEDERATION_COMMUNITY_API_PORT

View File

@ -3,7 +3,7 @@ import dotenv from 'dotenv'
dotenv.config()
const constants = {
DB_VERSION: '0059-add_hide_amount_to_users',
DB_VERSION: '0060-update_communities_table',
LOG4JS_CONFIG: 'log4js-config.json',
// default log level on production should be info
LOG_LEVEL: process.env.LOG_LEVEL || 'info',
@ -31,7 +31,8 @@ const database = {
const federation = {
FEDERATION_DHT_TOPIC: process.env.FEDERATION_DHT_TOPIC || 'GRADIDO_HUB',
FEDERATION_DHT_SEED: process.env.FEDERATION_DHT_SEED || null,
FEDERATION_COMMUNITY_URL: process.env.FEDERATION_COMMUNITY_URL || null,
FEDERATION_COMMUNITY_URL: process.env.FEDERATION_COMMUNITY_URL || 'http://localhost',
FEDERATION_COMMUNITY_API_PORT: process.env.FEDERATION_COMMUNITY_API_PORT || '5000',
}
// Check config version

View File

@ -116,6 +116,7 @@ describe('federation', () => {
beforeEach(async () => {
DHT.mockClear()
jest.clearAllMocks()
await cleanDB()
await startDHT(TEST_TOPIC)
})
@ -234,18 +235,18 @@ describe('federation', () => {
beforeEach(async () => {
jest.clearAllMocks()
jsonArray = [
{ api: 'v1_0', url: 'too much versions at the same time test' },
{ api: 'v1_0', url: 'url2' },
{ api: 'v1_0', url: 'url3' },
{ api: 'v1_0', url: 'url4' },
{ api: 'v1_0', url: 'url5' },
{ api: '1_0', url: 'too much versions at the same time test' },
{ api: '1_0', url: 'url2' },
{ api: '1_0', url: 'url3' },
{ api: '1_0', url: 'url4' },
{ api: '1_0', url: 'url5' },
]
await socketEventMocks.data(Buffer.from(JSON.stringify(jsonArray)))
})
it('logs the received data', () => {
expect(logger.info).toBeCalledWith(
'data: [{"api":"v1_0","url":"too much versions at the same time test"},{"api":"v1_0","url":"url2"},{"api":"v1_0","url":"url3"},{"api":"v1_0","url":"url4"},{"api":"v1_0","url":"url5"}]',
'data: [{"api":"1_0","url":"too much versions at the same time test"},{"api":"1_0","url":"url2"},{"api":"1_0","url":"url3"},{"api":"1_0","url":"url4"},{"api":"1_0","url":"url5"}]',
)
})
@ -266,17 +267,17 @@ describe('federation', () => {
jsonArray = [
{
wrong: 'wrong but tolerated property test',
api: 'v1_0',
api: '1_0',
url: 'url1',
},
{
api: 'v2_0',
api: '2_0',
url: 'url2',
wrong: 'wrong but tolerated property test',
},
]
await socketEventMocks.data(Buffer.from(JSON.stringify(jsonArray)))
result = await DbCommunity.find()
result = await DbCommunity.find({ foreign: true })
})
afterAll(async () => {
@ -287,13 +288,14 @@ describe('federation', () => {
expect(result).toHaveLength(2)
})
it('has an entry for api version v1_0', () => {
it('has an entry for api version 1_0', () => {
expect(result).toEqual(
expect.arrayContaining([
expect.objectContaining({
id: expect.any(Number),
foreign: true,
publicKey: expect.any(Buffer),
apiVersion: 'v1_0',
apiVersion: '1_0',
endPoint: 'url1',
lastAnnouncedAt: expect.any(Date),
createdAt: expect.any(Date),
@ -303,13 +305,14 @@ describe('federation', () => {
)
})
it('has an entry for api version v2_0', () => {
it('has an entry for api version 2_0', () => {
expect(result).toEqual(
expect.arrayContaining([
expect.objectContaining({
id: expect.any(Number),
foreign: true,
publicKey: expect.any(Buffer),
apiVersion: 'v2_0',
apiVersion: '2_0',
endPoint: 'url2',
lastAnnouncedAt: expect.any(Date),
createdAt: expect.any(Date),
@ -535,7 +538,7 @@ describe('federation', () => {
{ api: 'toolong api', url: 'some valid url' },
]
await socketEventMocks.data(Buffer.from(JSON.stringify(jsonArray)))
result = await DbCommunity.find()
result = await DbCommunity.find({ foreign: true })
})
afterAll(async () => {
@ -551,6 +554,7 @@ describe('federation', () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(Number),
foreign: true,
publicKey: expect.any(Buffer),
apiVersion: 'valid api',
endPoint:
@ -588,7 +592,7 @@ describe('federation', () => {
},
]
await socketEventMocks.data(Buffer.from(JSON.stringify(jsonArray)))
result = await DbCommunity.find()
result = await DbCommunity.find({ foreign: true })
})
afterAll(async () => {
@ -604,6 +608,7 @@ describe('federation', () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(Number),
foreign: true,
publicKey: expect.any(Buffer),
apiVersion: 'valid api1',
endPoint:
@ -621,6 +626,7 @@ describe('federation', () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(Number),
foreign: true,
publicKey: expect.any(Buffer),
apiVersion: 'valid api2',
endPoint:
@ -638,6 +644,7 @@ describe('federation', () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(Number),
foreign: true,
publicKey: expect.any(Buffer),
apiVersion: 'valid api3',
endPoint:
@ -655,6 +662,7 @@ describe('federation', () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(Number),
foreign: true,
publicKey: expect.any(Buffer),
apiVersion: 'valid api4',
endPoint:
@ -710,17 +718,17 @@ describe('federation', () => {
Buffer.from(
JSON.stringify([
{
api: 'v1_0',
url: 'http://localhost:4000/api/v1_0',
api: '1_0',
url: 'http://localhost:5001/api/',
},
{
api: 'v2_0',
url: 'http://localhost:4000/api/v2_0',
api: '2_0',
url: 'http://localhost:5002/api/',
},
]),
),
)
result = await DbCommunity.find()
result = await DbCommunity.find({ foreign: true })
})
afterAll(async () => {
@ -736,9 +744,10 @@ describe('federation', () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(Number),
foreign: true,
publicKey: expect.any(Buffer),
apiVersion: 'v1_0',
endPoint: 'http://localhost:4000/api/v1_0',
apiVersion: '1_0',
endPoint: 'http://localhost:5001/api/',
lastAnnouncedAt: expect.any(Date),
createdAt: expect.any(Date),
updatedAt: null,
@ -747,14 +756,15 @@ describe('federation', () => {
)
})
it('has an entry for api version v2_0', () => {
it('has an entry for api version 2_0', () => {
expect(result).toEqual(
expect.arrayContaining([
expect.objectContaining({
id: expect.any(Number),
foreign: true,
publicKey: expect.any(Buffer),
apiVersion: 'v2_0',
endPoint: 'http://localhost:4000/api/v2_0',
apiVersion: '2_0',
endPoint: 'http://localhost:5002/api/',
lastAnnouncedAt: expect.any(Date),
createdAt: expect.any(Date),
updatedAt: null,
@ -775,16 +785,16 @@ describe('federation', () => {
Buffer.from(
JSON.stringify([
{
api: 'v1_0',
url: 'http://localhost:4000/api/v1_0',
api: '1_0',
url: 'http://localhost:5001/api/',
},
{
api: 'v1_1',
url: 'http://localhost:4000/api/v1_1',
api: '1_1',
url: 'http://localhost:5002/api/',
},
{
api: 'v2_0',
url: 'http://localhost:4000/api/v2_0',
api: '2_0',
url: 'http://localhost:5003/api/',
},
]),
),

View File

@ -15,9 +15,9 @@ const ERRORTIME = 240000
const ANNOUNCETIME = 30000
enum ApiVersionType {
V1_0 = 'v1_0',
V1_1 = 'v1_1',
V2_0 = 'v2_0',
V1_0 = '1_0',
V1_1 = '1_1',
V2_0 = '2_0',
}
type CommunityApi = {
api: string
@ -31,13 +31,7 @@ export const startDHT = async (topic: string): Promise<void> => {
logger.info(`keyPairDHT: publicKey=${keyPair.publicKey.toString('hex')}`)
logger.debug(`keyPairDHT: secretKey=${keyPair.secretKey.toString('hex')}`)
const ownApiVersions = Object.values(ApiVersionType).map(function (apiEnum) {
const comApi: CommunityApi = {
api: apiEnum,
url: CONFIG.FEDERATION_COMMUNITY_URL + apiEnum,
}
return comApi
})
const ownApiVersions = await writeHomeCommunityEntries(keyPair.publicKey)
logger.debug(`ApiList: ${JSON.stringify(ownApiVersions)}`)
const node = new DHT({ keyPair })
@ -184,3 +178,34 @@ export const startDHT = async (topic: string): Promise<void> => {
logger.error('DHT unexpected error:', err)
}
}
async function writeHomeCommunityEntries(pubKey: any): Promise<CommunityApi[]> {
const homeApiVersions: CommunityApi[] = Object.values(ApiVersionType).map(function (apiEnum) {
const port =
Number.parseInt(CONFIG.FEDERATION_COMMUNITY_API_PORT) + Number(apiEnum.replace('_', ''))
const comApi: CommunityApi = {
api: apiEnum,
url: CONFIG.FEDERATION_COMMUNITY_URL + ':' + port.toString() + '/api/',
}
return comApi
})
try {
// first remove previously existing homeCommunity entries
DbCommunity.createQueryBuilder().delete().where({ foreign: false }).execute()
homeApiVersions.forEach(async function (homeApi) {
const homeCom = new DbCommunity()
homeCom.foreign = false
homeCom.apiVersion = homeApi.api
homeCom.endPoint = homeApi.url
homeCom.publicKey = pubKey.toString('hex')
// this will NOT update the updatedAt column, to distinguish between a normal update and the last announcement
await DbCommunity.insert(homeCom)
logger.info(`federation home-community inserted successfully: ${JSON.stringify(homeCom)}`)
})
} catch (err) {
throw new Error(`Federation: Error writing HomeCommunity-Entries: ${err}`)
}
return homeApiVersions
}
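For orientation, a sketch of the port arithmetic used above, assuming the config defaults FEDERATION_COMMUNITY_URL=http://localhost and FEDERATION_COMMUNITY_API_PORT=5000:

```typescript
// mirrors the port calculation in writeHomeCommunityEntries under the assumed defaults
const endpointFor = (apiVersion: string): string => {
  const port = 5000 + Number(apiVersion.replace('_', ''))
  return `http://localhost:${port}/api/`
}

// endpointFor('1_0') === 'http://localhost:5010/api/'
// endpointFor('1_1') === 'http://localhost:5011/api/'
// endpointFor('2_0') === 'http://localhost:5020/api/'
```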

View File

@ -84,6 +84,29 @@ services:
- ./dht-node:/app
- ./database:/database
########################################################
# FEDERATION ###########################################
########################################################
federation:
# name the image so that it cannot be found in a DockerHub repository, otherwise it will not be built locally from the 'dockerfile' but pulled from there
image: gradido/federation:local-development
build:
target: development
networks:
- external-net
- internal-net
environment:
- NODE_ENV="development"
volumes:
# This makes sure the docker container has its own node modules.
# Therefore it is possible to have a different node version on the host machine
- federation_node_modules:/app/node_modules
- federation_database_node_modules:/database/node_modules
- federation_database_build:/database/build
# bind the local folder to the docker to allow live reload
- ./federation:/app
- ./database:/database
########################################################
# DATABASE ##############################################
########################################################
@ -155,5 +178,8 @@ volumes:
dht_node_modules:
dht_database_node_modules:
dht_database_build:
federation_node_modules:
federation_database_node_modules:
federation_database_build:
database_node_modules:
database_build:

View File

@ -36,6 +36,21 @@ services:
- NODE_ENV="test"
- DB_HOST=mariadb
########################################################
# FEDERATION ###########################################
########################################################
federation:
# name the image so that it cannot be found in a DockerHub repository, otherwise it will not be built locally from the 'dockerfile' but pulled from there
image: gradido/federation:test
build:
target: test
networks:
- external-net
- internal-net
environment:
- NODE_ENV="test"
- DB_HOST=mariadb
########################################################
# DATABASE #############################################
########################################################

View File

@ -147,6 +147,42 @@ services:
# <host_machine_directory>:<container_directory> mirror bidirectional path in local context with path in Docker container
- ./logs/dht-node:/logs/dht-node
########################################################
# FEDERATION ###########################################
########################################################
federation:
# name the image so that it cannot be found in a DockerHub repository, otherwise it will not be built locally from the 'dockerfile' but pulled from there
image: gradido/federation:local-production
build:
# since we have to include the entities from ./database we cannot define the context as ./federation
# this might blow build image size to the moon ?!
context: ./
dockerfile: ./federation/Dockerfile
target: production
networks:
- internal-net
- external-net
ports:
- 5010:5010
depends_on:
- mariadb
restart: always
environment:
# Envs used in Dockerfile
# - DOCKER_WORKDIR="/app"
- PORT=5010
- BUILD_DATE
- BUILD_VERSION
- BUILD_COMMIT
- NODE_ENV="production"
- DB_HOST=mariadb
# Application only envs
#env_file:
# - ./frontend/.env
volumes:
# <host_machine_directory>:<container_directory> mirror bidirectional path in local context with path in Docker container
- ./logs/federation:/logs/federation
########################################################
# DATABASE #############################################
########################################################

26
e2e-tests/.eslintrc.js Normal file
View File

@ -0,0 +1,26 @@
module.exports = {
root: true,
env: {
node: true,
cypress: true,
},
parser: '@typescript-eslint/parser',
plugins: ['cypress', 'prettier', '@typescript-eslint' /*, 'jest' */],
extends: [
'standard',
'eslint:recommended',
'plugin:prettier/recommended',
'plugin:@typescript-eslint/recommended',
],
// add your custom rules here
rules: {
'no-console': ['error'],
'no-debugger': 'error',
'prettier/prettier': [
'error',
{
htmlWhitespaceSensitivity: 'ignore',
},
],
},
}

5
e2e-tests/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
node_modules/
cypress/screenshots/
cypress/videos/
cucumber-messages.ndjson

9
e2e-tests/.prettierrc.js Normal file
View File

@ -0,0 +1,9 @@
module.exports = {
semi: false,
printWidth: 100,
singleQuote: true,
trailingComma: "all",
tabWidth: 2,
bracketSpacing: true,
endOfLine: "auto",
};

View File

@ -11,7 +11,7 @@
###############################################################################
FROM cypress/base:16.14.2-slim
ARG DOCKER_WORKDIR=/tests/
ARG DOCKER_WORKDIR="/tests"
WORKDIR $DOCKER_WORKDIR
# install dependencies

View File

@ -1,7 +1,73 @@
# Gradido end-to-end tests
# Gradido End-to-End Testing with [Cypress](https://www.cypress.io/) (CI-ready via Docker)
This is still WIP.
A setup to showcase Cypress as an end-to-end testing tool for Gradido, running in a Docker container.
The tests are organized in feature files written in Gherkin syntax.
For automated end-to-end testing one of the frameworks Cypress or Playwright will be utilized.
For more details on how to run them, see the subfolders' README instructions.
## Features under test
So far, the following features are covered:
- [User authentication](https://github.com/gradido/gradido/blob/master/e2e-tests/cypress/tests/cypress/e2e/User.Authentication.feature)
- [User profile - change password](https://github.com/gradido/gradido/blob/master/e2e-tests/cypress/tests/cypress/e2e/UserProfile.ChangePassword.feature)
- [User registration](https://github.com/gradido/gradido/blob/master/e2e-tests/cypress/tests/cypress/e2e/User.Registration.feature) (WIP)
## Precondition
Before running the tests, change to the repo's root directory (gradido).
### Boot up the system under test
```bash
docker-compose up
```
### Seed the database
The database has to be seeded before every test run.
```bash
# change to the backend directory
cd /path/to/gradido/gradido/backend
# install all dependencies
yarn
# seed the database (everytime before running the tests)
yarn seed
```
## Execute the tests
This setup will be integrated into the Gradido GitHub Actions to automatically support the CI/CD process.
For now the test setup can only be used locally, in two modes.
### Run Cypress directly from the code
```bash
# change to the tests directory
cd /path/to/gradido/e2e-tests/
# install all dependencies
yarn install
# a) run the tests on command line
yarn cypress run
# b) open the Cypress GUI to run the tests in interactive mode
yarn cypress open
```
### Run Cypress from a separate Docker container
```bash
# change to the cypress directory
cd /path/to/gradido/e2e-tests/
# build a Docker image from the Dockerfile
docker build -t gradido_e2e-tests-cypress .
# run the Docker image and execute the given tests
docker run -it --network=host gradido_e2e-tests-cypress yarn cypress-e2e
```

View File

@ -0,0 +1,79 @@
import { defineConfig } from 'cypress'
import { addCucumberPreprocessorPlugin } from '@badeball/cypress-cucumber-preprocessor'
import browserify from '@badeball/cypress-cucumber-preprocessor/browserify'
let resetPasswordLink: string
async function setupNodeEvents(
on: Cypress.PluginEvents,
config: Cypress.PluginConfigOptions
): Promise<Cypress.PluginConfigOptions> {
await addCucumberPreprocessorPlugin(on, config)
on(
'file:preprocessor',
browserify(config, {
typescript: require.resolve('typescript'),
})
)
on('task', {
setResetPasswordLink: (val) => {
return (resetPasswordLink = val)
},
getResetPasswordLink: () => {
return resetPasswordLink
},
})
on('after:run', (results) => {
if (results) {
// results will be undefined in interactive mode
// eslint-disable-next-line no-console
console.log(results.status)
}
})
return config
}
export default defineConfig({
e2e: {
specPattern: '**/*.feature',
excludeSpecPattern: '*.js',
experimentalSessionAndOrigin: true,
baseUrl: 'http://localhost:3000',
chromeWebSecurity: false,
defaultCommandTimeout: 10000,
supportFile: 'cypress/support/index.ts',
viewportHeight: 720,
viewportWidth: 1280,
video: false,
retries: {
runMode: 2,
openMode: 0,
},
env: {
backendURL: 'http://localhost:4000',
mailserverURL: 'http://localhost:1080',
loginQuery: `query ($email: String!, $password: String!, $publisherId: Int) {
login(email: $email, password: $password, publisherId: $publisherId) {
email
firstName
lastName
language
klickTipp {
newsletterState
__typename
}
hasElopage
publisherId
isAdmin
creation
__typename
}
}`,
},
setupNodeEvents,
},
})
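The `setResetPasswordLink`/`getResetPasswordLink` tasks registered above act as a small shared store between steps that run in different origins. A minimal usage sketch (the link value is a placeholder; in the real steps further down in this diff it is extracted from the reset e-mail):

```ts
// minimal sketch: one step stores the link via a task ...
cy.task('setResetPasswordLink', 'http://localhost:3000/reset-password/123456')

// ... and a later step reads it back and visits it
cy.task('getResetPasswordLink').then((link) => {
  cy.visit(link as string)
})
```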

View File

@ -1,4 +0,0 @@
tests/node_modules/
tests/cypress/screenshots/
tests/cypress/videos/
tests/cucumber-messages.ndjson

View File

@ -1,73 +0,0 @@
# Gradido End-to-End Testing with [Cypress](https://www.cypress.io/) (CI-ready via Docker)
A setup to show-case Cypress as an end-to-end testing tool for Gradido running in a Docker container.
The tests are organized in feature files written in Gherkin syntax.
## Features under test
So far these features are initially tested
- [User authentication](https://github.com/gradido/gradido/blob/master/e2e-tests/cypress/tests/cypress/e2e/User.Authentication.feature)
- [User profile - change password](https://github.com/gradido/gradido/blob/master/e2e-tests/cypress/tests/cypress/e2e/UserProfile.ChangePassword.feature)
- [User registration]((https://github.com/gradido/gradido/blob/master/e2e-tests/cypress/tests/cypress/e2e/User.Registration.feature)) (WIP)
## Precondition
Before running the tests, change to the repo's root directory (gradido).
### Boot up the system under test
```bash
docker-compose up
```
### Seed the database
The database has to be seeded upfront to every test run.
```bash
# change to the backend directory
cd /path/to/gradido/gradido/backend
# install all dependencies
yarn
# seed the database (everytime before running the tests)
yarn seed
```
## Execute the test
This setup will be integrated in the Gradido Github Actions to automatically support the CI/CD process.
For now the test setup can only be used locally in two modes.
### Run Cypress directly from the code
```bash
# change to the tests directory
cd /path/to/gradido/e2e-tests/cypress/tests
# install all dependencies
yarn install
# a) run the tests on command line
yarn cypress run
# b) open the Cypress GUI to run the tests in interactive mode
yarn cypress open
```
### Run Cyprss from a separate Docker container
```bash
# change to the cypress directory
cd /path/to/gradido/e2e-tests/cypress/
# build a Docker image from the Dockerfile
docker build -t gradido_e2e-tests-cypress .
# run the Docker image and execute the given tests
docker run -it --network=host gradido_e2e-tests-cypress yarn cypress-e2e
```

View File

@ -0,0 +1,25 @@
Feature: User Authentication - reset password
As a user
I want to reset my password from the sign in page
# TODO: for these pre-conditions, use seeding or an API check whether the user already exists in the test system
# Background:
# Given the following "users" are in the database:
# | email | password | name |
# | bibi@bloxberg.de | Aa12345_ | Bibi Bloxberg |
Scenario: Reset password from signin page successfully
Given the user navigates to page "/login"
And the user navigates to the forgot password page
When the user enters the e-mail address "bibi@bloxberg.de"
And the user submits the e-mail form
Then the user receives an e-mail containing the password reset link
When the user opens the password reset link in the browser
And the user enters the password "12345Aa_"
And the user repeats the password "12345Aa_"
And the user submits the password form
And the user clicks the sign in button
Then the user submits the credentials "bibi@bloxberg.de" "Aa12345_"
And the user cannot login
But the user submits the credentials "bibi@bloxberg.de" "12345Aa_"
And the user is logged in with username "Bibi Bloxberg"

View File

@ -11,7 +11,7 @@ Feature: User authentication
# | bibi@bloxberg.de | Aa12345_ | Bibi Bloxberg |
Scenario: Log in successfully
Given the browser navigates to page "/login"
Given the user navigates to page "/login"
When the user submits the credentials "bibi@bloxberg.de" "Aa12345_"
Then the user is logged in with username "Bibi Bloxberg"

View File

@ -4,7 +4,7 @@ Feature: User registration
@skip
Scenario: Register successfully
Given the browser navigates to page "/register"
Given the user navigates to page "/register"
When the user fills name and email "Regina" "Register" "regina@register.com"
And the user agrees to the privacy policy
And the user submits the registration form

View File

@ -12,7 +12,7 @@ Feature: User profile - change password
Given the user is logged in as "bibi@bloxberg.de" "Aa12345_"
Scenario: Change password successfully
Given the browser navigates to page "/profile"
Given the user navigates to page "/profile"
And the user opens the change password menu
When the user fills the password form with:
| Old password | Aa12345_ |

View File

@ -0,0 +1,18 @@
/// <reference types='cypress' />
export class ForgotPasswordPage {
// selectors
emailInput = 'input[type=email]'
submitBtn = 'button[type=submit]'
successComponent = '[data-test="forgot-password-success"]'
enterEmail(email: string) {
cy.get(this.emailInput).clear().type(email)
return this
}
submitEmail() {
cy.get(this.submitBtn).click()
return this
}
}

View File

@ -0,0 +1,35 @@
/// <reference types='cypress' />
export class LoginPage {
// selectors
emailInput = 'input[type=email]'
passwordInput = 'input[type=password]'
forgotPasswordLink = '[data-test="forgot-password-link"]'
submitBtn = '[type=submit]'
emailHint = '#vee_Email'
passwordHint = '#vee_Password'
goto() {
cy.visit('/')
return this
}
enterEmail(email: string) {
cy.get(this.emailInput).clear().type(email)
return this
}
enterPassword(password: string) {
cy.get(this.passwordInput).clear().type(password)
return this
}
submitLogin() {
cy.get(this.submitBtn).click()
return this
}
openForgotPasswordPage() {
cy.get(this.forgotPasswordLink).click()
}
}
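Since every method returns `this`, step definitions can chain the page-object calls. A hypothetical usage sketch (credentials taken from the seeded test user used in the feature files; the import path depends on where the snippet lives, e.g. `'../../e2e/models/LoginPage'` from a step file):

```ts
import { LoginPage } from './LoginPage'

const loginPage = new LoginPage()
loginPage
  .goto()
  .enterEmail('bibi@bloxberg.de')
  .enterPassword('Aa12345_')
  .submitLogin()
```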

View File

@ -0,0 +1,10 @@
/// <reference types='cypress' />
export class OverviewPage {
navbarName = '[data-test="navbar-item-username"]'
goto() {
cy.visit('/overview')
return this
}
}

View File

@ -0,0 +1,35 @@
/// <reference types='cypress' />
export class ProfilePage {
// selectors
openChangePassword = '[data-test=open-password-change-form]'
oldPasswordInput = '#password-input-field'
newPasswordInput = '#new-password-input-field'
newPasswordRepeatInput = '#repeat-new-password-input-field'
submitNewPasswordBtn = '[data-test=submit-new-password-btn]'
goto() {
cy.visit('/profile')
return this
}
enterOldPassword(password: string) {
cy.get(this.oldPasswordInput).clear().type(password)
return this
}
enterNewPassword(password: string) {
cy.get(this.newPasswordInput).find('input').clear().type(password)
return this
}
enterRepeatPassword(password: string) {
cy.get(this.newPasswordRepeatInput).find('input').clear().type(password)
return this
}
submitPasswordForm() {
cy.get(this.submitNewPasswordBtn).click()
return this
}
}

View File

@ -0,0 +1,42 @@
/// <reference types='cypress' />
export class RegistrationPage {
// selectors
firstnameInput = '#registerFirstname'
lastnameInput = '#registerLastname'
emailInput = '#Email-input-field'
checkbox = '#registerCheckbox'
submitBtn = '[type=submit]'
RegistrationThanxHeadline = '.test-message-headline'
RegistrationThanxText = '.test-message-subtitle'
goto() {
cy.visit('/register')
return this
}
enterFirstname(firstname: string) {
cy.get(this.firstnameInput).clear().type(firstname)
return this
}
enterLastname(lastname: string) {
cy.get(this.lastnameInput).clear().type(lastname)
return this
}
enterEmail(email: string) {
cy.get(this.emailInput).clear().type(email)
return this
}
checkPrivacyCheckbox() {
cy.get(this.checkbox).click({ force: true })
}
submitRegistrationPage() {
cy.get(this.submitBtn).should('be.enabled')
cy.get(this.submitBtn).click()
}
}

View File

@ -0,0 +1,32 @@
/// <reference types='cypress' />
export class ResetPasswordPage {
// selectors
newPasswordBlock = '#new-password-input-field'
newPasswordRepeatBlock = '#repeat-new-password-input-field'
resetPasswordBtn = 'button[type=submit]'
resetPasswordMessageBlock = '[data-test="reset-password-message"]'
signinBtn = '.btn.test-message-button'
enterNewPassword(password: string) {
cy.get(this.newPasswordBlock).find('input[type=password]').type(password)
return this
}
repeatNewPassword(password: string) {
cy.get(this.newPasswordRepeatBlock)
.find('input[type=password]')
.type(password)
return this
}
submitNewPassword() {
cy.get(this.resetPasswordBtn).click()
return this
}
openSigninPage() {
cy.get(this.signinBtn).click()
return this
}
}

View File

@ -0,0 +1,17 @@
/// <reference types='cypress' />
export class SideNavMenu {
// selectors
profileMenu = '[data-test=profile-menu]'
logoutMenu = '[data-test=logout-menu]'
openUserProfile() {
cy.get(this.profileMenu).click()
return this
}
logout() {
cy.get(this.logoutMenu).click()
return this
}
}

View File

@ -0,0 +1,10 @@
/// <reference types='cypress' />
export class Toasts {
// selectors
toastSlot = '.b-toaster-slot'
toastTypeSuccess = '.b-toast-success'
toastTypeError = '.b-toast-danger'
toastTitle = '.gdd-toaster-title'
toastMessage = '.gdd-toaster-body'
}

View File

@ -0,0 +1,17 @@
/// <reference types='cypress' />
export class UserEMailSite {
// selectors
emailInbox = '.sidebar-emails-container'
emailList = '.email-list'
emailMeta = '.email-meta'
emailSubject = '.subject'
openRecentPasswordResetEMail() {
cy.get(this.emailList)
.find('.email-item')
.filter(':contains(asswor)')
.click()
cy.get(this.emailSubject).should('contain', 'asswor')
}
}

View File

@ -0,0 +1,40 @@
import jwtDecode from 'jwt-decode'
Cypress.Commands.add('login', (email, password) => {
cy.clearLocalStorage('vuex')
cy.request({
method: 'POST',
url: Cypress.env('backendURL'),
body: {
operationName: null,
variables: {
email: email,
password: password,
},
query: Cypress.env('loginQuery'),
},
}).then(async (response) => {
const tokens = response.headers.token
const token = Array.isArray(tokens) ? tokens[0] : tokens
let tokenTime
if (!token) return
// to avoid JWT InvalidTokenError, the decoding of the token is wrapped
// in a try-catch block (see
// https://github.com/auth0/jwt-decode/issues/65#issuecomment-395493807)
try {
tokenTime = jwtDecode(token).exp
} catch (tokenDecodingError) {
cy.log('JWT decoding error: ', tokenDecodingError)
}
const vuexToken = {
token: token,
tokenTime: tokenTime,
}
cy.visit('/')
window.localStorage.setItem('vuex', JSON.stringify(vuexToken))
})
})

View File

@ -1,14 +1,14 @@
/* eslint-disable @typescript-eslint/no-namespace */
/* eslint-disable @typescript-eslint/no-explicit-any */
/// <reference types="cypress" />
/// <reference types='cypress' />
import "./e2e";
import './e2e'
declare global {
namespace Cypress {
interface Chainable<Subject> {
login(email: string, password: string): Chainable<any>;
login(email: string, password: string): Chainable<any>
}
}
}

View File

@ -0,0 +1,39 @@
import { Given, Then } from '@badeball/cypress-cucumber-preprocessor'
import { OverviewPage } from '../../e2e/models/OverviewPage'
import { SideNavMenu } from '../../e2e/models/SideNavMenu'
import { Toasts } from '../../e2e/models/Toasts'
Given('the user navigates to page {string}', (page: string) => {
cy.visit(page)
})
// login related
Given(
'the user is logged in as {string} {string}',
(email: string, password: string) => {
cy.login(email, password)
}
)
Then('the user is logged in with username {string}', (username: string) => {
const overviewPage = new OverviewPage()
cy.url().should('include', '/overview')
cy.get(overviewPage.navbarName).should('contain', username)
})
Then('the user cannot login', () => {
const toast = new Toasts()
cy.get(toast.toastSlot).within(() => {
cy.get(toast.toastTypeError)
cy.get(toast.toastTitle).should('be.visible')
cy.get(toast.toastMessage).should('be.visible')
})
})
// logout
Then('the user logs out', () => {
const sideNavMenu = new SideNavMenu()
sideNavMenu.logout()
})

View File

@ -0,0 +1,45 @@
import { Then, When } from '@badeball/cypress-cucumber-preprocessor'
import { ResetPasswordPage } from '../../e2e/models/ResetPasswordPage'
import { UserEMailSite } from '../../e2e/models/UserEMailSite'
const userEMailSite = new UserEMailSite()
const resetPasswordPage = new ResetPasswordPage()
Then('the user receives an e-mail containing the password reset link', () => {
cy.origin(
Cypress.env('mailserverURL'),
{ args: userEMailSite },
(userEMailSite) => {
const linkPattern = /\/reset-password\/[0-9]+\d/
cy.visit('/') // navigate to the user's e-mail site (on the fake mail server)
cy.get(userEMailSite.emailInbox).should('be.visible')
cy.get(userEMailSite.emailList)
.find('.email-item')
.filter(':contains(asswor)')
.first()
.click()
cy.get(userEMailSite.emailMeta)
.find(userEMailSite.emailSubject)
.contains('asswor')
cy.get('.email-content')
.find('.plain-text')
.contains(linkPattern)
.invoke('text')
.then((text) => {
const resetPasswordLink = text.match(linkPattern)[0]
cy.task('setResetPasswordLink', resetPasswordLink)
})
}
)
})
When('the user opens the password reset link in the browser', () => {
cy.task('getResetPasswordLink').then((passwordResetLink) => {
cy.visit(passwordResetLink)
})
cy.get(resetPasswordPage.newPasswordRepeatBlock).should('be.visible')
})

View File

@ -0,0 +1,69 @@
import { When, And } from '@badeball/cypress-cucumber-preprocessor'
import { ForgotPasswordPage } from '../../e2e/models/ForgotPasswordPage'
import { LoginPage } from '../../e2e/models/LoginPage'
import { ResetPasswordPage } from '../../e2e/models/ResetPasswordPage'
const loginPage = new LoginPage()
const forgotPasswordPage = new ForgotPasswordPage()
const resetPasswordPage = new ResetPasswordPage()
// login related
When('the user submits no credentials', () => {
loginPage.submitLogin()
})
When(
'the user submits the credentials {string} {string}',
(email: string, password: string) => {
cy.intercept('POST', '/graphql', (req) => {
if (
req.body.hasOwnProperty('query') &&
req.body.query.includes('mutation')
) {
req.alias = 'login'
}
})
loginPage.enterEmail(email)
loginPage.enterPassword(password)
loginPage.submitLogin()
cy.wait('@login').then((interception) => {
expect(interception.response.statusCode).equals(200)
})
}
)
// password reset related
And('the user navigates to the forgot password page', () => {
loginPage.openForgotPasswordPage()
cy.url().should('include', '/forgot-password')
})
When('the user enters the e-mail address {string}', (email: string) => {
forgotPasswordPage.enterEmail(email)
})
And('the user submits the e-mail form', () => {
forgotPasswordPage.submitEmail()
cy.get(forgotPasswordPage.successComponent).should('be.visible')
})
And('the user enters the password {string}', (password: string) => {
resetPasswordPage.enterNewPassword(password)
})
And('the user repeats the password {string}', (password: string) => {
resetPasswordPage.repeatNewPassword(password)
})
And('the user submits the new password', () => {
resetPasswordPage.submitNewPassword()
cy.get(resetPasswordPage.resetPasswordMessageBlock).should('be.visible')
})
And('the user clicks the sign in button', () => {
resetPasswordPage.openSigninPage()
cy.url().should('contain', '/login')
})

View File

@ -0,0 +1,32 @@
import { And, When } from '@badeball/cypress-cucumber-preprocessor'
import { ProfilePage } from '../../e2e/models/ProfilePage'
import { Toasts } from '../../e2e/models/Toasts'
const profilePage = new ProfilePage()
And('the user opens the change password menu', () => {
cy.get(profilePage.openChangePassword).click()
cy.get(profilePage.newPasswordRepeatInput).should('be.visible')
cy.get(profilePage.submitNewPasswordBtn).should('be.disabled')
})
When('the user fills the password form with:', (table) => {
const hashedTableRows = table.rowsHash()
profilePage.enterOldPassword(hashedTableRows['Old password'])
profilePage.enterNewPassword(hashedTableRows['New password'])
profilePage.enterRepeatPassword(hashedTableRows['Repeat new password'])
cy.get(profilePage.submitNewPasswordBtn).should('be.enabled')
})
And('the user submits the password form', () => {
profilePage.submitPasswordForm()
})
When('the user is presented a {string} message', (type: string) => {
const toast = new Toasts()
cy.get(toast.toastSlot).within(() => {
cy.get(toast.toastTypeSuccess)
cy.get(toast.toastTitle).should('be.visible')
cy.get(toast.toastMessage).should('be.visible')
})
})

View File

@ -0,0 +1,24 @@
import { And, When } from '@badeball/cypress-cucumber-preprocessor'
import { RegistrationPage } from '../../e2e/models/RegistrationPage'
const registrationPage = new RegistrationPage()
When(
'the user fills name and email {string} {string} {string}',
(firstname: string, lastname: string, email: string) => {
registrationPage.enterFirstname(firstname)
registrationPage.enterLastname(lastname)
registrationPage.enterEmail(email)
}
)
And('the user agrees to the privacy policy', () => {
registrationPage.checkPrivacyCheckbox()
})
And('the user submits the registration form', () => {
registrationPage.submitRegistrationPage()
cy.get(registrationPage.RegistrationThanxHeadline).should('be.visible')
cy.get(registrationPage.RegistrationThanxText).should('be.visible')
})

View File

@ -1,24 +0,0 @@
module.exports = {
root: true,
env: {
node: true,
},
parser: "@typescript-eslint/parser",
plugins: ["cypress", "prettier", "@typescript-eslint"],
extends: [
"standard",
"eslint:recommended",
"plugin:prettier/recommended",
"plugin:@typescript-eslint/recommended",
],
rules: {
"no-console": ["error"],
"no-debugger": "error",
"prettier/prettier": [
"error",
{
htmlWhitespaceSensitivity: "ignore",
},
],
},
};

View File

@ -1,66 +0,0 @@
import { defineConfig } from "cypress";
import { addCucumberPreprocessorPlugin } from "@badeball/cypress-cucumber-preprocessor";
import browserify from "@badeball/cypress-cucumber-preprocessor/browserify";
async function setupNodeEvents(
on: Cypress.PluginEvents,
config: Cypress.PluginConfigOptions
): Promise<Cypress.PluginConfigOptions> {
await addCucumberPreprocessorPlugin(on, config);
on(
"file:preprocessor",
browserify(config, {
typescript: require.resolve("typescript"),
})
);
on("after:run", (results) => {
if (results) {
// results will be undefined in interactive mode
// eslint-disable-next-line no-console
console.log(results.status);
}
});
return config;
}
export default defineConfig({
e2e: {
specPattern: "**/*.feature",
excludeSpecPattern: "*.js",
baseUrl: "http://localhost:3000",
chromeWebSecurity: false,
defaultCommandTimeout: 10000,
supportFile: "cypress/support/index.ts",
viewportHeight: 720,
viewportWidth: 1280,
video: false,
retries: {
runMode: 2,
openMode: 0,
},
env: {
backendURL: "http://localhost:4000",
loginQuery: `query ($email: String!, $password: String!, $publisherId: Int) {
login(email: $email, password: $password, publisherId: $publisherId) {
email
firstName
lastName
language
klickTipp {
newsletterState
__typename
}
hasElopage
publisherId
isAdmin
creation
__typename
}
}`,
},
setupNodeEvents,
},
});

View File

@ -1,30 +0,0 @@
/// <reference types="cypress" />
export class LoginPage {
// selectors
emailInput = "input[type=email]";
passwordInput = "input[type=password]";
submitBtn = "[type=submit]";
emailHint = "#vee_Email";
passwordHint = "#vee_Password";
goto() {
cy.visit("/");
return this;
}
enterEmail(email: string) {
cy.get(this.emailInput).clear().type(email);
return this;
}
enterPassword(password: string) {
cy.get(this.passwordInput).clear().type(password);
return this;
}
submitLogin() {
cy.get(this.submitBtn).click();
return this;
}
}

View File

@ -1,10 +0,0 @@
/// <reference types="cypress" />
export class OverviewPage {
navbarName = '[data-test="navbar-item-username"]';
goto() {
cy.visit("/overview");
return this;
}
}

View File

@ -1,35 +0,0 @@
/// <reference types="cypress" />
export class ProfilePage {
// selectors
openChangePassword = "[data-test=open-password-change-form]";
oldPasswordInput = "#password-input-field";
newPasswordInput = "#new-password-input-field";
newPasswordRepeatInput = "#repeat-new-password-input-field";
submitNewPasswordBtn = "[data-test=submit-new-password-btn]";
goto() {
cy.visit("/profile");
return this;
}
enterOldPassword(password: string) {
cy.get(this.oldPasswordInput).clear().type(password);
return this;
}
enterNewPassword(password: string) {
cy.get(this.newPasswordInput).find("input").clear().type(password);
return this;
}
enterRepeatPassword(password: string) {
cy.get(this.newPasswordRepeatInput).find("input").clear().type(password);
return this;
}
submitPasswordForm() {
cy.get(this.submitNewPasswordBtn).click();
return this;
}
}

View File

@ -1,42 +0,0 @@
/// <reference types="cypress" />
export class RegistrationPage {
// selectors
firstnameInput = "#registerFirstname";
lastnameInput = "#registerLastname";
emailInput = "#Email-input-field";
checkbox = "#registerCheckbox";
submitBtn = "[type=submit]";
RegistrationThanxHeadline = ".test-message-headline";
RegistrationThanxText = ".test-message-subtitle";
goto() {
cy.visit("/register");
return this;
}
enterFirstname(firstname: string) {
cy.get(this.firstnameInput).clear().type(firstname);
return this;
}
enterLastname(lastname: string) {
cy.get(this.lastnameInput).clear().type(lastname);
return this;
}
enterEmail(email: string) {
cy.get(this.emailInput).clear().type(email);
return this;
}
checkPrivacyCheckbox() {
cy.get(this.checkbox).click({ force: true });
}
submitRegistrationPage() {
cy.get(this.submitBtn).should("be.enabled");
cy.get(this.submitBtn).click();
}
}

View File

@ -1,17 +0,0 @@
/// <reference types="cypress" />
export class SideNavMenu {
// selectors
profileMenu = "[data-test=profile-menu]";
logoutMenu = "[data-test=logout-menu]";
openUserProfile() {
cy.get(this.profileMenu).click();
return this;
}
logout() {
cy.get(this.logoutMenu).click();
return this;
}
}

View File

@ -1,10 +0,0 @@
/// <reference types="cypress" />
export class Toasts {
// selectors
toastSlot = ".b-toaster-slot";
toastTypeSuccess = ".b-toast-success";
toastTypeError = ".b-toast-danger";
toastTitle = ".gdd-toaster-title";
toastMessage = ".gdd-toaster-body";
}

View File

@ -1,38 +0,0 @@
import jwtDecode from "jwt-decode";
Cypress.Commands.add("login", (email, password) => {
cy.clearLocalStorage("vuex");
cy.request({
method: "POST",
url: Cypress.env("backendURL"),
body: {
operationName: null,
variables: {
email: email,
password: password,
},
query: Cypress.env("loginQuery"),
},
}).then(async (response) => {
const token = response.headers.token;
let tokenTime;
// to avoid JWT InvalidTokenError, the decoding of the token is wrapped
// in a try-catch block (see
// https://github.com/auth0/jwt-decode/issues/65#issuecomment-395493807)
try {
tokenTime = jwtDecode(token).exp;
} catch (tokenDecodingError) {
cy.log("JWT decoding error: ", tokenDecodingError);
}
const vuexToken = {
token: token,
tokenTime: tokenTime,
};
cy.visit("/");
window.localStorage.setItem("vuex", JSON.stringify(vuexToken));
});
});

View File

@ -1,52 +0,0 @@
import { Given, Then, When } from "@badeball/cypress-cucumber-preprocessor";
import { LoginPage } from "../../e2e/models/LoginPage";
import { OverviewPage } from "../../e2e/models/OverviewPage";
import { SideNavMenu } from "../../e2e/models/SideNavMenu";
import { Toasts } from "../../e2e/models/Toasts";
Given("the browser navigates to page {string}", (page: string) => {
cy.visit(page);
});
// login-related
Given(
"the user is logged in as {string} {string}",
(email: string, password: string) => {
cy.login(email, password);
}
);
Then("the user is logged in with username {string}", (username: string) => {
const overviewPage = new OverviewPage();
cy.url().should("include", "/overview");
cy.get(overviewPage.navbarName).should("contain", username);
});
Then("the user cannot login", () => {
const toast = new Toasts();
cy.get(toast.toastSlot).within(() => {
cy.get(toast.toastTypeError);
cy.get(toast.toastTitle).should("be.visible");
cy.get(toast.toastMessage).should("be.visible");
});
});
//
When(
"the user submits the credentials {string} {string}",
(email: string, password: string) => {
const loginPage = new LoginPage();
loginPage.enterEmail(email);
loginPage.enterPassword(password);
loginPage.submitLogin();
}
);
// logout
Then("the user logs out", () => {
const sideNavMenu = new SideNavMenu();
sideNavMenu.logout();
});

View File

@ -1,7 +0,0 @@
import { When } from "@badeball/cypress-cucumber-preprocessor";
import { LoginPage } from "../../e2e/models/LoginPage";
When("the user submits no credentials", () => {
const loginPage = new LoginPage();
loginPage.submitLogin();
});

View File

@ -1,32 +0,0 @@
import { And, When } from "@badeball/cypress-cucumber-preprocessor";
import { ProfilePage } from "../../e2e/models/ProfilePage";
import { Toasts } from "../../e2e/models/Toasts";
const profilePage = new ProfilePage();
And("the user opens the change password menu", () => {
cy.get(profilePage.openChangePassword).click();
cy.get(profilePage.newPasswordRepeatInput).should("be.visible");
cy.get(profilePage.submitNewPasswordBtn).should("be.disabled");
});
When("the user fills the password form with:", (table) => {
table = table.rowsHash();
profilePage.enterOldPassword(table["Old password"]);
profilePage.enterNewPassword(table["New password"]);
profilePage.enterRepeatPassword(table["Repeat new password"]);
cy.get(profilePage.submitNewPasswordBtn).should("be.enabled");
});
And("the user submits the password form", () => {
profilePage.submitPasswordForm();
});
When("the user is presented a {string} message", (type: string) => {
const toast = new Toasts();
cy.get(toast.toastSlot).within(() => {
cy.get(toast.toastTypeSuccess);
cy.get(toast.toastTitle).should("be.visible");
cy.get(toast.toastMessage).should("be.visible");
});
});

View File

@ -1,24 +0,0 @@
import { And, When } from "@badeball/cypress-cucumber-preprocessor";
import { RegistrationPage } from "../../e2e/models/RegistrationPage";
const registrationPage = new RegistrationPage();
When(
"the user fills name and email {string} {string} {string}",
(firstname: string, lastname: string, email: string) => {
const registrationPage = new RegistrationPage();
registrationPage.enterFirstname(firstname);
registrationPage.enterLastname(lastname);
registrationPage.enterEmail(email);
}
);
And("the user agrees to the privacy policy", () => {
registrationPage.checkPrivacyCheckbox();
});
And("the user submits the registration form", () => {
registrationPage.submitRegistrationPage();
cy.get(registrationPage.RegistrationThanxHeadline).should("be.visible");
cy.get(registrationPage.RegistrationThanxText).should("be.visible");
});

View File

@ -1,10 +0,0 @@
{
"compilerOptions": {
"target": "es2016",
"lib": ["es6", "dom"],
"baseUrl": "../node_modules",
"types": ["cypress", "node"],
"strict": true
},
"include": ["**/*.ts"]
}

16
e2e-tests/tsconfig.json Normal file
View File

@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "es6",
"lib": ["es6", "dom"],
"baseUrl": ".",
"types": ["cypress", "node"],
"strict": true,
"esModuleInterop": true,
"moduleResolution": "node",
"paths": {
"@/*": ["cypress/*"],
"@models/*": ["cypress/e2e/models/*"],
}
},
"include": ["**/*.ts"],
}
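With the `paths` mapping above, test files could import the page objects through the `@models` alias instead of relative paths; a hypothetical sketch (this assumes the Cypress preprocessor/bundler also resolves the tsconfig aliases, which is not shown in this diff):

```ts
// hypothetical alias imports; the step definitions in this diff still use relative paths
import { LoginPage } from '@models/LoginPage'
import { Toasts } from '@/e2e/models/Toasts' // '@/*' maps to 'cypress/*'
```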

116
federation/Dockerfile Normal file
View File

@ -0,0 +1,116 @@
##################################################################################
# BASE ###########################################################################
##################################################################################
FROM node:18.7.0-alpine3.16 as base
# ENVs (available in production as well, can be overwritten via command line or env file)
## DOCKER_WORKDIR would be a classical ARG, but that is not multi layer persistent - shame
ENV DOCKER_WORKDIR="/app"
## We cannot do `$(date -u +'%Y-%m-%dT%H:%M:%SZ')` here, so we use unix timestamp 0
ENV BUILD_DATE="1970-01-01T00:00:00.00Z"
## We cannot do $(npm run version).${BUILD_NUMBER} here so we default to 0.0.0.0
ENV BUILD_VERSION="0.0.0.0"
## We cannot do `$(git rev-parse --short HEAD)` here so we default to 0000000
ENV BUILD_COMMIT="0000000"
## SET NODE_ENV
ENV NODE_ENV="production"
## App relevant Envs
ENV PORT="5010"
# ENV PORT="${env.FEDERATION_PORT}"
# Labels
LABEL org.label-schema.build-date="${BUILD_DATE}"
LABEL org.label-schema.name="gradido:federation"
LABEL org.label-schema.description="Gradido GraphQL Federation"
LABEL org.label-schema.usage="https://github.com/gradido/gradido/blob/master/README.md"
LABEL org.label-schema.url="https://gradido.net"
LABEL org.label-schema.vcs-url="https://github.com/gradido/gradido/tree/master/federation"
LABEL org.label-schema.vcs-ref="${BUILD_COMMIT}"
LABEL org.label-schema.vendor="Gradido Community"
LABEL org.label-schema.version="${BUILD_VERSION}"
LABEL org.label-schema.schema-version="1.0"
LABEL maintainer="support@gradido.net"
# Install Additional Software
## install: git
#RUN apk --no-cache add git
# Settings
## Expose Container Port
EXPOSE ${PORT}
## Workdir
RUN mkdir -p ${DOCKER_WORKDIR}
WORKDIR ${DOCKER_WORKDIR}
RUN mkdir -p /database
##################################################################################
# DEVELOPMENT (Connected to the local environment, to reload on demand) ##########
##################################################################################
FROM base as development
# We don't need to copy or build anything since we are going to bind to the
# local filesystem, which will need a rebuild anyway
# Run command
# (for development we need to execute yarn install since the
# node_modules are on another volume and need updating)
CMD /bin/sh -c "cd /database && yarn install && yarn build && cd /app && yarn install && yarn run dev"
##################################################################################
# BUILD (Does contain all files and is therefore bloated) ########################
##################################################################################
FROM base as build
# Copy everything from federation
COPY ./federation/ ./
# Copy everything from database
COPY ./database/ ../database/
# yarn install federation
RUN yarn install --production=false --frozen-lockfile --non-interactive
# yarn install database
RUN cd ../database && yarn install --production=false --frozen-lockfile --non-interactive
# yarn build
RUN yarn run build
# yarn build database
RUN cd ../database && yarn run build
##################################################################################
# TEST ###########################################################################
##################################################################################
FROM build as test
# Run command
CMD /bin/sh -c "yarn run start"
##################################################################################
# PRODUCTION (Does contain only "binary"- and static-files to reduce image size) #
##################################################################################
FROM base as production
# Copy "binary"-files from build image
COPY --from=build ${DOCKER_WORKDIR}/build ./build
COPY --from=build ${DOCKER_WORKDIR}/../database/build ../database/build
# We also copy the node_modules express and serve-static for the run script
COPY --from=build ${DOCKER_WORKDIR}/node_modules ./node_modules
COPY --from=build ${DOCKER_WORKDIR}/../database/node_modules ../database/node_modules
# Copy static files
# COPY --from=build ${DOCKER_WORKDIR}/public ./public
# Copy package.json for script definitions (lock file should not be needed)
COPY --from=build ${DOCKER_WORKDIR}/package.json ./package.json
# Copy tsconfig.json to provide alias path definitions
COPY --from=build ${DOCKER_WORKDIR}/tsconfig.json ./tsconfig.json
# Copy log4js-config.json to provide log configuration
COPY --from=build ${DOCKER_WORKDIR}/log4js-config.json ./log4js-config.json
# Copy run scripts run/
# COPY --from=build ${DOCKER_WORKDIR}/run ./run
# Run command
CMD /bin/sh -c "yarn run start"

32
federation/jest.config.js Normal file
View File

@ -0,0 +1,32 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
module.exports = {
verbose: true,
preset: 'ts-jest',
collectCoverage: true,
collectCoverageFrom: [
'src/**/*.ts',
'!**/node_modules/**',
'!src/seeds/**',
'!build/**',
],
setupFiles: ['<rootDir>/test/testSetup.ts'],
setupFilesAfterEnv: [],
modulePathIgnorePatterns: ['<rootDir>/build/'],
moduleNameMapper: {
'@/(.*)': '<rootDir>/src/$1',
'@arg/(.*)': '<rootDir>/src/graphql/arg/$1',
'@enum/(.*)': '<rootDir>/src/graphql/enum/$1',
'@model/(.*)': '<rootDir>/src/graphql/model/$1',
'@union/(.*)': '<rootDir>/src/graphql/union/$1',
'@repository/(.*)': '<rootDir>/src/typeorm/repository/$1',
'@test/(.*)': '<rootDir>/test/$1',
'@entity/(.*)':
process.env.NODE_ENV === 'development'
? '<rootDir>/../database/entity/$1'
: '<rootDir>/../database/build/entity/$1',
'@dbTools/(.*)':
process.env.NODE_ENV === 'development'
? '<rootDir>/../database/src/$1'
: '<rootDir>/../database/build/src/$1',
},
}
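For reference, the `moduleNameMapper` entries above let federation tests import through the same aliases the source code uses; a minimal hypothetical sketch (the `User` entity name is an assumption about the shared database package):

```ts
// hypothetical jest spec relying on the alias resolution configured above;
// '@entity/User' resolves into ../database (source or build, depending on NODE_ENV)
import { User } from '@entity/User'

describe('module alias resolution', () => {
  it('resolves the @entity alias', () => {
    expect(User).toBeDefined()
  })
})
```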

View File

@ -11,6 +11,7 @@
"build": "tsc --build",
"clean": "tsc --build --clean",
"start": "cross-env TZ=UTC TS_NODE_BASEURL=./build node -r tsconfig-paths/register build/src/index.js",
"test": "cross-env TZ=UTC NODE_ENV=development jest --runInBand --coverage --forceExit --detectOpenHandles",
"dev": "cross-env TZ=UTC nodemon -w src --ext ts --exec ts-node -r dotenv/config -r tsconfig-paths/register src/index.ts",
"lint": "eslint --max-warnings=0 --ext .js,.ts ."
},
@ -26,15 +27,16 @@
"lodash.clonedeep": "^4.5.0",
"log4js": "^6.7.1",
"reflect-metadata": "^0.1.13",
"ts-node": "^10.9.1",
"tsconfig-paths": "^4.1.1",
"type-graphql": "^1.1.1"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^4.28.0",
"@typescript-eslint/parser": "^4.28.0",
"@types/express": "4.17.12",
"@types/jest": "27.0.2",
"@types/lodash.clonedeep": "^4.5.6",
"@types/node": "^16.10.3",
"@typescript-eslint/eslint-plugin": "^4.28.0",
"@typescript-eslint/parser": "^4.28.0",
"apollo-server-testing": "2.25.2",
"eslint": "^7.29.0",
"eslint-config-prettier": "^8.3.0",
"eslint-config-standard": "^16.0.3",
@ -42,8 +44,15 @@
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^3.4.0",
"eslint-plugin-promise": "^5.1.0",
"jest": "27.2.4",
"ts-jest": "27.0.5",
"ts-node": "^10.9.1",
"tsconfig-paths": "^4.1.1",
"nodemon": "^2.0.7",
"prettier": "^2.3.1",
"typescript": "^4.3.4",
"nodemon": "^2.0.7"
"typescript": "^4.3.4"
},
"nodemonConfig": {
"ignore": ["**/*.test.ts"]
}
}

View File

@ -11,7 +11,7 @@ Decimal.set({
*/
const constants = {
DB_VERSION: '0059-add_hide_amount_to_users',
DB_VERSION: '0060-update_communities_table',
// DECAY_START_TIME: new Date('2021-05-13 17:46:31-0000'), // GMT+0
LOG4JS_CONFIG: 'log4js-config.json',
// default log level on production should be info
@ -24,7 +24,7 @@ const constants = {
}
const server = {
PORT: process.env.PORT || 5000,
PORT: process.env.PORT || 5010,
// JWT_SECRET: process.env.JWT_SECRET || 'secret123',
// JWT_EXPIRES_IN: process.env.JWT_EXPIRES_IN || '10m',
GRAPHIQL: process.env.GRAPHIQL === 'true' || false,
@ -73,7 +73,7 @@ if (
const federation = {
// FEDERATION_DHT_TOPIC: process.env.FEDERATION_DHT_TOPIC || null,
// FEDERATION_DHT_SEED: process.env.FEDERATION_DHT_SEED || null,
FEDERATION_PORT: process.env.FEDERATION_PORT || 5000,
FEDERATION_PORT: process.env.FEDERATION_PORT || 5010,
FEDERATION_API: process.env.FEDERATION_API || '1_0',
FEDERATION_COMMUNITY_URL: process.env.FEDERATION_COMMUNITY_URL || null,
}

Some files were not shown because too many files have changed in this diff.