Merge branch 'master' into 2019/kw15/User_can_change_its_username_to_emptystring

Ulf Gebhardt 2019-05-09 08:26:53 +02:00
commit 7983a93fbd
No known key found for this signature in database
GPG Key ID: 44C888923CC8E7F3
28 changed files with 672 additions and 497 deletions

.codecov.yml

@ -0,0 +1,169 @@
codecov:
#token: uuid # Your private repository token
#url: "http" # for Codecov Enterprise customers
#slug: "owner/repo" # for Codecov Enterprise customers
#branch: master # override the default branch
#bot: username # set the user who will be the consumer of oauth requests
#ci: # Custom CI domains if Codecov does not identify them automatically
# - ci.domain.com
# - !provider # ignore these providers when checking if CI passed
# # ex. You may test on Travis, Circle, and AppVeyor, but only need
# # to check if Travis passes. Therefore add: !circle and !appveyor
notify:
#after_n_builds: null # number of expected builds to receive before sending notifications
# # after: check ci status unless disabled via require_ci_to_pass
require_ci_to_pass: yes # yes: will delay sending notifications until all ci is finished
# no: will send notifications without checking ci status and wait till "after_n_builds" are uploaded
#countdown: null # number of seconds to wait before first ci build check
#delay: null # number of seconds to wait between ci build checks
coverage:
precision: 2 # 2 = xx.xx%, 0 = xx%
round: nearest # down|up|nearest - default down
# range: 50...60 # default 70...90. red...green
#notify:
# irc:
# default:
# server: "chat.freenode.net"|encrypted
# branches: null # all branches by default
# threshold: 1%
# message: "Coverage {{changed}} for {{owner}}/{{repo}}" # customize the message
# flags: null
# paths: null
#
# slack:
# default:
# url: "http"|encrypted
# threshold: 1%
# branches: null # all branches by default
# message: "Coverage {{changed}} for {{owner}}/{{repo}}" # customize the message
# attachments: "sunburst, diff"
# only_pulls: false
# flags: null
# paths: null
#
# email:
# default:
# to:
# - example@domain.com
# - &author
# threshold: 1%
# only_pulls: false
# layout: header, diff, trends
# flags: null
# paths: null
#
# hipchat:
# default:
# url: "http"|encrypted
# room: name|id
# threshold: 1%
# token: encrypted
# branches: null # all branches by default
# notify: false # whether the hipchat message triggers a notification; default false (silent)
# message: "Coverage {{changed}} for {{owner}}/{{repo}}" # customize the message
# flags: null
# paths: null
#
# gitter:
# url: "http"|encrypted
# threshold: 1%
# branches: null # all branches by default
# message: "Coverage {{changed}} for {{owner}}/{{repo}}" # customize the message
#
# webhooks:
# _name_:
# url: "http"|encrypted
# threshold: 1%
# branches: null # all branches by default
status:
project:
default: false # disable the default status that measures entire project
backend: # declare a new status context "backend"
against: parent
target: auto
threshold: null
#threshold: 1%
base: auto
if_no_uploads: error
if_not_found: success
if_ci_failed: error
only_pulls: false
#branches:
# - master
#flags:
# - integration
paths:
- backend/ # only include coverage in "backend/" folder
webapp: # declare a new status context "webapp"
against: parent
target: auto
threshold: null
#threshold: 1%
base: auto
if_no_uploads: error
if_not_found: success
if_ci_failed: error
only_pulls: false
#branches:
# - master
#flags:
# - integration
paths:
- webapp/ # only include coverage in "webapp/" folder
patch:
default: false
# against: parent
# target: 80%
# branches: null
# if_no_uploads: success
# if_not_found: success
# if_ci_failed: error
# only_pulls: false
# flags:
# - integration
# paths:
# - folder
#changes:
# default:
# against: parent
# branches: null
# if_no_uploads: error
# if_not_found: success
# if_ci_failed: error
# only_pulls: false
# flags:
# - integration
# paths:
# - folder
#flags:
# integration:
# branches:
# - master
# ignore:
# - app/ui
#ignore: # files and folders for processing
# - tests/*
#fixes:
# - "old_path::new_path"
comment:
# layout options are quite limited in v4.x; there were many more options in v1.0
layout: reach, diff, flags, files # mostly old options: header, diff, uncovered, reach, files, tree, changes, sunburst, flags
behavior: new # default = update the comment if it exists, otherwise post a new one
# once = update if it exists, otherwise post once
# new = delete the old comment, post a new one
# spammy = always post a new comment
require_changes: false # if true: only post the comment if coverage changes
require_base: no # [yes :: must have a base report to post]
require_head: no # [yes :: must have a head report to post]
branches: null # branch names that can post comment
flags: null
paths: null
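
The two path-scoped status contexts above only have something to measure once coverage reports covering those paths have been uploaded. A minimal sketch of such an upload, assuming the npm codecov uploader that the Travis config further down installs; the report paths are illustrative:

# Illustrative: upload both lcov reports so the path-scoped "backend" and
# "webapp" statuses each have coverage data to evaluate.
codecov -f backend/coverage/lcov.info
codecov -f webapp/coverage/lcov.info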


@ -10,6 +10,8 @@ addons:
before_install:
- yarn global add wait-on
# Install Codecov
- yarn global add codecov
- yarn install
- cp cypress.env.template.json cypress.env.json
@ -18,6 +20,7 @@ install:
- wait-on http://localhost:7474 && docker-compose exec neo4j migrate
script:
# Backend
- docker-compose exec backend yarn run lint
- docker-compose exec backend yarn run test:jest --ci --verbose=false
- docker-compose exec backend yarn run db:reset
@ -25,10 +28,14 @@ script:
- docker-compose exec backend yarn run test:cucumber
- docker-compose exec backend yarn run db:reset
- docker-compose exec backend yarn run db:seed
# Frontend
- docker-compose exec webapp yarn run lint
- docker-compose exec webapp yarn run test --ci --verbose=false
- docker-compose exec -d backend yarn run test:before:seeder
# Fullstack
- CYPRESS_RETRIES=1 yarn run cypress:run
# Coverage
- codecov
after_success:
- wget https://raw.githubusercontent.com/DiscordHooks/travis-ci-discord-webhook/master/send.sh


@ -1,9 +0,0 @@
version: "3.7"
services:
neo4j:
environment:
- NEO4J_PASSWORD=letmein
backend:
environment:
- NEO4J_PASSWORD=letmein


@ -27,7 +27,17 @@
"jest": {
"verbose": true,
"collectCoverage": true,
"coverageReporters": ["text", "lcov"],
"collectCoverageFrom": [
"**/*.js",
"!**/node_modules/**",
"!**/test/**",
"!**/dist/**",
"!**/src/**/?(*.)+(spec|test).js?(x)"
],
"coverageReporters": [
"text",
"lcov"
],
"testMatch": [
"**/src/**/?(*.)+(spec|test).js?(x)"
]
@ -38,7 +48,7 @@
"apollo-client": "~2.5.1",
"apollo-link-context": "~1.0.14",
"apollo-link-http": "~1.5.14",
"apollo-server": "~2.4.8",
"apollo-server": "~2.5.0",
"bcryptjs": "~2.4.3",
"cheerio": "~1.0.0-rc.3",
"cors": "~2.8.5",
@ -88,11 +98,11 @@
"eslint-config-standard": "~12.0.0",
"eslint-plugin-import": "~2.17.2",
"eslint-plugin-jest": "~22.5.1",
"eslint-plugin-node": "~8.0.1",
"eslint-plugin-node": "~9.0.1",
"eslint-plugin-promise": "~4.1.1",
"eslint-plugin-standard": "~4.0.0",
"graphql-request": "~1.8.2",
"jest": "~24.7.1",
"jest": "~24.8.0",
"nodemon": "~1.19.0",
"supertest": "~4.0.2"
}

File diff suppressed because it is too large


@ -1,6 +0,0 @@
#!/usr/bin/env bash
set -e
mkdir -p ~/.ssh
echo $SSH_PRIVATE_KEY | base64 -d > ~/.ssh/id_rsa
chmod 600 ~/.ssh/id_rsa


@ -0,0 +1,2 @@
#!/usr/bin/env bash
tail -f /dev/null


@ -1,6 +1,6 @@
#!/usr/bin/env bash
set -e
for var in "SSH_USERNAME" "SSH_HOST" "MONGODB_USERNAME" "MONGODB_PASSWORD" "MONGODB_DATABASE" "MONGODB_AUTH_DB" "NEO4J_URI"
for var in "SSH_USERNAME" "SSH_HOST" "MONGODB_USERNAME" "MONGODB_PASSWORD" "MONGODB_DATABASE" "MONGODB_AUTH_DB"
do
if [[ -z "${!var}" ]]; then
echo "${var} is undefined"


@ -9,5 +9,4 @@ do
fi
done
[ -z "$SSH_PRIVATE_KEY" ] || create_private_ssh_key_from_env
rsync --archive --update --verbose ${SSH_USERNAME}@${SSH_HOST}:${UPLOADS_DIRECTORY}/* /uploads/
rsync --archive --update --verbose ${SSH_USERNAME}@${SSH_HOST}:${UPLOADS_DIRECTORY}/ /uploads/
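
For context on the trailing-slash change above: with rsync, a source of "dir/" syncs the directory's contents (dotfiles included), while "dir/*" relies on the remote shell expanding the glob, misses hidden files, and fails on an empty directory. A hedged sketch with an illustrative host and paths:

# Illustrative: "uploads/" copies the directory contents, dotfiles included;
# "uploads/*" would be expanded by the remote shell and skip hidden files.
rsync --archive --update --verbose deploy@example.org:/var/uploads/ /uploads/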


@ -9,16 +9,17 @@ echo "MONGODB_DATABASE ${MONGODB_DATABASE}"
echo "MONGODB_AUTH_DB ${MONGODB_AUTH_DB}"
echo "-------------------------------------------------"
[ -z "$SSH_PRIVATE_KEY" ] || create_private_ssh_key_from_env
rm -rf /tmp/mongo-export/*
mkdir -p /tmp/mongo-export
mkdir -p /tmp/mongo-export/
ssh -4 -M -S my-ctrl-socket -fnNT -L 27018:localhost:27017 -l ${SSH_USERNAME} ${SSH_HOST}
for collection in "categories" "badges" "users" "contributions" "comments" "follows" "shouts"
do
mongoexport --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase ${MONGODB_AUTH_DB} --db ${MONGODB_DATABASE} --collection $collection --out "/tmp/mongo-export/$collection.json"
mongoexport --db ${MONGODB_DATABASE} --host localhost --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase ${MONGODB_AUTH_DB} --collection $collection --out "/tmp/mongo-export/$collection.json"
mkdir -p /tmp/mongo-export/splits/$collection/
split -l 1000 -a 3 /tmp/mongo-export/$collection.json /tmp/mongo-export/splits/$collection/
done
ssh -S my-ctrl-socket -O check -l ${SSH_USERNAME} ${SSH_HOST}


@ -1,4 +1,4 @@
CALL apoc.load.json('file:/tmp/mongo-export/badges.json') YIELD value as badge
CALL apoc.load.json('file:/tmp/mongo-export/splits/current-chunk.json') YIELD value as badge
MERGE(b:Badge {id: badge._id["$oid"]})
ON CREATE SET
b.key = badge.key,


@ -1,4 +1,4 @@
CALL apoc.load.json('file:/tmp/mongo-export/categories.json') YIELD value as category
CALL apoc.load.json('file:/tmp/mongo-export/splits/current-chunk.json') YIELD value as category
MERGE(c:Category {id: category._id["$oid"]})
ON CREATE SET
c.name = category.title,


@ -1,4 +1,5 @@
CALL apoc.load.json('file:/tmp/mongo-export/comments.json') YIELD value as json
CALL apoc.load.json('file:/tmp/mongo-export/splits/current-chunk.json') YIELD value as json
MERGE (comment:Comment {id: json._id["$oid"]})
ON CREATE SET
comment.content = json.content,


@ -1,4 +1,4 @@
CALL apoc.load.json('file:/tmp/mongo-export/contributions.json') YIELD value as post
CALL apoc.load.json('file:/tmp/mongo-export/splits/current-chunk.json') YIELD value as post
MERGE (p:Post {id: post._id["$oid"]})
ON CREATE SET
p.title = post.title,


@ -1,4 +1,4 @@
CALL apoc.load.json('file:/tmp/mongo-export/follows.json') YIELD value as follow
CALL apoc.load.json('file:/tmp/mongo-export/splits/current-chunk.json') YIELD value as follow
MATCH (u1:User {id: follow.userId}), (u2:User {id: follow.foreignId})
MERGE (u1)-[:FOLLOWS]->(u2)
;


@ -1,9 +1,17 @@
#!/usr/bin/env bash
set -e
SECONDS=0
SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE n,r;" | cypher-shell -a $NEO4J_URI
echo "MATCH (n) DETACH DELETE n;" | cypher-shell
for collection in "badges" "categories" "users" "follows" "contributions" "shouts" "comments"
do
echo "Import ${collection}..." && cypher-shell -a $NEO4J_URI < $SCRIPT_DIRECTORY/$collection.cql
for chunk in /tmp/mongo-export/splits/$collection/*
do
mv $chunk /tmp/mongo-export/splits/current-chunk.json
echo "Import ${chunk}" && cypher-shell < $SCRIPT_DIRECTORY/$collection.cql
done
done
echo "Time elapsed: $SECONDS seconds"


@ -1,4 +1,4 @@
CALL apoc.load.json('file:/tmp/mongo-export/shouts.json') YIELD value as shout
CALL apoc.load.json('file:/tmp/mongo-export/splits/current-chunk.json') YIELD value as shout
MATCH (u:User {id: shout.userId}), (p:Post {id: shout.foreignId})
MERGE (u)-[:SHOUTED]->(p)
;


@ -1,4 +1,4 @@
CALL apoc.load.json('file:/tmp/mongo-export/users.json') YIELD value as user
CALL apoc.load.json('file:/tmp/mongo-export/splits/current-chunk.json') YIELD value as user
MERGE(u:User {id: user._id["$oid"]})
ON CREATE SET
u.name = user.name,


@ -4,14 +4,17 @@ services:
maintenance:
image: humanconnection/maintenance-worker:latest
build:
context: .
context: deployment/legacy-migration/maintenance-worker
volumes:
- uploads:/uploads
- neo4j-data:/data
- ./migration/:/migration
- ./deployment/legacy-migration/maintenance-worker/migration/:/migration
- ./deployment/legacy-migration/maintenance-worker/ssh/:/root/.ssh
networks:
- hc-network
environment:
- NEO4J_dbms_security_auth__enabled=false
- NEO4J_dbms_memory_heap_max__size=2G
- GRAPHQL_PORT=4000
- GRAPHQL_URI=http://localhost:4000
- CLIENT_URI=http://localhost:3000
@ -19,12 +22,9 @@ services:
- MOCK=false
- MAPBOX_TOKEN=pk.eyJ1IjoiaHVtYW4tY29ubmVjdGlvbiIsImEiOiJjajl0cnBubGoweTVlM3VwZ2lzNTNud3ZtIn0.KZ8KK9l70omjXbEkkbHGsQ
- PRIVATE_KEY_PASSPHRASE=a7dsf78sadg87ad87sfagsadg78
- NEO4J_URI=bolt://localhost:7687
- NEO4J_apoc_import_file_enabled=true
- NEO4J_AUTH=none
- "SSH_USERNAME=${SSH_USERNAME}"
- "SSH_HOST=${SSH_HOST}"
- "SSH_PRIVATE_KEY=${SSH_PRIVATE_KEY}"
- "MONGODB_USERNAME=${MONGODB_USERNAME}"
- "MONGODB_PASSWORD=${MONGODB_PASSWORD}"
- "MONGODB_AUTH_DB=${MONGODB_AUTH_DB}"
@ -34,9 +34,11 @@ services:
- 7687:7687
- 7474:7474
volumes:
uploads:
neo4j-data:
networks:
hc-network:
volumes:
webapp_node_modules:
backend_node_modules:
neo4j-data:
uploads:


@ -18,6 +18,7 @@ services:
volumes:
- ./backend:/nitro-backend
- backend_node_modules:/nitro-backend/node_modules
- uploads:/nitro-backend/public/uploads
command: yarn run dev
neo4j:
environment:
@ -32,3 +33,4 @@ volumes:
webapp_node_modules:
backend_node_modules:
neo4j-data:
uploads:


@ -11,6 +11,9 @@ services:
build:
context: webapp
target: build-and-test
volumes:
#/nitro-web
- ./webapp/coverage:/nitro-web/coverage
environment:
- GRAPHQL_URI=http://backend:4000
backend:
@ -18,6 +21,8 @@ services:
build:
context: backend
target: builder
volumes:
- ./backend/coverage:/nitro-backend/coverage
ports:
- 4001:4001
- 4123:4123


@ -4,15 +4,30 @@
# the initial default user. Before we can create constraints, we have to change
# the default password. This is a security feature of neo4j.
if echo ":exit" | cypher-shell --password neo4j 2> /dev/null ; then
echo "CALL dbms.security.changePassword('${NEO4J_PASSWORD}');" | cypher-shell --password neo4j
if [[ -z "${NEO4J_PASSWORD}" ]]; then
echo "NEO4J_PASSWORD environment variable is undefined. I cannot set the initial password."
else
echo "CALL dbms.security.changePassword('${NEO4J_PASSWORD}');" | cypher-shell --password neo4j
fi
fi
set -e
echo '
CALL db.index.fulltext.createNodeIndex("full_text_search",["Post"],["title", "content"]);
CREATE CONSTRAINT ON (p:Post) ASSERT p.id IS UNIQUE;
CREATE CONSTRAINT ON (c:Comment) ASSERT c.id IS UNIQUE;
CREATE CONSTRAINT ON (c:Category) ASSERT c.id IS UNIQUE;
CREATE CONSTRAINT ON (u:User) ASSERT u.id IS UNIQUE;
CREATE CONSTRAINT ON (o:Organization) ASSERT o.id IS UNIQUE;
CREATE CONSTRAINT ON (t:Tag) ASSERT t.id IS UNIQUE;
CREATE CONSTRAINT ON (p:Post) ASSERT p.slug IS UNIQUE;
CREATE CONSTRAINT ON (c:Category) ASSERT c.slug IS UNIQUE;
CREATE CONSTRAINT ON (u:User) ASSERT u.slug IS UNIQUE;
CREATE CONSTRAINT ON (o:Organization) ASSERT o.slug IS UNIQUE;
' | cypher-shell
echo "Successfully created all indices and unique constraints:"
echo 'CALL db.indexes();' | cypher-shell
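
Once the initial default password has been changed, later cypher-shell calls need credentials unless authentication is disabled; cypher-shell also honours the NEO4J_USERNAME and NEO4J_PASSWORD environment variables, which is presumably why the plain cypher-shell invocations above succeed. A quick manual check, assuming the default neo4j user (illustrative):

# Illustrative: verify the new password took effect for the default neo4j user.
echo "RETURN 1;" | cypher-shell -u neo4j -p "$NEO4J_PASSWORD"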


@ -1,4 +1,7 @@
{
"plugins": [
"@babel/plugin-syntax-dynamic-import"
],
"presets": [
[
"@babel/preset-env",
@ -21,4 +24,4 @@
]
}
}
}
}


@ -1,16 +0,0 @@
version: '3.7'
services:
webapp:
build:
context: .
target: build-and-test
volumes:
- .:/nitro-web
- node_modules:/nitro-web/node_modules
- nuxt:/nitro-web/.nuxt
command: yarn run dev
volumes:
node_modules:
nuxt:


@ -1,10 +0,0 @@
version: "3.7"
services:
webapp:
build:
context: .
target: build-and-test
environment:
- GRAPHQL_URI=http://backend:4123
- NODE_ENV=test


@ -1,23 +0,0 @@
version: '3.7'
services:
webapp:
image: humanconnection/nitro-web:latest
build:
context: .
target: production
ports:
- 3000:3000
networks:
- hc-network
environment:
- HOST=0.0.0.0
- GRAPHQL_URI=http://backend:4000
- MAPBOX_TOKEN="pk.eyJ1IjoiaHVtYW4tY29ubmVjdGlvbiIsImEiOiJjajl0cnBubGoweTVlM3VwZ2lzNTNud3ZtIn0.bZ8KK9l70omjXbEkkbHGsQ"
networks:
hc-network:
name: hc-network
volumes:
node_modules:


@ -18,20 +18,32 @@
"jest": {
"verbose": true,
"collectCoverage": true,
"coverageReporters": ["text", "lcov"],
"moduleFileExtensions": [
"js",
"json",
"vue"
"collectCoverageFrom": [
"**/*.{js,vue}",
"!**/node_modules/**",
"!**/.nuxt/**",
"!**/?(*.)+(spec|test).js?(x)"
],
"coverageReporters": [
"text",
"lcov"
],
"transform": {
".*\\.(vue)$": "vue-jest",
"^.+\\.js$": "<rootDir>/node_modules/babel-jest"
},
"moduleFileExtensions": [
"js",
"json",
"vue"
],
"moduleNameMapper": {
"^@/(.*)$": "<rootDir>/src/$1",
"^~/(.*)$": "<rootDir>/$1"
}
},
"testMatch": [
"**/?(*.)+(spec|test).js?(x)"
]
},
"dependencies": {
"@human-connection/styleguide": "0.5.15",
@ -46,7 +58,7 @@
"cross-env": "~5.2.0",
"date-fns": "2.0.0-alpha.27",
"express": "~4.16.4",
"graphql": "~14.2.1",
"graphql": "~14.3.0",
"jsonwebtoken": "~8.5.1",
"linkify-it": "~2.1.0",
"nuxt": "~2.6.3",
@ -63,6 +75,7 @@
},
"devDependencies": {
"@babel/core": "~7.4.4",
"@babel/plugin-syntax-dynamic-import": "^7.2.0",
"@babel/preset-env": "~7.4.4",
"@vue/cli-shared-utils": "~3.7.0",
"@vue/eslint-config-prettier": "~4.0.1",
@ -86,4 +99,4 @@
"vue-jest": "~3.0.4",
"vue-svg-loader": "~0.12.0"
}
}
}


@ -5098,10 +5098,10 @@ graphql-upload@^8.0.2:
http-errors "^1.7.1"
object-path "^0.11.4"
graphql@^14.0.2, graphql@~14.2.1:
version "14.2.1"
resolved "https://registry.yarnpkg.com/graphql/-/graphql-14.2.1.tgz#779529bf9a01e7207b977a54c20670b48ca6e95c"
integrity sha512-2PL1UbvKeSjy/lUeJqHk+eR9CvuErXoCNwJI4jm3oNFEeY+9ELqHNKO1ZuSxAkasPkpWbmT/iMRMFxd3cEL3tQ==
graphql@^14.0.2, graphql@~14.3.0:
version "14.3.0"
resolved "https://registry.yarnpkg.com/graphql/-/graphql-14.3.0.tgz#34dd36faa489ff642bcd25df6c3b4f988a1a2f3e"
integrity sha512-MdfI4v7kSNC3NhB7cF8KNijDsifuWO2XOtzpyququqaclO8wVuChYv+KogexDwgP5sp7nFI9Z6N4QHgoLkfjrg==
dependencies:
iterall "^1.2.2"