From 57a6b259ebf8582e6a253042e3530794b5a589dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Tue, 15 Jan 2019 20:32:34 +0100 Subject: [PATCH 01/17] Copy remote-dump.sh as a starting piont Just added the two environment variables for neo4j. --- scripts/import-legacy-db/.gitignore | 4 + scripts/import-legacy-db/README.md | 82 ++++++++++++++++++++ scripts/import-legacy-db/import-legacy-db.sh | 37 +++++++++ 3 files changed, 123 insertions(+) create mode 100644 scripts/import-legacy-db/.gitignore create mode 100644 scripts/import-legacy-db/README.md create mode 100755 scripts/import-legacy-db/import-legacy-db.sh diff --git a/scripts/import-legacy-db/.gitignore b/scripts/import-legacy-db/.gitignore new file mode 100644 index 000000000..82c26eeec --- /dev/null +++ b/scripts/import-legacy-db/.gitignore @@ -0,0 +1,4 @@ +* +!import-legacy-db.sh +!README.md +!.gitignore diff --git a/scripts/import-legacy-db/README.md b/scripts/import-legacy-db/README.md new file mode 100644 index 000000000..9ad50d658 --- /dev/null +++ b/scripts/import-legacy-db/README.md @@ -0,0 +1,82 @@ +# MongoDB scripts + +This README explains how to directly access the production or staging database +for backup or query purposes. + +## Backup script + +The backup script is intended to be used as a cron job or as a single command from your laptop. +It uses SSH tunneling to a remote host and dumps the mongo database on your machine. +Therefore, a public SSH key needs to be copied to the remote machine. + +### Usage + +All parameters must be supplied as environment variables: + +| Name | required | +|-----------------------|-----------| +| SSH\_USERNAME | yes | +| SSH\_HOST | yes | +| MONGODB\_USERNAME | yes | +| MONGODB\_PASSWORD | yes | +| MONGODB\_DATABASE | yes | +| NEO4J\_USER | yes | +| NEO4J\_PASSWORD | yes | +| OUTPUT | | +| GPG\_PASSWORD | | + +If you set `GPG_PASSWORD`, the resulting archive will be encrypted (symmetrically, with the given passphrase). 
+This is recommended if you dump the database on your personal laptop because of data security. + +After exporting these environment variables to your bash, run: + +```bash +./import-legacy-db.sh +``` + + +### Import into your local mongo db (optional) + +Run (but change the file name accordingly): +```bash +mongorestore --gzip --archive=human-connection-dump_2018-11-21.archive +``` + +If you previously encrypted your dump, run: +```bash +gpg --decrypt human-connection-dump_2018-11-21.archive.gpg | mongorestore --gzip --archive +``` + + +## Query remote MongoDB + +In contrast to the backup script, querying the database is expected to be done +interactively and on demand by the user. Therefore our suggestion is to use a +tool like [MongoDB compass](https://www.mongodb.com/products/compass) to query +the mongo db through an SSH tunnel. This tool can export a collection as .csv +file and you can further do custom processing with a csv tool like +[q](https://github.com/harelba/q). + +### Suggested workflow + +Read on the mongodb compass documentation how to connect to the remote mongo +database [through SSH](https://docs.mongodb.com/compass/master/connect/). You +will need all the credentials and a public SSH key on the server as for the +backup script above. + +Once you have a connection, use the MongoDB Compass +[query bar](https://docs.mongodb.com/compass/master/query-bar/) to query for the +desired data. You can +[export the result](https://docs.mongodb.com/compass/master/import-export/) as +.json or .csv. + +Once you have the .csv file on your machine, use standard SQL queries through +the command line tool q to further process the data. + +For example +```sh +q "SELECT email FROM ./invites.csv INTERSECT SELECT email FROM ./emails.csv" -H --delimiter=, +``` + +[Q's website](http://harelba.github.io/q/usage.html) explains the usage fairly +well. 
diff --git a/scripts/import-legacy-db/import-legacy-db.sh b/scripts/import-legacy-db/import-legacy-db.sh new file mode 100755 index 000000000..263ea9a53 --- /dev/null +++ b/scripts/import-legacy-db/import-legacy-db.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +for var in "SSH_USERNAME" "SSH_HOST" "MONGODB_USERNAME" "MONGODB_PASSWORD" "MONGODB_DATABASE" "NEO4J_USER" "NEO4J_PASSWORD" +do + if [[ -z "${!var}" ]]; then + echo "${var} is undefined" + exit -1 + fi +done + +OUTPUT_FILE_NAME=${OUTPUT:-human-connection-dump}_$(date -I).archive + +echo "SSH_USERNAME ${SSH_USERNAME}" +echo "SSH_HOST ${SSH_HOST}" +echo "MONGODB_USERNAME ${MONGODB_USERNAME}" +echo "MONGODB_PASSWORD ${MONGODB_PASSWORD}" +echo "MONGODB_DATABASE ${MONGODB_DATABASE}" +echo "NEO4J_USER ${NEO4J_USER}" +echo "NEO4J_PASSWORD ${NEO4J_PASSWORD}" +echo "OUTPUT_FILE_NAME ${OUTPUT_FILE_NAME}" +echo "GPG_PASSWORD ${GPG_PASSWORD:-}" +echo "-------------------------------------------------" + +ssh -M -S my-ctrl-socket -fnNT -L 27018:localhost:27017 -l ${SSH_USERNAME} ${SSH_HOST} + +if [[ -z "${!GPG_PASSWORD}" ]]; then + mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase admin --gzip --archive | gpg -c --batch --passphrase ${GPG_PASSWORD} --output ${OUTPUT_FILE_NAME}.gpg +else + mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase admin --gzip --archive=${OUTPUT_FILE_NAME} +fi + + +ssh -S my-ctrl-socket -O check -l ${SSH_USERNAME} ${SSH_HOST} +ssh -S my-ctrl-socket -O exit -l ${SSH_USERNAME} ${SSH_HOST} + + + From 0970014a5949bd1476549329c6bfd4bb8562e3cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Wed, 16 Jan 2019 00:37:21 +0100 Subject: [PATCH 02/17] Create db-migration-worker as a docker container The idea is to import/dump the remote database via SSH, restore it to the local mongodb, export .json 
collections to a shared volume and import the json collections with cypher-shell. --- .dockerignore | 2 + db-migration-worker/.gitignore | 1 + db-migration-worker/Dockerfile | 9 +++ db-migration-worker/import.sh | 32 ++++++++ docker-compose.override.yml | 26 +++++++ scripts/import-legacy-db/.gitignore | 4 - scripts/import-legacy-db/README.md | 82 -------------------- scripts/import-legacy-db/import-legacy-db.sh | 37 --------- 8 files changed, 70 insertions(+), 123 deletions(-) create mode 100644 db-migration-worker/.gitignore create mode 100644 db-migration-worker/Dockerfile create mode 100755 db-migration-worker/import.sh delete mode 100644 scripts/import-legacy-db/.gitignore delete mode 100644 scripts/import-legacy-db/README.md delete mode 100755 scripts/import-legacy-db/import-legacy-db.sh diff --git a/.dockerignore b/.dockerignore index 84b5adc92..6b6b2193f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,6 +6,7 @@ .env Dockerfile +docker-compose*.yml ./*.png ./*.log @@ -14,3 +15,4 @@ kubernetes/ node_modules/ scripts/ dist/ +db-migration-worker/ diff --git a/db-migration-worker/.gitignore b/db-migration-worker/.gitignore new file mode 100644 index 000000000..690bae050 --- /dev/null +++ b/db-migration-worker/.gitignore @@ -0,0 +1 @@ +id_rsa diff --git a/db-migration-worker/Dockerfile b/db-migration-worker/Dockerfile new file mode 100644 index 000000000..844f65e16 --- /dev/null +++ b/db-migration-worker/Dockerfile @@ -0,0 +1,9 @@ +FROM mongo:latest +ARG KNOWN_HOST + +RUN apt-get update +RUN apt-get -y install openssh-client +COPY id_rsa /root/.ssh/id_rsa +RUN ssh-keyscan -H $KNOWN_HOST >> /root/.ssh/known_hosts +COPY import.sh . 
+ diff --git a/db-migration-worker/import.sh b/db-migration-worker/import.sh new file mode 100755 index 000000000..ccc2cf19b --- /dev/null +++ b/db-migration-worker/import.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +for var in "SSH_USERNAME" "SSH_HOST" "MONGODB_USERNAME" "MONGODB_PASSWORD" "MONGODB_DATABASE" "NEO4J_USERNAME" "NEO4J_PASSWORD" "MONGODB_AUTH_DB" +do + if [[ -z "${!var}" ]]; then + echo "${var} is undefined" + exit -1 + fi +done + +OUTPUT_FILE_NAME=${OUTPUT_FILE_NAME:-human-connection-dump}.archive + +echo "SSH_USERNAME ${SSH_USERNAME}" +echo "SSH_HOST ${SSH_HOST}" +echo "MONGODB_USERNAME ${MONGODB_USERNAME}" +echo "MONGODB_PASSWORD ${MONGODB_PASSWORD}" +echo "MONGODB_DATABASE ${MONGODB_DATABASE}" +echo "MONGODB_AUTH_DB ${MONGODB_AUTH_DB}" +echo "NEO4J_USERNAME ${NEO4J_USERNAME}" +echo "NEO4J_PASSWORD ${NEO4J_PASSWORD}" +echo "-------------------------------------------------" + +ssh -4 -M -S my-ctrl-socket -fnNT -L 27018:localhost:27017 -l ${SSH_USERNAME} ${SSH_HOST} +mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase ${MONGODB_AUTH_DB} --gzip --archive=${OUTPUT_FILE_NAME} +ssh -S my-ctrl-socket -O check -l ${SSH_USERNAME} ${SSH_HOST} +ssh -S my-ctrl-socket -O exit -l ${SSH_USERNAME} ${SSH_HOST} + +mongorestore --gzip --archive=human-connection-dump.archive +# cat ./neo4j_import.cql | /usr/share/neo4j/bin/cypher-shell + + + diff --git a/docker-compose.override.yml b/docker-compose.override.yml index ef7d52c7e..f2b3fa0fc 100644 --- a/docker-compose.override.yml +++ b/docker-compose.override.yml @@ -11,6 +11,32 @@ services: - /nitro-backend/node_modules command: yarn run dev neo4j: + volumes: + - mongo-export:/mongo-export ports: - 7687:7687 - 7474:7474 + environment: + - NEO4J_apoc_import_file_enabled=true + db-migration-worker: + build: + context: db-migration-worker + args: + - "KNOWN_HOST=${SSH_HOST}" + volumes: + - mongo-export:/mongo-export + networks: + - 
hc-network + environment: + - "SSH_USERNAME=${SSH_USERNAME}" + - "SSH_HOST=${SSH_HOST}" + - "MONGODB_USERNAME=${MONGODB_USERNAME}" + - "MONGODB_PASSWORD=${MONGODB_PASSWORD}" + - "MONGODB_AUTH_DB=${MONGODB_AUTH_DB}" + - "MONGODB_DATABASE=${MONGODB_DATABASE}" + - "NEO4J_USERNAME=${NEO4J_USERNAME}" + - "NEO4J_PASSWORD=${NEO4J_PASSWORD}" + command: "--smallfiles --logpath=/dev/null" + +volumes: + mongo-export: diff --git a/scripts/import-legacy-db/.gitignore b/scripts/import-legacy-db/.gitignore deleted file mode 100644 index 82c26eeec..000000000 --- a/scripts/import-legacy-db/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -* -!import-legacy-db.sh -!README.md -!.gitignore diff --git a/scripts/import-legacy-db/README.md b/scripts/import-legacy-db/README.md deleted file mode 100644 index 9ad50d658..000000000 --- a/scripts/import-legacy-db/README.md +++ /dev/null @@ -1,82 +0,0 @@ -# MongoDB scripts - -This README explains how to directly access the production or staging database -for backup or query purposes. - -## Backup script - -The backup script is intended to be used as a cron job or as a single command from your laptop. -It uses SSH tunneling to a remote host and dumps the mongo database on your machine. -Therefore, a public SSH key needs to be copied to the remote machine. - -### Usage - -All parameters must be supplied as environment variables: - -| Name | required | -|-----------------------|-----------| -| SSH\_USERNAME | yes | -| SSH\_HOST | yes | -| MONGODB\_USERNAME | yes | -| MONGODB\_PASSWORD | yes | -| MONGODB\_DATABASE | yes | -| NEO4J\_USER | yes | -| NEO4J\_PASSWORD | yes | -| OUTPUT | | -| GPG\_PASSWORD | | - -If you set `GPG_PASSWORD`, the resulting archive will be encrypted (symmetrically, with the given passphrase). -This is recommended if you dump the database on your personal laptop because of data security. 
- -After exporting these environment variables to your bash, run: - -```bash -./import-legacy-db.sh -``` - - -### Import into your local mongo db (optional) - -Run (but change the file name accordingly): -```bash -mongorestore --gzip --archive=human-connection-dump_2018-11-21.archive -``` - -If you previously encrypted your dump, run: -```bash -gpg --decrypt human-connection-dump_2018-11-21.archive.gpg | mongorestore --gzip --archive -``` - - -## Query remote MongoDB - -In contrast to the backup script, querying the database is expected to be done -interactively and on demand by the user. Therefore our suggestion is to use a -tool like [MongoDB compass](https://www.mongodb.com/products/compass) to query -the mongo db through an SSH tunnel. This tool can export a collection as .csv -file and you can further do custom processing with a csv tool like -[q](https://github.com/harelba/q). - -### Suggested workflow - -Read on the mongodb compass documentation how to connect to the remote mongo -database [through SSH](https://docs.mongodb.com/compass/master/connect/). You -will need all the credentials and a public SSH key on the server as for the -backup script above. - -Once you have a connection, use the MongoDB Compass -[query bar](https://docs.mongodb.com/compass/master/query-bar/) to query for the -desired data. You can -[export the result](https://docs.mongodb.com/compass/master/import-export/) as -.json or .csv. - -Once you have the .csv file on your machine, use standard SQL queries through -the command line tool q to further process the data. - -For example -```sh -q "SELECT email FROM ./invites.csv INTERSECT SELECT email FROM ./emails.csv" -H --delimiter=, -``` - -[Q's website](http://harelba.github.io/q/usage.html) explains the usage fairly -well. 
diff --git a/scripts/import-legacy-db/import-legacy-db.sh b/scripts/import-legacy-db/import-legacy-db.sh deleted file mode 100755 index 263ea9a53..000000000 --- a/scripts/import-legacy-db/import-legacy-db.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash - -for var in "SSH_USERNAME" "SSH_HOST" "MONGODB_USERNAME" "MONGODB_PASSWORD" "MONGODB_DATABASE" "NEO4J_USER" "NEO4J_PASSWORD" -do - if [[ -z "${!var}" ]]; then - echo "${var} is undefined" - exit -1 - fi -done - -OUTPUT_FILE_NAME=${OUTPUT:-human-connection-dump}_$(date -I).archive - -echo "SSH_USERNAME ${SSH_USERNAME}" -echo "SSH_HOST ${SSH_HOST}" -echo "MONGODB_USERNAME ${MONGODB_USERNAME}" -echo "MONGODB_PASSWORD ${MONGODB_PASSWORD}" -echo "MONGODB_DATABASE ${MONGODB_DATABASE}" -echo "NEO4J_USER ${NEO4J_USER}" -echo "NEO4J_PASSWORD ${NEO4J_PASSWORD}" -echo "OUTPUT_FILE_NAME ${OUTPUT_FILE_NAME}" -echo "GPG_PASSWORD ${GPG_PASSWORD:-}" -echo "-------------------------------------------------" - -ssh -M -S my-ctrl-socket -fnNT -L 27018:localhost:27017 -l ${SSH_USERNAME} ${SSH_HOST} - -if [[ -z "${!GPG_PASSWORD}" ]]; then - mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase admin --gzip --archive | gpg -c --batch --passphrase ${GPG_PASSWORD} --output ${OUTPUT_FILE_NAME}.gpg -else - mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase admin --gzip --archive=${OUTPUT_FILE_NAME} -fi - - -ssh -S my-ctrl-socket -O check -l ${SSH_USERNAME} ${SSH_HOST} -ssh -S my-ctrl-socket -O exit -l ${SSH_USERNAME} ${SSH_HOST} - - - From 1c04b8f3deb538195bec48f3a054d9e0e6563948 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Wed, 16 Jan 2019 00:47:28 +0100 Subject: [PATCH 03/17] Import mongodb on-the-fly --- db-migration-worker/import.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git 
a/db-migration-worker/import.sh b/db-migration-worker/import.sh index ccc2cf19b..02fae0f7e 100755 --- a/db-migration-worker/import.sh +++ b/db-migration-worker/import.sh @@ -8,8 +8,6 @@ do fi done -OUTPUT_FILE_NAME=${OUTPUT_FILE_NAME:-human-connection-dump}.archive - echo "SSH_USERNAME ${SSH_USERNAME}" echo "SSH_HOST ${SSH_HOST}" echo "MONGODB_USERNAME ${MONGODB_USERNAME}" @@ -21,11 +19,10 @@ echo "NEO4J_PASSWORD ${NEO4J_PASSWORD}" echo "-------------------------------------------------" ssh -4 -M -S my-ctrl-socket -fnNT -L 27018:localhost:27017 -l ${SSH_USERNAME} ${SSH_HOST} -mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase ${MONGODB_AUTH_DB} --gzip --archive=${OUTPUT_FILE_NAME} +mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase ${MONGODB_AUTH_DB} --gzip --archive | mongorestore --gzip --archive ssh -S my-ctrl-socket -O check -l ${SSH_USERNAME} ${SSH_HOST} ssh -S my-ctrl-socket -O exit -l ${SSH_USERNAME} ${SSH_HOST} -mongorestore --gzip --archive=human-connection-dump.archive # cat ./neo4j_import.cql | /usr/share/neo4j/bin/cypher-shell From 433cd7a52e0b73f30abdde7cbf7ba18a70eb302f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Wed, 16 Jan 2019 01:13:42 +0100 Subject: [PATCH 04/17] Export all relevant collection to a shared folder --- db-migration-worker/import.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/db-migration-worker/import.sh b/db-migration-worker/import.sh index 02fae0f7e..2ff3d6443 100755 --- a/db-migration-worker/import.sh +++ b/db-migration-worker/import.sh @@ -23,7 +23,7 @@ mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONG ssh -S my-ctrl-socket -O check -l ${SSH_USERNAME} ${SSH_HOST} ssh -S my-ctrl-socket -O exit -l ${SSH_USERNAME} ${SSH_HOST} -# cat ./neo4j_import.cql 
| /usr/share/neo4j/bin/cypher-shell - - - +for collection in "categories" "badges" "users" "contributions" "comments" "follows" "shouts" +do + mongoexport --db ${MONGODB_DATABASE} --collection $collection --out "/mongo-export/$collection.json" +done From 8d8dfb06401e80c9888fbd2d84618311ba09e8d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Wed, 16 Jan 2019 01:50:01 +0100 Subject: [PATCH 05/17] Provision neo4j container with import script --- .dockerignore | 2 + docker-compose.override.yml | 4 +- docker-compose.yml | 3 +- Dockerfile.neo4j => neo4j/Dockerfile | 1 + neo4j/import.cql | 75 ++++++++++++++++++++++++++++ 5 files changed, 81 insertions(+), 4 deletions(-) rename Dockerfile.neo4j => neo4j/Dockerfile (88%) create mode 100644 neo4j/import.cql diff --git a/.dockerignore b/.dockerignore index 6b6b2193f..f5a08be39 100644 --- a/.dockerignore +++ b/.dockerignore @@ -15,4 +15,6 @@ kubernetes/ node_modules/ scripts/ dist/ + db-migration-worker/ +neo4j/ diff --git a/docker-compose.override.yml b/docker-compose.override.yml index f2b3fa0fc..c1ac0569d 100644 --- a/docker-compose.override.yml +++ b/docker-compose.override.yml @@ -18,6 +18,8 @@ services: - 7474:7474 environment: - NEO4J_apoc_import_file_enabled=true + - "NEO4J_USERNAME=${NEO4J_USERNAME}" + - "NEO4J_PASSWORD=${NEO4J_PASSWORD}" db-migration-worker: build: context: db-migration-worker @@ -34,8 +36,6 @@ services: - "MONGODB_PASSWORD=${MONGODB_PASSWORD}" - "MONGODB_AUTH_DB=${MONGODB_AUTH_DB}" - "MONGODB_DATABASE=${MONGODB_DATABASE}" - - "NEO4J_USERNAME=${NEO4J_USERNAME}" - - "NEO4J_PASSWORD=${NEO4J_PASSWORD}" command: "--smallfiles --logpath=/dev/null" volumes: diff --git a/docker-compose.yml b/docker-compose.yml index 5a7650aa1..df8de6b01 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,8 +24,7 @@ services: neo4j: image: humanconnection/neo4j:latest build: - context: . 
- dockerfile: Dockerfile.neo4j + context: neo4j networks: - hc-network volumes: diff --git a/Dockerfile.neo4j b/neo4j/Dockerfile similarity index 88% rename from Dockerfile.neo4j rename to neo4j/Dockerfile index cb7fd228f..2ef9443a5 100644 --- a/Dockerfile.neo4j +++ b/neo4j/Dockerfile @@ -1,2 +1,3 @@ FROM neo4j:3.5.0 RUN wget https://github.com/neo4j-contrib/neo4j-apoc-procedures/releases/download/3.5.0.1/apoc-3.5.0.1-all.jar -P plugins/ +COPY import.cql . diff --git a/neo4j/import.cql b/neo4j/import.cql new file mode 100644 index 000000000..521cc1221 --- /dev/null +++ b/neo4j/import.cql @@ -0,0 +1,75 @@ +CALL apoc.load.json('file:/mongo-export/categories.json') YIELD value as category +MERGE(c:Category {id: category._id["$oid"]}) +ON CREATE SET c.name = category.title, + c.slug = category.slug, + c.icon = category.icon + + +CALL apoc.load.json('file:/mongo-export/badges.json') YIELD value as badge +MERGE(b:Badge {id: badge._id["$oid"]}) +ON CREATE SET b.key = badge.key, + b.type = badge.type, + b.icon = badge.image.path, + b.status = badge.status + + +CALL apoc.load.json('file:/mongo-export/users.json') YIELD value as user +MERGE(u:User {id: user._id["$oid"]}) +ON CREATE SET u.name = user.name, + u.slug = user.slug, + u.email = user.email, + u.password = user.password, + u.avatar = user.avatar, + u.coverImg = user.coverImg, + u.wasInvited = user.wasInvited, + u.role = apoc.text.toUpperCase(user.role) +WITH u, user, user.badgeIds AS badgeIds +UNWIND badgeIds AS badgeId +MATCH (b:Badge {id: badgeId}) +MERGE (b)-[:REWARDED]->(u) + + + +CALL apoc.load.json('file:/mongo-export/contributions.json') YIELD value as post +MERGE (p:Post {id: post._id["$oid"]}) +ON CREATE SET p.title = post.title, + p.slug = post.slug, + p.image = post.teaserImg, + p.content = post.content, + p.contentExcerpt = post.contentExcerpt, + p.visibility = apoc.text.toUpperCase(post.visibility), + p.createdAt = datetime(post.createdAt["$date"]), + p.updatedAt = datetime(post.updatedAt["$date"]) 
+WITH p, post, post.tags AS tags, post.categoryIds as categoryIds +UNWIND tags AS tag +UNWIND categoryIds AS categoryId +MATCH (c:Category {id: categoryId}), + (u:User {id: post.userId}) +MERGE (t:Tag {id: apoc.create.uuid(), name: tag}) +MERGE (p)-[:TAGGED]->(t) +MERGE (u)-[:WROTE]->(p) +MERGE (p)-[:CATEGORIZED]->(c) + + +CALL apoc.load.json('file:/mongo-export/comments.json') YIELD value as comment +MERGE (c:Comment {id: comment._id["$oid"]}) +ON CREATE SET c.content = comment.content, + c.contentExcerpt = comment.contentExcerpt, + c.deleted = comment.deleted +WITH comment +MATCH (p:Post {id: comment.contributionId}), + (u:User {id: comment.userId}) +MERGE (c)-[:COMMENTS]->(p) +MERGE (u)-[:WROTE]->(c) + + +CALL apoc.load.json('file:/mongo-export/follows.json') YIELD value as follow +MATCH (u1:User {id: follow.userId}), + (u2:User {id: follow.foreignId}) +MERGE (u1)-[:FOLLOWS]->(u2) + + +CALL apoc.load.json('file:/mongo-export/shouts.json') YIELD value as shout +MATCH (u:User {id: shout.userId}), + (p:Post {id: shout.foreignId}) +MERGE (u)-[:SHOUTED]->(p) From bfad62130604eb114cfd43fa4c5e4415c1bcbd79 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Wed, 16 Jan 2019 02:19:02 +0100 Subject: [PATCH 06/17] Disable automatic know-hosts for now This will require user interaction when running the import script --- db-migration-worker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db-migration-worker/Dockerfile b/db-migration-worker/Dockerfile index 844f65e16..39ef39275 100644 --- a/db-migration-worker/Dockerfile +++ b/db-migration-worker/Dockerfile @@ -4,6 +4,6 @@ ARG KNOWN_HOST RUN apt-get update RUN apt-get -y install openssh-client COPY id_rsa /root/.ssh/id_rsa -RUN ssh-keyscan -H $KNOWN_HOST >> /root/.ssh/known_hosts +# RUN ssh-keyscan -H $KNOWN_HOST >> /root/.ssh/known_hosts COPY import.sh . 
From f0c18e2ccabae1438ba9ba88ce9c1cbe85e09d19 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Fri, 18 Jan 2019 21:07:24 +0100 Subject: [PATCH 07/17] Clean up Dockerfile, add known_hosts file @appinteractive it's troublesome to add the SSH private key via environment variable. You have to convert newlines to spaces and convert them back - which I think is error prone. I hope we can transfer the private key file on to our deployed container later on. --- db-migration-worker/.gitignore | 2 +- db-migration-worker/.ssh/.dockerignore | 1 + db-migration-worker/.ssh/known_hosts | 1 + db-migration-worker/Dockerfile | 4 +--- db-migration-worker/import.sh | 9 +++++---- docker-compose.override.yml | 5 +---- 6 files changed, 10 insertions(+), 12 deletions(-) create mode 100644 db-migration-worker/.ssh/.dockerignore create mode 100644 db-migration-worker/.ssh/known_hosts diff --git a/db-migration-worker/.gitignore b/db-migration-worker/.gitignore index 690bae050..87cb01310 100644 --- a/db-migration-worker/.gitignore +++ b/db-migration-worker/.gitignore @@ -1 +1 @@ -id_rsa +.ssh/id_rsa diff --git a/db-migration-worker/.ssh/.dockerignore b/db-migration-worker/.ssh/.dockerignore new file mode 100644 index 000000000..87cb01310 --- /dev/null +++ b/db-migration-worker/.ssh/.dockerignore @@ -0,0 +1 @@ +.ssh/id_rsa diff --git a/db-migration-worker/.ssh/known_hosts b/db-migration-worker/.ssh/known_hosts new file mode 100644 index 000000000..0ce0dce0d --- /dev/null +++ b/db-migration-worker/.ssh/known_hosts @@ -0,0 +1 @@ +|1|GuOYlVEhTowidPs18zj9p5F2j3o=|sDHJYLz9Ftv11oXeGEjs7SpVyg0= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBM5N29bI5CeKu1/RBPyM2fwyf7fuajOO+tyhKe1+CC2sZ1XNB5Ff6t6MtCLNRv2mUuvzTbW/HkisDiA5tuXUHOk= diff --git a/db-migration-worker/Dockerfile b/db-migration-worker/Dockerfile index 39ef39275..139b3403a 100644 --- a/db-migration-worker/Dockerfile +++ b/db-migration-worker/Dockerfile @@ -1,9 +1,7 @@ FROM mongo:latest -ARG 
KNOWN_HOST RUN apt-get update RUN apt-get -y install openssh-client -COPY id_rsa /root/.ssh/id_rsa -# RUN ssh-keyscan -H $KNOWN_HOST >> /root/.ssh/known_hosts +COPY .ssh /root/.ssh/ COPY import.sh . diff --git a/db-migration-worker/import.sh b/db-migration-worker/import.sh index 2ff3d6443..b1042844b 100755 --- a/db-migration-worker/import.sh +++ b/db-migration-worker/import.sh @@ -1,6 +1,6 @@ -#!/bin/bash +#!/usr/bin/env bash -for var in "SSH_USERNAME" "SSH_HOST" "MONGODB_USERNAME" "MONGODB_PASSWORD" "MONGODB_DATABASE" "NEO4J_USERNAME" "NEO4J_PASSWORD" "MONGODB_AUTH_DB" +for var in "SSH_USERNAME" "SSH_HOST" "MONGODB_USERNAME" "MONGODB_PASSWORD" "MONGODB_DATABASE" "MONGODB_AUTH_DB" do if [[ -z "${!var}" ]]; then echo "${var} is undefined" @@ -14,10 +14,11 @@ echo "MONGODB_USERNAME ${MONGODB_USERNAME}" echo "MONGODB_PASSWORD ${MONGODB_PASSWORD}" echo "MONGODB_DATABASE ${MONGODB_DATABASE}" echo "MONGODB_AUTH_DB ${MONGODB_AUTH_DB}" -echo "NEO4J_USERNAME ${NEO4J_USERNAME}" -echo "NEO4J_PASSWORD ${NEO4J_PASSWORD}" echo "-------------------------------------------------" +mongo ${MONGODB_DATABASE} --eval "db.dropDatabase();" +rm -f /mongo-export/* + ssh -4 -M -S my-ctrl-socket -fnNT -L 27018:localhost:27017 -l ${SSH_USERNAME} ${SSH_HOST} mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase ${MONGODB_AUTH_DB} --gzip --archive | mongorestore --gzip --archive ssh -S my-ctrl-socket -O check -l ${SSH_USERNAME} ${SSH_HOST} diff --git a/docker-compose.override.yml b/docker-compose.override.yml index c1ac0569d..c8644a6f8 100644 --- a/docker-compose.override.yml +++ b/docker-compose.override.yml @@ -18,15 +18,12 @@ services: - 7474:7474 environment: - NEO4J_apoc_import_file_enabled=true - - "NEO4J_USERNAME=${NEO4J_USERNAME}" - - "NEO4J_PASSWORD=${NEO4J_PASSWORD}" db-migration-worker: build: context: db-migration-worker - args: - - "KNOWN_HOST=${SSH_HOST}" volumes: - 
mongo-export:/mongo-export + - ./db-migration-worker/.ssh/:/root/.ssh/ networks: - hc-network environment: From 4534f1ae9e215def40842536cea4b3efb37d00b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Fri, 18 Jan 2019 21:18:59 +0100 Subject: [PATCH 08/17] Add production server to known_hosts --- db-migration-worker/.ssh/known_hosts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/db-migration-worker/.ssh/known_hosts b/db-migration-worker/.ssh/known_hosts index 0ce0dce0d..947840cb2 100644 --- a/db-migration-worker/.ssh/known_hosts +++ b/db-migration-worker/.ssh/known_hosts @@ -1 +1,3 @@ |1|GuOYlVEhTowidPs18zj9p5F2j3o=|sDHJYLz9Ftv11oXeGEjs7SpVyg0= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBM5N29bI5CeKu1/RBPyM2fwyf7fuajOO+tyhKe1+CC2sZ1XNB5Ff6t6MtCLNRv2mUuvzTbW/HkisDiA5tuXUHOk= +|1|2KP9NV+Q5g2MrtjAeFSVcs8YeOI=|nf3h4wWVwC4xbBS1kzgzE2tBldk= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNhRK6BeIEUxXlS0z/pOfkUkSPfn33g4J1U3L+MyUQYHm+7agT08799ANJhnvELKE1tt4Vx80I9UR81oxzZcy3E= +|1|HonYIRNhKyroUHPKU1HSZw0+Qzs=|5T1btfwFBz2vNSldhqAIfTbfIgQ= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNhRK6BeIEUxXlS0z/pOfkUkSPfn33g4J1U3L+MyUQYHm+7agT08799ANJhnvELKE1tt4Vx80I9UR81oxzZcy3E= From 97e6acf46b6753ffe262a00ee5ab73487d79d2a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Fri, 18 Jan 2019 22:45:29 +0100 Subject: [PATCH 09/17] Import of all users in one script: ```sh docker-compose exec neo4j import/import.sh ``` --- docker-compose.override.yml | 1 + neo4j/Dockerfile | 2 +- neo4j/import/import.sh | 4 ++++ neo4j/{import.cql => import/import.todo} | 17 ----------------- neo4j/import/users.cql | 20 ++++++++++++++++++++ src/seed/reset-db.js | 7 ++++++- 6 files changed, 32 insertions(+), 19 deletions(-) create mode 100755 neo4j/import/import.sh rename neo4j/{import.cql => import/import.todo} (79%) create mode 100644 neo4j/import/users.cql diff --git 
a/docker-compose.override.yml b/docker-compose.override.yml index c8644a6f8..c5e7d5cf9 100644 --- a/docker-compose.override.yml +++ b/docker-compose.override.yml @@ -13,6 +13,7 @@ services: neo4j: volumes: - mongo-export:/mongo-export + - ./neo4j/import:/var/lib/neo4j/import ports: - 7687:7687 - 7474:7474 diff --git a/neo4j/Dockerfile b/neo4j/Dockerfile index 2ef9443a5..07344b47b 100644 --- a/neo4j/Dockerfile +++ b/neo4j/Dockerfile @@ -1,3 +1,3 @@ FROM neo4j:3.5.0 RUN wget https://github.com/neo4j-contrib/neo4j-apoc-procedures/releases/download/3.5.0.1/apoc-3.5.0.1-all.jar -P plugins/ -COPY import.cql . +COPY import ./import diff --git a/neo4j/import/import.sh b/neo4j/import/import.sh new file mode 100755 index 000000000..88a2666dc --- /dev/null +++ b/neo4j/import/import.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +echo "MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE n,r;" | cypher-shell +cat $SCRIPT_DIRECTORY/*.cql | cypher-shell diff --git a/neo4j/import.cql b/neo4j/import/import.todo similarity index 79% rename from neo4j/import.cql rename to neo4j/import/import.todo index 521cc1221..672ef0116 100644 --- a/neo4j/import.cql +++ b/neo4j/import/import.todo @@ -13,23 +13,6 @@ ON CREATE SET b.key = badge.key, b.status = badge.status -CALL apoc.load.json('file:/mongo-export/users.json') YIELD value as user -MERGE(u:User {id: user._id["$oid"]}) -ON CREATE SET u.name = user.name, - u.slug = user.slug, - u.email = user.email, - u.password = user.password, - u.avatar = user.avatar, - u.coverImg = user.coverImg, - u.wasInvited = user.wasInvited, - u.role = apoc.text.toUpperCase(user.role) -WITH u, user, user.badgeIds AS badgeIds -UNWIND badgeIds AS badgeId -MATCH (b:Badge {id: badgeId}) -MERGE (b)-[:REWARDED]->(u) - - - CALL apoc.load.json('file:/mongo-export/contributions.json') YIELD value as post MERGE (p:Post {id: post._id["$oid"]}) ON CREATE SET p.title = post.title, diff --git 
a/neo4j/import/users.cql b/neo4j/import/users.cql new file mode 100644 index 000000000..96abb1637 --- /dev/null +++ b/neo4j/import/users.cql @@ -0,0 +1,20 @@ +CALL apoc.load.json('file:/mongo-export/users.json') YIELD value as user +MERGE(u:User {id: user._id["$oid"]}) +ON CREATE SET +u.name = user.name, +u.slug = user.slug, +u.email = user.email, +u.password = user.password, +u.avatar = user.avatar, +u.coverImg = user.coverImg, +u.wasInvited = user.wasInvited, +u.role = toLower(user.role), +u.createdAt = user.createdAt.`$date`, +u.updatedAt = user.updatedAt.`$date`, +u.deleted = false, +u.disabled = false +WITH u, user, user.badgeIds AS badgeIds +UNWIND badgeIds AS badgeId +MATCH (b:Badge {id: badgeId}) +MERGE (b)-[:REWARDED]->(u) +; diff --git a/src/seed/reset-db.js b/src/seed/reset-db.js index 616ff71e8..7d7c4f3f9 100644 --- a/src/seed/reset-db.js +++ b/src/seed/reset-db.js @@ -11,7 +11,12 @@ if (process.env.NODE_ENV === 'production') { const driver = neo4j().getDriver() const session = driver.session() -query('MATCH (n) DETACH DELETE n', session).then(() => { +const deleteAll = ` +MATCH (n) +OPTIONAL MATCH (n)-[r]-() +DELETE n,r +` +query(deleteAll, session).then(() => { /* eslint-disable-next-line no-console */ console.log('Successfully deleted all nodes and relations!') }).catch((err) => { From 887a6d201f8140b280a99f01c19b10b0cf92d7a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Fri, 18 Jan 2019 23:53:00 +0100 Subject: [PATCH 10/17] Import contributions to neo4j --- neo4j/import/contributions.cql | 23 +++++++++++++++++++++++ neo4j/import/{import.todo => todo} | 19 ------------------- neo4j/import/users.cql | 2 +- 3 files changed, 24 insertions(+), 20 deletions(-) create mode 100644 neo4j/import/contributions.cql rename neo4j/import/{import.todo => todo} (61%) diff --git a/neo4j/import/contributions.cql b/neo4j/import/contributions.cql new file mode 100644 index 000000000..86226b98f --- /dev/null +++ b/neo4j/import/contributions.cql 
@@ -0,0 +1,23 @@ +CALL apoc.load.json('file:/mongo-export/contributions.json') YIELD value as post +MERGE (p:Post {id: post._id["$oid"]}) +ON CREATE SET +p.title = post.title, +p.slug = post.slug, +p.image = post.teaserImg, +p.content = post.content, +p.contentExcerpt = post.contentExcerpt, +p.visibility = toLower(post.visibility), +p.createdAt = post.createdAt.`$date`, +p.updatedAt = post.updatedAt.`$date`, +p.deleted = post.deleted, +p.disabled = NOT post.isEnabled +WITH p, post, post.tags AS tags, post.categoryIds as categoryIds +UNWIND tags AS tag +UNWIND categoryIds AS categoryId +MATCH (c:Category {id: categoryId}), + (u:User {id: post.userId}) +MERGE (t:Tag {name: tag}) ON CREATE SET t.id = apoc.create.uuid() +MERGE (p)-[:TAGGED]->(t) +MERGE (u)-[:WROTE]->(p) +MERGE (p)-[:CATEGORIZED]->(c) +; diff --git a/neo4j/import/import.todo b/neo4j/import/todo similarity index 61% rename from neo4j/import/import.todo rename to neo4j/import/todo index 672ef0116..2d912c5d4 100644 --- a/neo4j/import/import.todo +++ b/neo4j/import/todo @@ -13,25 +13,6 @@ ON CREATE SET b.key = badge.key, b.status = badge.status -CALL apoc.load.json('file:/mongo-export/contributions.json') YIELD value as post -MERGE (p:Post {id: post._id["$oid"]}) -ON CREATE SET p.title = post.title, - p.slug = post.slug, - p.image = post.teaserImg, - p.content = post.content, - p.contentExcerpt = post.contentExcerpt, - p.visibility = apoc.text.toUpperCase(post.visibility), - p.createdAt = datetime(post.createdAt["$date"]), - p.updatedAt = datetime(post.updatedAt["$date"]) -WITH p, post, post.tags AS tags, post.categoryIds as categoryIds -UNWIND tags AS tag -UNWIND categoryIds AS categoryId -MATCH (c:Category {id: categoryId}), - (u:User {id: post.userId}) -MERGE (t:Tag {id: apoc.create.uuid(), name: tag}) -MERGE (p)-[:TAGGED]->(t) -MERGE (u)-[:WROTE]->(p) -MERGE (p)-[:CATEGORIZED]->(c) CALL apoc.load.json('file:/mongo-export/comments.json') YIELD value as comment diff --git a/neo4j/import/users.cql b/neo4j/import/users.cql
index 96abb1637..5f87bb273 100644 --- a/neo4j/import/users.cql +++ b/neo4j/import/users.cql @@ -11,7 +11,7 @@ u.wasInvited = user.wasInvited, u.role = toLower(user.role), u.createdAt = user.createdAt.`$date`, u.updatedAt = user.updatedAt.`$date`, -u.deleted = false, +u.deleted = user.deletedAt IS NOT NULL, u.disabled = false WITH u, user, user.badgeIds AS badgeIds UNWIND badgeIds AS badgeId From b2720249337a7ac0a4a2c3d23f214251a727db73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Sat, 19 Jan 2019 00:01:30 +0100 Subject: [PATCH 11/17] Import comments --- neo4j/import/comments.cql | 12 ++++++++++++ neo4j/import/todo | 10 ---------- 2 files changed, 12 insertions(+), 10 deletions(-) create mode 100644 neo4j/import/comments.cql diff --git a/neo4j/import/comments.cql b/neo4j/import/comments.cql new file mode 100644 index 000000000..16537b730 --- /dev/null +++ b/neo4j/import/comments.cql @@ -0,0 +1,12 @@ +CALL apoc.load.json('file:/mongo-export/comments.json') YIELD value as comment +MERGE (c:Comment {id: comment._id["$oid"]}) +ON CREATE SET +c.content = comment.content, +c.contentExcerpt = comment.contentExcerpt, +c.deleted = comment.deleted, +c.disabled = false +WITH comment +MATCH (p:Post {id: comment.contributionId}), (u:User {id: comment.userId}) +MERGE (c)-[:COMMENTS]->(p) +MERGE (u)-[:WROTE]->(c) +; diff --git a/neo4j/import/todo b/neo4j/import/todo index 2d912c5d4..0b86c8bc5 100644 --- a/neo4j/import/todo +++ b/neo4j/import/todo @@ -15,16 +15,6 @@ ON CREATE SET b.key = badge.key, -CALL apoc.load.json('file:/mongo-export/comments.json') YIELD value as comment -MERGE (c:Comment {id: comment._id["$oid"]}) -ON CREATE SET c.content = comment.content, - c.contentExcerpt = comment.contentExcerpt, - c.deleted = comment.deleted -WITH comment -MATCH (p:Post {id: comment.contributionId}), - (u:User {id: comment.userId}) -MERGE (c)-[:COMMENTS]->(p) -MERGE (u)-[:WROTE]->(c) CALL apoc.load.json('file:/mongo-export/follows.json') YIELD value as follow 
From 4a21e5b885df9fc161934aac99b6829cc3c88205 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Sat, 19 Jan 2019 00:27:37 +0100 Subject: [PATCH 12/17] Implement test for image middleware --- src/middleware/fixImageUrlsMiddleware.js | 2 +- src/middleware/fixImageUrlsMiddleware.spec.js | 30 +++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 src/middleware/fixImageUrlsMiddleware.spec.js diff --git a/src/middleware/fixImageUrlsMiddleware.js b/src/middleware/fixImageUrlsMiddleware.js index 919a46ab6..8d72e2233 100644 --- a/src/middleware/fixImageUrlsMiddleware.js +++ b/src/middleware/fixImageUrlsMiddleware.js @@ -7,7 +7,7 @@ export const fixUrl = (url) => { url = url.replace(urlSearchLocal, '') return url } -const fixImageURLs = (result, recursive) => { +export const fixImageURLs = (result, recursive) => { if (result && typeof result === 'string' && (result.indexOf(urlSearchAlpha) === 0 || result.indexOf(urlSearchLocal) === 0)) { result = fixUrl(result) } else if (result && Array.isArray(result)) { diff --git a/src/middleware/fixImageUrlsMiddleware.spec.js b/src/middleware/fixImageUrlsMiddleware.spec.js new file mode 100644 index 000000000..837bd0d82 --- /dev/null +++ b/src/middleware/fixImageUrlsMiddleware.spec.js @@ -0,0 +1,30 @@ +import { fixImageURLs } from './fixImageUrlsMiddleware' + +describe('fixImageURLs', () => { + describe('image url of legacy alpha', () => { + it('removes domain', () => { + const url = 'https://api-alpha.human-connection.org/uploads/4bfaf9172c4ba03d7645108bbbd16f0a696a37d01eacd025fb131e5da61b15d9.png' + expect(fixImageURLs(url)).toEqual('/uploads/4bfaf9172c4ba03d7645108bbbd16f0a696a37d01eacd025fb131e5da61b15d9.png') + }) + }) + + describe('image url of legacy staging', () => { + it('removes domain', () => { + const url = 'https://staging-api.human-connection.org/uploads/1b3c39a24f27e2fb62b69074b2f71363b63b263f0c4574047d279967124c026e.jpeg' + 
expect(fixImageURLs(url)).toEqual('/uploads/1b3c39a24f27e2fb62b69074b2f71363b63b263f0c4574047d279967124c026e.jpeg') + }) + }) + + describe('object', () => { + it('returns untouched', () => { + const object = { some: 'thing' } + expect(fixImageURLs(object)).toEqual(object) + }) + }) + + describe('some string', () => { + it('returns untouched', () => {}) + const string = "Yeah I'm a String" + expect(fixImageURLs(string)).toEqual(string) + }) +}) From 5701185d3480b482efdb82ae580cecd316f608b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Sat, 19 Jan 2019 00:36:55 +0100 Subject: [PATCH 13/17] Fix urls for staging-alpha --- src/middleware/fixImageUrlsMiddleware.js | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/src/middleware/fixImageUrlsMiddleware.js b/src/middleware/fixImageUrlsMiddleware.js index 8d72e2233..eacf5a090 100644 --- a/src/middleware/fixImageUrlsMiddleware.js +++ b/src/middleware/fixImageUrlsMiddleware.js @@ -1,14 +1,25 @@ -const urlSearchAlpha = 'https://api-alpha.human-connection.org' -const urlSearchLocal = 'http://localhost:3000' +const legacyUrls = [ + 'https://api-alpha.human-connection.org', + 'https://staging-api.human-connection.org', + 'http://localhost:3000' +] export const fixUrl = (url) => { - url = url.replace(urlSearchAlpha, '') - url = url.replace(urlSearchLocal, '') + legacyUrls.forEach((legacyUrl) => { + url = url.replace(legacyUrl, '') + }) return url } + +const checkUrl = (thing) => { + return thing && typeof thing === 'string' && legacyUrls.find((legacyUrl) => { + return thing.indexOf(legacyUrl) === 0 + }) +} + export const fixImageURLs = (result, recursive) => { - if (result && typeof result === 'string' && (result.indexOf(urlSearchAlpha) === 0 || result.indexOf(urlSearchLocal) === 0)) { + if(checkUrl(result)) { result = fixUrl(result) } else if (result && Array.isArray(result)) { result.forEach((res, index) => { From 8591816dfa7894b26ae776f9a2c4e69d01de9852 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Sat, 19 Jan 2019 00:57:12 +0100 Subject: [PATCH 14/17] Fix CodeFactor --- db-migration-worker/Dockerfile | 8 +++++--- db-migration-worker/import.sh | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/db-migration-worker/Dockerfile b/db-migration-worker/Dockerfile index 139b3403a..92dff618f 100644 --- a/db-migration-worker/Dockerfile +++ b/db-migration-worker/Dockerfile @@ -1,7 +1,9 @@ -FROM mongo:latest +FROM mongo:4 -RUN apt-get update -RUN apt-get -y install openssh-client +RUN apt-get update \ + && apt-get -y install --no-install-recommends openssh-client \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* COPY .ssh /root/.ssh/ COPY import.sh . diff --git a/db-migration-worker/import.sh b/db-migration-worker/import.sh index b1042844b..ba07217c0 100755 --- a/db-migration-worker/import.sh +++ b/db-migration-worker/import.sh @@ -4,7 +4,7 @@ for var in "SSH_USERNAME" "SSH_HOST" "MONGODB_USERNAME" "MONGODB_PASSWORD" "MONG do if [[ -z "${!var}" ]]; then echo "${var} is undefined" - exit -1 + exit 1 fi done From d0fa8b7e21fcba8c3943d3b9266cb7a6733d2a25 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Sat, 19 Jan 2019 01:04:42 +0100 Subject: [PATCH 15/17] Run `yarn run lint --fix` --- docker-compose.yml | 2 +- src/middleware/fixImageUrlsMiddleware.js | 2 +- src/middleware/fixImageUrlsMiddleware.spec.js | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index df8de6b01..6905bb893 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -17,7 +17,7 @@ services: - GRAPHQL_PORT=4000 - GRAPHQL_URI=http://localhost:4000 - CLIENT_URI=http://localhost:3000 - - JWT_SECRET="b/&&7b78BF&fv/Vd" + - JWT_SECRET=b/&&7b78BF&fv/Vd - MOCK=false - MAPBOX_TOKEN=pk.eyJ1IjoiaHVtYW4tY29ubmVjdGlvbiIsImEiOiJjajl0cnBubGoweTVlM3VwZ2lzNTNud3ZtIn0.KZ8KK9l70omjXbEkkbHGsQ diff --git a/src/middleware/fixImageUrlsMiddleware.js 
b/src/middleware/fixImageUrlsMiddleware.js index eacf5a090..c3b828dae 100644 --- a/src/middleware/fixImageUrlsMiddleware.js +++ b/src/middleware/fixImageUrlsMiddleware.js @@ -19,7 +19,7 @@ const checkUrl = (thing) => { } export const fixImageURLs = (result, recursive) => { - if(checkUrl(result)) { + if (checkUrl(result)) { result = fixUrl(result) } else if (result && Array.isArray(result)) { result.forEach((res, index) => { diff --git a/src/middleware/fixImageUrlsMiddleware.spec.js b/src/middleware/fixImageUrlsMiddleware.spec.js index 837bd0d82..081154c5c 100644 --- a/src/middleware/fixImageUrlsMiddleware.spec.js +++ b/src/middleware/fixImageUrlsMiddleware.spec.js @@ -24,7 +24,7 @@ describe('fixImageURLs', () => { describe('some string', () => { it('returns untouched', () => {}) - const string = "Yeah I'm a String" - expect(fixImageURLs(string)).toEqual(string) + const string = 'Yeah I\'m a String' + expect(fixImageURLs(string)).toEqual(string) }) }) From 3aef664c8c087e4ea6a734189cad21ec730a3468 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Sun, 20 Jan 2019 23:18:42 +0100 Subject: [PATCH 16/17] Import collections in order --- neo4j/import/import.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/neo4j/import/import.sh b/neo4j/import/import.sh index 88a2666dc..a08115005 100755 --- a/neo4j/import/import.sh +++ b/neo4j/import/import.sh @@ -1,4 +1,7 @@ #!/usr/bin/env bash SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" echo "MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE n,r;" | cypher-shell -cat $SCRIPT_DIRECTORY/*.cql | cypher-shell +for collection in "users" "contributions" "comments" +do + echo "Import ${collection}..." 
&& cat $SCRIPT_DIRECTORY/$collection.cql | cypher-shell +done From 5ccfed29945aa3c4b5d0d3d59bc5992600ca2ed4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Sch=C3=A4fer?= Date: Sun, 20 Jan 2019 23:32:56 +0100 Subject: [PATCH 17/17] Fix CodeFactor --- neo4j/import/import.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neo4j/import/import.sh b/neo4j/import/import.sh index a08115005..319f1a591 100755 --- a/neo4j/import/import.sh +++ b/neo4j/import/import.sh @@ -3,5 +3,5 @@ SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pw echo "MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE n,r;" | cypher-shell for collection in "users" "contributions" "comments" do - echo "Import ${collection}..." && cat $SCRIPT_DIRECTORY/$collection.cql | cypher-shell + echo "Import ${collection}..." && cypher-shell < "$SCRIPT_DIRECTORY/$collection.cql" done