Merge branch 'master' into report-content
commit f273240fd8
@@ -6,6 +6,7 @@
.env
Dockerfile
docker-compose*.yml
./*.png
./*.log
@@ -14,3 +15,6 @@ kubernetes/
node_modules/
scripts/
dist/
db-migration-worker/
neo4j/

1 db-migration-worker/.gitignore vendored Normal file
@@ -0,0 +1 @@
.ssh/id_rsa

1 db-migration-worker/.ssh/.dockerignore Normal file
@@ -0,0 +1 @@
.ssh/id_rsa

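Both ignore entries keep the private deploy key (db-migration-worker/.ssh/id_rsa) out of version control and out of the image; at runtime the compose override further down bind-mounts the .ssh directory into the container instead. A sketch of placing an existing key, where the source path is a placeholder and not part of this commit:

    cp ~/.ssh/id_rsa db-migration-worker/.ssh/id_rsa
    chmod 600 db-migration-worker/.ssh/id_rsa
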
3 db-migration-worker/.ssh/known_hosts Normal file
@@ -0,0 +1,3 @@
|1|GuOYlVEhTowidPs18zj9p5F2j3o=|sDHJYLz9Ftv11oXeGEjs7SpVyg0= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBM5N29bI5CeKu1/RBPyM2fwyf7fuajOO+tyhKe1+CC2sZ1XNB5Ff6t6MtCLNRv2mUuvzTbW/HkisDiA5tuXUHOk=
|1|2KP9NV+Q5g2MrtjAeFSVcs8YeOI=|nf3h4wWVwC4xbBS1kzgzE2tBldk= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNhRK6BeIEUxXlS0z/pOfkUkSPfn33g4J1U3L+MyUQYHm+7agT08799ANJhnvELKE1tt4Vx80I9UR81oxzZcy3E=
|1|HonYIRNhKyroUHPKU1HSZw0+Qzs=|5T1btfwFBz2vNSldhqAIfTbfIgQ= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNhRK6BeIEUxXlS0z/pOfkUkSPfn33g4J1U3L+MyUQYHm+7agT08799ANJhnvELKE1tt4Vx80I9UR81oxzZcy3E=

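Pre-seeding known_hosts lets the worker connect without an interactive host-key prompt; the entries are hashed (HashKnownHosts format), so the legacy host name is not readable from the file. A sketch of how such entries can be regenerated, where the host name is a placeholder:

    ssh-keyscan -H -t ecdsa legacy-db.example.org >> db-migration-worker/.ssh/known_hosts
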
9 db-migration-worker/Dockerfile Normal file
@@ -0,0 +1,9 @@
FROM mongo:4

RUN apt-get update \
  && apt-get -y install --no-install-recommends openssh-client \
  && apt-get clean \
  && rm -rf /var/lib/apt/lists/*
COPY .ssh /root/.ssh/
COPY import.sh .

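The worker image is plain mongo:4 plus an SSH client, the prepared .ssh directory, and the import script. A sketch of building it on its own; normally docker-compose builds it from the context declared in the override further down, and the tag here is only illustrative:

    docker build -t db-migration-worker ./db-migration-worker
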
30 db-migration-worker/import.sh Executable file
@@ -0,0 +1,30 @@
#!/usr/bin/env bash

for var in "SSH_USERNAME" "SSH_HOST" "MONGODB_USERNAME" "MONGODB_PASSWORD" "MONGODB_DATABASE" "MONGODB_AUTH_DB"
do
  if [[ -z "${!var}" ]]; then
    echo "${var} is undefined"
    exit 1
  fi
done

echo "SSH_USERNAME ${SSH_USERNAME}"
echo "SSH_HOST ${SSH_HOST}"
echo "MONGODB_USERNAME ${MONGODB_USERNAME}"
echo "MONGODB_PASSWORD ${MONGODB_PASSWORD}"
echo "MONGODB_DATABASE ${MONGODB_DATABASE}"
echo "MONGODB_AUTH_DB ${MONGODB_AUTH_DB}"
echo "-------------------------------------------------"

mongo ${MONGODB_DATABASE} --eval "db.dropDatabase();"
rm -f /mongo-export/*

ssh -4 -M -S my-ctrl-socket -fnNT -L 27018:localhost:27017 -l ${SSH_USERNAME} ${SSH_HOST}
mongodump --host localhost -d ${MONGODB_DATABASE} --port 27018 --username ${MONGODB_USERNAME} --password ${MONGODB_PASSWORD} --authenticationDatabase ${MONGODB_AUTH_DB} --gzip --archive | mongorestore --gzip --archive
ssh -S my-ctrl-socket -O check -l ${SSH_USERNAME} ${SSH_HOST}
ssh -S my-ctrl-socket -O exit -l ${SSH_USERNAME} ${SSH_HOST}

for collection in "categories" "badges" "users" "contributions" "comments" "follows" "shouts"
do
  mongoexport --db ${MONGODB_DATABASE} --collection $collection --out "/mongo-export/$collection.json"
done

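In short: the script aborts unless all six variables are set, drops the local copy of the database, opens an SSH ControlMaster tunnel that forwards local port 27018 to the legacy host's mongod on 27017, streams mongodump through mongorestore over that tunnel, checks and closes the master socket, and finally exports each collection as JSON into /mongo-export for the Neo4j import. A sketch of invoking it once the stack is running; whether it is triggered exactly this way is an assumption:

    docker-compose up -d db-migration-worker
    docker-compose exec db-migration-worker ./import.sh
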
@@ -11,6 +11,30 @@ services:
      - /nitro-backend/node_modules
    command: yarn run dev
  neo4j:
    volumes:
      - mongo-export:/mongo-export
      - ./neo4j/import:/var/lib/neo4j/import
    ports:
      - 7687:7687
      - 7474:7474
    environment:
      - NEO4J_apoc_import_file_enabled=true
  db-migration-worker:
    build:
      context: db-migration-worker
    volumes:
      - mongo-export:/mongo-export
      - ./db-migration-worker/.ssh/:/root/.ssh/
    networks:
      - hc-network
    environment:
      - "SSH_USERNAME=${SSH_USERNAME}"
      - "SSH_HOST=${SSH_HOST}"
      - "MONGODB_USERNAME=${MONGODB_USERNAME}"
      - "MONGODB_PASSWORD=${MONGODB_PASSWORD}"
      - "MONGODB_AUTH_DB=${MONGODB_AUTH_DB}"
      - "MONGODB_DATABASE=${MONGODB_DATABASE}"
    command: "--smallfiles --logpath=/dev/null"

volumes:
  mongo-export:

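docker-compose substitutes the SSH_* and MONGODB_* values from the host environment or an .env file before starting the worker. A sketch with placeholder values only; none of these come from the commit:

    export SSH_USERNAME=deploy
    export SSH_HOST=legacy.example.org
    export MONGODB_USERNAME=hc-api
    export MONGODB_PASSWORD=changeme
    export MONGODB_DATABASE=hc_api
    export MONGODB_AUTH_DB=admin
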
@@ -17,15 +17,14 @@ services:
      - GRAPHQL_PORT=4000
      - GRAPHQL_URI=http://localhost:4000
      - CLIENT_URI=http://localhost:3000
      - JWT_SECRET="b/&&7b78BF&fv/Vd"
      - JWT_SECRET=b/&&7b78BF&fv/Vd
      - MOCK=false
      - MAPBOX_TOKEN=pk.eyJ1IjoiaHVtYW4tY29ubmVjdGlvbiIsImEiOiJjajl0cnBubGoweTVlM3VwZ2lzNTNud3ZtIn0.KZ8KK9l70omjXbEkkbHGsQ

  neo4j:
    image: humanconnection/neo4j:latest
    build:
      context: .
      dockerfile: Dockerfile.neo4j
      context: neo4j
    networks:
      - hc-network
    volumes:

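Dropping the quotes matters because, inside a YAML list item, they are part of the scalar and would end up in the JWT_SECRET environment variable verbatim. A quick check after recreating the service; the service name "backend" is an assumption:

    docker-compose exec backend printenv JWT_SECRET
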
@@ -1,2 +1,3 @@
FROM neo4j:3.5.0
RUN wget https://github.com/neo4j-contrib/neo4j-apoc-procedures/releases/download/3.5.0.1/apoc-3.5.0.1-all.jar -P plugins/
COPY import ./import

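With the compose build context now pointing at neo4j/, the same image (Neo4j 3.5.0 plus the APOC plugin and the import scripts) can also be built by hand; a sketch using the tag from docker-compose.yml:

    docker build -t humanconnection/neo4j:latest ./neo4j
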
12 neo4j/import/comments.cql Normal file
@@ -0,0 +1,12 @@
CALL apoc.load.json('file:/mongo-export/comments.json') YIELD value as comment
MERGE (c:Comment {id: comment._id["$oid"]})
ON CREATE SET
  c.content = comment.content,
  c.contentExcerpt = comment.contentExcerpt,
  c.deleted = comment.deleted,
  c.disabled = false
WITH c, comment
MATCH (p:Post {id: comment.contributionId}), (u:User {id: comment.userId})
MERGE (c)-[:COMMENTS]->(p)
MERGE (u)-[:WROTE]->(c)
;

23 neo4j/import/contributions.cql Normal file
@@ -0,0 +1,23 @@
CALL apoc.load.json('file:/mongo-export/contributions.json') YIELD value as post
MERGE (p:Post {id: post._id["$oid"]})
ON CREATE SET
  p.title = post.title,
  p.slug = post.slug,
  p.image = post.teaserImg,
  p.content = post.content,
  p.contentExcerpt = post.contentExcerpt,
  p.visibility = toLower(post.visibility),
  p.createdAt = post.createdAt.`$date`,
  p.updatedAt = post.updatedAt.`$date`,
  p.deleted = post.deleted,
  p.disabled = NOT post.isEnabled
WITH p, post, post.tags AS tags, post.categoryIds as categoryIds
UNWIND tags AS tag
UNWIND categoryIds AS categoryId
MATCH (c:Category {id: categoryId}),
      (u:User {id: post.userId})
MERGE (t:Tag {id: apoc.create.uuid(), name: tag})
MERGE (p)-[:TAGGED]->(t)
MERGE (u)-[:WROTE]->(p)
MERGE (p)-[:CATEGORIZED]->(c)
;

7 neo4j/import/import.sh Executable file
@@ -0,0 +1,7 @@
#!/usr/bin/env bash
SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE n,r;" | cypher-shell
for collection in "users" "contributions" "comments"
do
  echo "Import ${collection}..." && cypher-shell < $SCRIPT_DIRECTORY/$collection.cql
done

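This script wipes the graph and then replays the per-collection Cypher files, so it has to run where cypher-shell and the mounted /mongo-export data are available, i.e. inside the neo4j container. A sketch of one way to call it; the exact invocation is an assumption:

    docker-compose exec neo4j ./import/import.sh
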
29 neo4j/import/todo Normal file
@@ -0,0 +1,29 @@
CALL apoc.load.json('file:/mongo-export/categories.json') YIELD value as category
MERGE(c:Category {id: category._id["$oid"]})
ON CREATE SET c.name = category.title,
  c.slug = category.slug,
  c.icon = category.icon


CALL apoc.load.json('file:/mongo-export/badges.json') YIELD value as badge
MERGE(b:Badge {id: badge._id["$oid"]})
ON CREATE SET b.key = badge.key,
  b.type = badge.type,
  b.icon = badge.image.path,
  b.status = badge.status


CALL apoc.load.json('file:/mongo-export/follows.json') YIELD value as follow
MATCH (u1:User {id: follow.userId}),
      (u2:User {id: follow.foreignId})
MERGE (u1)-[:FOLLOWS]->(u2)


CALL apoc.load.json('file:/mongo-export/shouts.json') YIELD value as shout
MATCH (u:User {id: shout.userId}),
      (p:Post {id: shout.foreignId})
MERGE (u)-[:SHOUTED]->(p)

20 neo4j/import/users.cql Normal file
@@ -0,0 +1,20 @@
CALL apoc.load.json('file:/mongo-export/users.json') YIELD value as user
MERGE(u:User {id: user._id["$oid"]})
ON CREATE SET
  u.name = user.name,
  u.slug = user.slug,
  u.email = user.email,
  u.password = user.password,
  u.avatar = user.avatar,
  u.coverImg = user.coverImg,
  u.wasInvited = user.wasInvited,
  u.role = toLower(user.role),
  u.createdAt = user.createdAt.`$date`,
  u.updatedAt = user.updatedAt.`$date`,
  u.deleted = user.deletedAt IS NOT NULL,
  u.disabled = false
WITH u, user, user.badgeIds AS badgeIds
UNWIND badgeIds AS badgeId
MATCH (b:Badge {id: badgeId})
MERGE (b)-[:REWARDED]->(u)
;

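After an import run, a quick sanity check against the graph can confirm that users arrived; a sketch, assuming cypher-shell is configured with valid credentials inside the container:

    docker-compose exec neo4j bash -c 'echo "MATCH (u:User) RETURN count(u);" | cypher-shell'
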
@@ -1,14 +1,25 @@

const urlSearchAlpha = 'https://api-alpha.human-connection.org'
const urlSearchLocal = 'http://localhost:3000'
const legacyUrls = [
  'https://api-alpha.human-connection.org',
  'https://staging-api.human-connection.org',
  'http://localhost:3000'
]

export const fixUrl = (url) => {
  url = url.replace(urlSearchAlpha, '')
  url = url.replace(urlSearchLocal, '')
  legacyUrls.forEach((legacyUrl) => {
    url = url.replace(legacyUrl, '')
  })
  return url
}
const fixImageURLs = (result, recursive) => {
  if (result && typeof result === 'string' && (result.indexOf(urlSearchAlpha) === 0 || result.indexOf(urlSearchLocal) === 0)) {

const checkUrl = (thing) => {
  return thing && typeof thing === 'string' && legacyUrls.find((legacyUrl) => {
    return thing.indexOf(legacyUrl) === 0
  })
}

export const fixImageURLs = (result, recursive) => {
  if (checkUrl(result)) {
    result = fixUrl(result)
  } else if (result && Array.isArray(result)) {
    result.forEach((res, index) => {

30 src/middleware/fixImageUrlsMiddleware.spec.js Normal file
@@ -0,0 +1,30 @@
import { fixImageURLs } from './fixImageUrlsMiddleware'

describe('fixImageURLs', () => {
  describe('image url of legacy alpha', () => {
    it('removes domain', () => {
      const url = 'https://api-alpha.human-connection.org/uploads/4bfaf9172c4ba03d7645108bbbd16f0a696a37d01eacd025fb131e5da61b15d9.png'
      expect(fixImageURLs(url)).toEqual('/uploads/4bfaf9172c4ba03d7645108bbbd16f0a696a37d01eacd025fb131e5da61b15d9.png')
    })
  })

  describe('image url of legacy staging', () => {
    it('removes domain', () => {
      const url = 'https://staging-api.human-connection.org/uploads/1b3c39a24f27e2fb62b69074b2f71363b63b263f0c4574047d279967124c026e.jpeg'
      expect(fixImageURLs(url)).toEqual('/uploads/1b3c39a24f27e2fb62b69074b2f71363b63b263f0c4574047d279967124c026e.jpeg')
    })
  })

  describe('object', () => {
    it('returns untouched', () => {
      const object = { some: 'thing' }
      expect(fixImageURLs(object)).toEqual(object)
    })
  })

  describe('some string', () => {
    it('returns untouched', () => {
      const string = 'Yeah I\'m a String'
      expect(fixImageURLs(string)).toEqual(string)
    })
  })
})

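A sketch of running only this spec from the backend package; the exact Jest setup and script name are assumptions:

    yarn jest src/middleware/fixImageUrlsMiddleware.spec.js
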
@@ -11,7 +11,12 @@ if (process.env.NODE_ENV === 'production') {
const driver = neo4j().getDriver()
const session = driver.session()

query('MATCH (n) DETACH DELETE n', session).then(() => {
const deleteAll = `
  MATCH (n)
  OPTIONAL MATCH (n)-[r]-()
  DELETE n,r
`
query(deleteAll, session).then(() => {
  /* eslint-disable-next-line no-console */
  console.log('Successfully deleted all nodes and relations!')
}).catch((err) => {