Mirror of https://github.com/overleaf/overleaf.git, synced 2025-12-05 01:10:29 +00:00.

Compare commits: fa1aa0116a ... ff8e9394f3 (27 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | ff8e9394f3 |  |
|  | 2aeb788e3f |  |
|  | 76024341f8 |  |
|  | f372fd07f2 |  |
|  | 5bbc5b2e58 |  |
|  | c1dc70fc92 |  |
|  | 83db1e858f |  |
|  | 0b0de92444 |  |
|  | a1197310c2 |  |
|  | 0c07e5ff40 |  |
|  | c4d02f4571 |  |
|  | d531613c7a |  |
|  | 24015a4c1f |  |
|  | 4853352040 |  |
|  | 8509a4cd9b |  |
|  | ea5de4984f |  |
|  | 580fc2bad6 |  |
|  | 38ca13c26f |  |
|  | 3832c46f1c |  |
|  | c25e49782f |  |
|  | 127d8273dc |  |
|  | 6354310656 |  |
|  | eafef60b75 |  |
|  | 5982eed3fa |  |
|  | a1f1ca2028 |  |
|  | f1e788d9b3 |  |
|  | a3ec5b2797 |  |
@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/access-token-encryptor/**
package-lock.json
package.json
patches/**
libraries/access-token-encryptor/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/access-token-encryptor/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/access-token-encryptor monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/access-token-encryptor/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'access-token-encryptor-eslint', name: 'access-token-encryptor eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/access-token-encryptor/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/access-token-encryptor monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/access-token-encryptor monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/access-token-encryptor monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'access-token-encryptor test results', testResults: 'libraries/access-token-encryptor/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/access-token-encryptor/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
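A note on the 'Set Build Variables' block that repeats in every deleted pipeline above and below: for PR builds, Jenkins checks out an ephemeral merge commit rather than the branch head, so build metadata would otherwise point at a SHA that exists nowhere on the remote. A minimal standalone sketch of the same first-parent resolution, runnable with plain Groovy and git on PATH (the printed label is illustrative, not from the repo):

// Sketch only: mirrors the deleted pipelines' logic outside Jenkins.
// `git rev-parse HEAD^@` prints every parent of HEAD, one per line;
// per the pipeline above, the first parent of the merge commit is the
// branch commit worth reporting against.
def parents = 'git rev-parse HEAD^@'.execute().text.trim().split('\n')
def sha = parents.size() >= 2
    ? parents[0]                                   // merge commit: take first parent
    : 'git rev-parse HEAD'.execute().text.trim()   // ordinary commit: use HEAD itself
println "relevant commit: ${sha.take(7)}"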
@@ -1,8 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/o-error/**
package-lock.json
package.json
patches/**
libraries/fetch-utils/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/fetch-utils/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/fetch-utils monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/fetch-utils/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'fetch-utils-eslint', name: 'fetch-utils eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/fetch-utils/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/fetch-utils monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/fetch-utils monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/fetch-utils monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'fetch-utils test results', testResults: 'libraries/fetch-utils/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/fetch-utils/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
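Another repeated pattern worth calling out: in each Lint stage's post step, ESLint runs inside a container where the monorepo is mounted at /overleaf, but the recordIssues step resolves annotated sources against the agent's /workspace checkout, so the JSON report's absolute paths are rewritten with sed first. A hypothetical helper capturing the pattern (the function name is mine, not from the repo; `sh` assumes a pipeline script context):

// Hypothetical extraction of the repeated sed one-liner; `report` is a
// path on the agent, and the substitution swaps the container prefix
// (/overleaf) for the agent workspace prefix (/workspace) in ESLint's
// "filePath" fields.
def remapEslintReport(String report) {
  sh "sed -i 's_\"filePath\":\"/overleaf_\"filePath\":\"/workspace_g' ${report}"
}

Each pipeline could then call remapEslintReport('libraries/<name>/reports/eslint.json') before recordIssues applies its one-warning quality gate.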
@@ -1,9 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/o-error/**
package-lock.json
package.json
patches/**
libraries/logger/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/logger/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/logger monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/logger/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'logger-eslint', name: 'logger eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/logger/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/logger monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/logger monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/logger monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'logger test results', testResults: 'libraries/logger/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/logger/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/metrics/**
package-lock.json
package.json
patches/**
libraries/metrics/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/metrics/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/metrics monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/metrics/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'metrics-eslint', name: 'metrics eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/metrics/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/metrics monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/metrics monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/metrics monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'metrics test results', testResults: 'libraries/metrics/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/metrics/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/mongo-utils/**
package-lock.json
package.json
patches/**
libraries/mongo-utils/Jenkinsfile (vendored, 108 lines)
@@ -1,108 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/mongo-utils/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/mongo-utils monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/mongo-utils/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'mongo-utils-eslint', name: 'mongo-utils eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/mongo-utils/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/mongo-utils monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/mongo-utils monorepo npm run types:check'
          }
        }
      }
    }
  }
  post {
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/mongo-utils/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/o-error/**
package-lock.json
package.json
patches/**
libraries/o-error/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/o-error/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/o-error monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/o-error/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'o-error-eslint', name: 'o-error eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/o-error/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/o-error monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/o-error monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/o-error monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'o-error test results', testResults: 'libraries/o-error/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/o-error/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,12 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/o-error/**
libraries/object-persistor/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
libraries/object-persistor/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/object-persistor/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/object-persistor monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/object-persistor/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'object-persistor-eslint', name: 'object-persistor eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/object-persistor/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/object-persistor monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/object-persistor monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/object-persistor monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'object-persistor test results', testResults: 'libraries/object-persistor/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/object-persistor/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,8 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/o-error/**
libraries/overleaf-editor-core/**
package-lock.json
package.json
patches/**
libraries/overleaf-editor-core/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/overleaf-editor-core/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/overleaf-editor-core monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/overleaf-editor-core/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'overleaf-editor-core-eslint', name: 'overleaf-editor-core eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/overleaf-editor-core/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/overleaf-editor-core monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/overleaf-editor-core monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/overleaf-editor-core monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'overleaf-editor-core test results', testResults: 'libraries/overleaf-editor-core/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/overleaf-editor-core/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/promise-utils/**
package-lock.json
package.json
patches/**
libraries/promise-utils/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/promise-utils/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/promise-utils monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/promise-utils/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'promise-utils-eslint', name: 'promise-utils eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/promise-utils/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/promise-utils monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/promise-utils monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/promise-utils monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'promise-utils test results', testResults: 'libraries/promise-utils/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/promise-utils/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/ranges-tracker/**
package-lock.json
package.json
patches/**
libraries/ranges-tracker/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/ranges-tracker/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/ranges-tracker monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/ranges-tracker/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'ranges-tracker-eslint', name: 'ranges-tracker eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/ranges-tracker/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/ranges-tracker monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/ranges-tracker monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/ranges-tracker monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'ranges-tracker test results', testResults: 'libraries/ranges-tracker/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/ranges-tracker/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,10 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/o-error/**
libraries/redis-wrapper/**
package-lock.json
package.json
patches/**
libraries/redis-wrapper/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/redis-wrapper/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/redis-wrapper monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/redis-wrapper/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'redis-wrapper-eslint', name: 'redis-wrapper eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/redis-wrapper/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/redis-wrapper monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/redis-wrapper monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/redis-wrapper monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'redis-wrapper test results', testResults: 'libraries/redis-wrapper/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/redis-wrapper/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/settings/**
package-lock.json
package.json
patches/**
libraries/settings/Jenkinsfile (vendored, 108 lines)
@@ -1,108 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/settings/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/settings monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/settings/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'settings-eslint', name: 'settings eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/settings/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/settings monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/settings monorepo npm run types:check'
          }
        }
      }
    }
  }
  post {
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/settings/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/stream-utils/**
package-lock.json
package.json
patches/**
libraries/stream-utils/Jenkinsfile (vendored, 118 lines)
@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/stream-utils/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/stream-utils monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/stream-utils/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'stream-utils-eslint', name: 'stream-utils eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/stream-utils/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/stream-utils monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/stream-utils monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/stream-utils monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'stream-utils test results', testResults: 'libraries/stream-utils/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      sh 'rm -rf libraries/stream-utils/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,8 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/o-error/**
libraries/validation-tools/**
package-lock.json
package.json
patches/**
libraries/validation-tools/Jenkinsfile (vendored, 109 lines)
@@ -1,109 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Install monorepo') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir libraries/validation-tools/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Lint') {
          steps {
            sh 'bin/run -w /overleaf/libraries/validation-tools monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/validation-tools/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'validation-tools-eslint', name: 'validation-tools eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/validation-tools/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            sh 'bin/run -w /overleaf/libraries/validation-tools monorepo npm run format'
          }
        }
        stage('Typecheck') {
          steps {
            sh 'bin/run -w /overleaf/libraries/validation-tools monorepo npm run types:check'
          }
        }
        stage('Test') {
          steps {
            retry(count: 3) {
              sh 'bin/run -w /overleaf/libraries/validation-tools monorepo npm run test:ci'
            }
          }
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'validation-tools test results', testResults: 'libraries/validation-tools/reports/junit-*.xml'
    }
    cleanup {
      sh 'rm -rf libraries/validation-tools/reports'
      sh 'make clean_jenkins -j10'
    }
  }
}
1574
package-lock.json
generated
File diff suppressed because it is too large
@@ -10,6 +10,7 @@
     "@types/mocha": "^10.0.6",
     "@typescript-eslint/eslint-plugin": "^8.30.1",
     "@typescript-eslint/parser": "^8.30.1",
+    "@vitest/eslint-plugin": "^1.5.0",
     "eslint": "^8.15.0",
     "eslint-config-prettier": "^8.5.0",
     "eslint-config-standard": "^17.0.0",
@@ -39,6 +40,9 @@
     "request@2.88.2": {
       "tough-cookie": "5.1.2",
       "form-data": "2.5.5"
     },
+    "streamdown": {
+      "mermaid": "file:./services/web/stubs/mermaid-stub"
+    }
   },
   "scripts": {
@@ -1,34 +0,0 @@
copybara/**

libraries/**

patches/**

server-ce/**
server-pro/**

# echo chat clsi contacts docstore document-updater filestore history-v1 notifications project-history real-time references templates web | xargs -n1 echo | xargs -I% echo 'services/%/**'
# BEGIN GENERATED
services/chat/**
services/clsi/**
services/contacts/**
services/docstore/**
services/document-updater/**
services/filestore/**
services/history-v1/**
services/notifications/**
services/project-history/**
services/real-time/**
services/references/**
services/templates/**
services/web/**
# END GENERATED

tools/migrations/**

.dockerignore
.eslint*
.pretter*
package.json
package-lock.json
tsconfig.backend.json
385
server-ce/test/Jenkinsfile
vendored
@@ -1,385 +0,0 @@
// Initialize variables to signal that a given stage finished.
// We use them to build a graph of interconnected steps/dependencies.
// - Incoming edges use "waitUntil" and reference the given variables of dependencies.
// - Outgoing edges set the given variable to true.
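// A sketch of the pattern as used below: a downstream stage blocks on its
// dependency's flag via
//   script { waitUntil { return job_npm_install_done } }
// while the upstream stage flips its own flag as a final step via
//   script { job_npm_install_done = true }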
def job_copybara_done = false
def job_npm_install_done = false
def job_prefetch_custom_done = false
def job_prefetch_default_done = false
def job_server_ce_build_done = false
def job_server_pro_build_done = false

pipeline {
  agent {
    node {
      // Select a VM with the given label.
      label 'jenkins-agent-web'
      // Use the monorepo checkout in /workspace.
      customWorkspace '/workspace'
    }
  }
  options {
    // Print timestamp next to each log line.
    timestamps()
    // Abort build after hitting first failure.
    parallelsAlwaysFailFast()
    timeout(time: 20, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${GIT_BRANCH.replace('origin/', '')}"
    COMMIT_SHA = "${GIT_COMMIT}"
    SHORT_SHA = "${GIT_COMMIT.take(7)}"
    OVERLEAF_BASE_BRANCH = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base-internal:${BRANCH_NAME}"
    OVERLEAF_BASE_LATEST = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base-internal:main"
    OVERLEAF_BASE_TAG = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base-internal:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}"
    OVERLEAF_BRANCH = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-internal:${BRANCH_NAME}"
    OVERLEAF_LATEST = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-internal:main"
    OVERLEAF_TAG = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-internal:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}"
    IMAGE_TAG_CE = "${OVERLEAF_TAG}"
    IMAGE_TAG_PRO = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}"
    OVERLEAF_PRO_TAG_BRANCH = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro-internal:${BRANCH_NAME}"
    OVERLEAF_PRO_TAG_LATEST = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro-internal:main"
  }
  stages {
    stage('Create reports folder') {
      steps {
        sh 'mkdir server-ce/test/reports'
      }
    }
    stage('Parallel') {
      parallel {
        stage('Install deps') {
          steps {
            retry(count: 3) {
              sh 'make monorepo_setup'
            }
            script {
              job_npm_install_done = true
            }
          }
        }
        stage('shellcheck') {
          steps {
            dir('server-ce') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Format') {
          steps {
            script {
              waitUntil {
                return job_npm_install_done
              }
            }
            sh 'bin/run -w /overleaf/server-ce/test monorepo npm run format'
          }
        }
        stage('Lint') {
          steps {
            script {
              waitUntil {
                return job_npm_install_done
              }
            }
            sh 'bin/run -w /overleaf/server-ce/test monorepo npm run lint -- --format json --output-file reports/eslint.json'
          }
          post {
            always {
              sh """
                sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' server-ce/test/reports/eslint.json
              """
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'server-pro-e2e-tests-eslint', name: 'Server-Pro-E2E-Tests eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'server-ce/test/reports/eslint.json')]
            }
          }
        }
        stage('Copybara') {
          steps {
            sh 'copybara/bin/sync'
            script {
              job_copybara_done = true
            }
          }
        }
        stage('Build CE image') {
          steps {
            script {
              waitUntil {
                return job_copybara_done
              }
            }
            dir('copybara/public/repo/server-ce') {
              sh 'make refresh-cache -j2'
              retry(count: 3) {
                sh 'make build-base'
              }
              retry(count: 3) {
                sh 'make build-community'
              }
            }
            script {
              job_server_ce_build_done = true
            }
          }
        }
        stage('Push CE to internal') {
          steps {
            script {
              waitUntil {
                return job_server_ce_build_done
              }
            }
            dir('copybara/public/repo/server-ce') {
              sh 'make push'
            }
          }
        }
        stage('Build Pro image') {
          environment {
            OVERLEAF_CE_TAG = "${OVERLEAF_TAG}"
            OVERLEAF_PRO_TAG = "${IMAGE_TAG_PRO}"
          }
          steps {
            script {
              waitUntil {
                return job_server_ce_build_done
              }
            }
            dir('server-pro') {
              retry(count: 3) {
                sh 'make build-ci'
              }
            }
            script {
              job_server_pro_build_done = true
            }
          }
        }
        stage('Push Pro to internal') {
          steps {
            script {
              waitUntil {
                return job_server_pro_build_done
              }
            }
            dir('server-pro') {
              sh 'make push_branch'
            }
          }
        }
        stage('Prefetch default') {
          steps {
            dir('server-ce/test') {
              sh 'make prefetch_default -j4'
            }
            script {
              job_prefetch_default_done = true
            }
          }
        }
        stage('Prefetch custom') {
          steps {
            dir('server-ce/test') {
              sh 'make prefetch_custom -j4'
            }
            script {
              job_prefetch_custom_done = true
            }
          }
        }
        stage('CE default') {
          environment {
            CYPRESS_SHARD = "CE_DEFAULT"
            COMPOSE_PROJECT_NAME = "test-ce-default"
          }
          steps {
            script {
              waitUntil {
                return job_npm_install_done && job_server_ce_build_done && job_prefetch_default_done
              }
            }
            dir('server-ce/test') {
              retry(count: 3) {
                sh 'make test-e2e'
              }
            }
          }
        }
        stage('CE custom 1') {
          environment {
            CYPRESS_SHARD = "CE_CUSTOM_1"
            COMPOSE_PROJECT_NAME = "test-ce-custom-1"
          }
          steps {
            script {
              waitUntil {
                return job_npm_install_done && job_server_ce_build_done && job_prefetch_default_done && job_prefetch_custom_done
              }
            }
            dir('server-ce/test') {
              retry(count: 3) {
                sh 'make test-e2e'
              }
            }
          }
        }
        stage('PRO default 1') {
          environment {
            CYPRESS_SHARD = "PRO_DEFAULT_1"
            COMPOSE_PROJECT_NAME = "test-pro-default-1"
          }
          steps {
            script {
              waitUntil {
                return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done
              }
            }
            dir('server-ce/test') {
              retry(count: 3) {
                sh 'make test-e2e'
              }
            }
          }
        }
        stage('PRO default 2') {
          environment {
            CYPRESS_SHARD = "PRO_DEFAULT_2"
            COMPOSE_PROJECT_NAME = "test-pro-default-2"
          }
          steps {
            script {
              waitUntil {
                return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done
              }
            }
            dir('server-ce/test') {
              retry(count: 3) {
                sh 'make test-e2e'
              }
            }
          }
        }
        stage('PRO custom 1') {
          environment {
            CYPRESS_SHARD = "PRO_CUSTOM_1"
            COMPOSE_PROJECT_NAME = "test-pro-custom-1"
          }
          steps {
            script {
              waitUntil {
                return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
              }
            }
            dir('server-ce/test') {
              retry(count: 3) {
                sh 'make test-e2e'
              }
            }
          }
        }
        stage('PRO custom 2') {
          environment {
            CYPRESS_SHARD = "PRO_CUSTOM_2"
            COMPOSE_PROJECT_NAME = "test-pro-custom-2"
          }
          steps {
            script {
              waitUntil {
                return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
              }
            }
            dir('server-ce/test') {
              retry(count: 3) {
                sh 'make test-e2e'
              }
            }
          }
        }
        stage('PRO custom 3') {
          environment {
            CYPRESS_SHARD = "PRO_CUSTOM_3"
            COMPOSE_PROJECT_NAME = "test-pro-custom-3"
          }
          steps {
            script {
              waitUntil {
                return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
              }
            }
            dir('server-ce/test') {
              retry(count: 3) {
                sh 'make test-e2e'
              }
            }
          }
        }
        stage('PRO custom 4') {
          environment {
            CYPRESS_SHARD = "PRO_CUSTOM_4"
            COMPOSE_PROJECT_NAME = "test-pro-custom-4"
          }
          steps {
            script {
              waitUntil {
                return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
              }
            }
            dir('server-ce/test') {
              retry(count: 3) {
                sh 'make test-e2e'
              }
            }
          }
        }
        stage('PRO custom 5') {
          environment {
            CYPRESS_SHARD = "PRO_CUSTOM_5"
            COMPOSE_PROJECT_NAME = "test-pro-custom-5"
          }
          steps {
            script {
              waitUntil {
                return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
              }
            }
            dir('server-ce/test') {
              retry(count: 3) {
                sh 'make test-e2e'
              }
            }
          }
        }
      }
    }
  }
  post {
    // Collect junit test results for both success and failure case.
    always {
      junit checksName: 'Server-Pro-E2E-Tests results', testResults: 'server-ce/test/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="B2B"'
          }
        }
      }
    }
    // Ensure tear down of test containers, remove CE docker images, then run general Jenkins VM cleanup.
    cleanup {
      dir('server-ce/test') {
        sh 'make clean -j10'
      }
      dir('server-ce') {
        sh 'make clean'
      }
      dir('server-pro') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}

// vim: set ft=groovy :
@@ -1,15 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/promise-utils/**
libraries/settings/**
package-lock.json
package.json
patches/**
services/chat/**
tools/migrations/**
161
services/chat/Jenkinsfile
vendored
@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/chat') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/chat/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/chat') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/chat') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/chat') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'chat-eslint', name: 'chat eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/chat/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/chat') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/chat') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/chat') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
          }
          steps {
            dir('services/chat') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/chat') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'chat test results', testResults: 'services/chat/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
          }
        }
      }
    }
    cleanup {
      dir('services/chat') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,14 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/o-error/**
libraries/promise-utils/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/clsi/**
167
services/clsi/Jenkinsfile
vendored
@@ -1,167 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/clsi') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Pull TL2017') {
          steps {
            // Remove after new worker VM image is live.
            sh 'docker pull us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2017.1'
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/clsi/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/clsi') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/clsi') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/clsi') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'clsi-eslint', name: 'clsi eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/clsi/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/clsi') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/clsi') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/clsi') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
          }
          steps {
            dir('services/clsi') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/clsi') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'clsi test results', testResults: 'services/clsi/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      dir('services/clsi') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,15 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/promise-utils/**
libraries/settings/**
package-lock.json
package.json
patches/**
services/contacts/**
tools/migrations/**
161
services/contacts/Jenkinsfile
vendored
@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/contacts') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/contacts/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/contacts') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/contacts') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/contacts') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'contacts-eslint', name: 'contacts eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/contacts/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/contacts') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/contacts') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/contacts') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
          }
          steps {
            dir('services/contacts') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/contacts') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'contacts test results', testResults: 'services/contacts/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="B2C"'
          }
        }
      }
    }
    cleanup {
      dir('services/contacts') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,17 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/object-persistor/**
libraries/promise-utils/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/docstore/**
tools/migrations/**
161
services/docstore/Jenkinsfile
vendored
@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/docstore') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/docstore/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/docstore') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/docstore') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/docstore') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'docstore-eslint', name: 'docstore eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/docstore/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/docstore') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/docstore') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/docstore') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
          }
          steps {
            dir('services/docstore') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/docstore') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'docstore test results', testResults: 'services/docstore/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      dir('services/docstore') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,20 +1,22 @@
 // Metrics must be initialized before importing anything else
-require('@overleaf/metrics/initialize')
+import '@overleaf/metrics/initialize.js'
 
-const Events = require('node:events')
-const Metrics = require('@overleaf/metrics')
-const Settings = require('@overleaf/settings')
-const logger = require('@overleaf/logger')
-const express = require('express')
-const bodyParser = require('body-parser')
-const {
-  celebrate: validate,
+import Events from 'node:events'
+import Metrics from '@overleaf/metrics'
+import Settings from '@overleaf/settings'
+import logger from '@overleaf/logger'
+import express from 'express'
+import bodyParser from 'body-parser'
+import {
+  celebrate as validate,
   Joi,
-  errors: handleValidationErrors,
-} = require('celebrate')
-const { mongoClient } = require('./app/js/mongodb')
-const Errors = require('./app/js/Errors')
-const HttpController = require('./app/js/HttpController')
+  errors as handleValidationErrors,
+} from 'celebrate'
+import mongodb from './app/js/mongodb.js'
+import Errors from './app/js/Errors.js'
+import HttpController from './app/js/HttpController.js'
+
+const { mongoClient } = mongodb
 
 Events.setMaxListeners(20)
 
@@ -114,7 +116,7 @@ app.use(function (error, req, res, next) {
 const { port } = Settings.internal.docstore
 const { host } = Settings.internal.docstore
 
-if (!module.parent) {
+if (import.meta.main) {
   // Called directly
   mongoClient
     .connect()
@@ -137,4 +139,4 @@
   })
 }
 
-module.exports = app
+export default app
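// import.meta.main replaces the CommonJS `!module.parent` check: it is true
// only when this file is the process entry point. (This assumes a Node
// release that ships import.meta.main; the build args below pin 22.18.0.)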
@@ -1,14 +1,16 @@
-const MongoManager = require('./MongoManager')
-const Errors = require('./Errors')
-const logger = require('@overleaf/logger')
-const Settings = require('@overleaf/settings')
-const crypto = require('node:crypto')
-const { ReadableString } = require('@overleaf/stream-utils')
-const RangeManager = require('./RangeManager')
-const PersistorManager = require('./PersistorManager')
-const pMap = require('p-map')
-const { streamToBuffer } = require('./StreamToBuffer')
-const { BSON } = require('mongodb-legacy')
+import MongoManager from './MongoManager.js'
+import Errors from './Errors.js'
+import logger from '@overleaf/logger'
+import Settings from '@overleaf/settings'
+import crypto from 'node:crypto'
+import { ReadableString } from '@overleaf/stream-utils'
+import RangeManager from './RangeManager.js'
+import PersistorManager from './PersistorManager.js'
+import pMap from 'p-map'
+import { streamToBuffer } from './StreamToBuffer.js'
+import mongodb from 'mongodb-legacy'
+
+const { BSON } = mongodb
 
 const PARALLEL_JOBS = Settings.parallelArchiveJobs
 const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize
@@ -220,7 +222,7 @@ function _isArchivingEnabled() {
   return true
 }
 
-module.exports = {
+export default {
   archiveAllDocs,
   archiveDoc,
   unArchiveAllDocs,
@@ -1,11 +1,11 @@
-const MongoManager = require('./MongoManager')
-const Errors = require('./Errors')
-const logger = require('@overleaf/logger')
-const _ = require('lodash')
-const DocArchive = require('./DocArchiveManager')
-const RangeManager = require('./RangeManager')
-const Settings = require('@overleaf/settings')
-const { setTimeout } = require('node:timers/promises')
+import MongoManager from './MongoManager.js'
+import Errors from './Errors.js'
+import logger from '@overleaf/logger'
+import _ from 'lodash'
+import DocArchive from './DocArchiveManager.js'
+import RangeManager from './RangeManager.js'
+import Settings from '@overleaf/settings'
+import { setTimeout } from 'node:timers/promises'
 
 /**
  * @import { Document } from 'mongodb'
@@ -319,4 +319,4 @@ const DocManager = {
   },
 }
 
-module.exports = DocManager
+export default DocManager
@@ -1,6 +1,7 @@
 // import Errors from object-persistor to pass instanceof checks
-const OError = require('@overleaf/o-error')
-const { Errors } = require('@overleaf/object-persistor')
+import OError from '@overleaf/o-error'
+
+import { Errors } from '@overleaf/object-persistor'
 
 class Md5MismatchError extends OError {}
 
@@ -12,7 +13,7 @@ class DocVersionDecrementedError extends OError {}
 
 class DocWithoutLinesError extends OError {}
 
-module.exports = {
+export default {
   Md5MismatchError,
   DocModifiedError,
   DocRevValueError,
@@ -1,10 +1,13 @@
-const { db, ObjectId } = require('./mongodb')
-const _ = require('lodash')
-const crypto = require('node:crypto')
-const settings = require('@overleaf/settings')
+import mongodb from './mongodb.js'
+import _ from 'lodash'
+import crypto from 'node:crypto'
+import settings from '@overleaf/settings'
+import logger from '@overleaf/logger'
+import { fetchNothing, fetchJson } from '@overleaf/fetch-utils'
+
+const { db, ObjectId } = mongodb
+
 const { port } = settings.internal.docstore
-const logger = require('@overleaf/logger')
-const { fetchNothing, fetchJson } = require('@overleaf/fetch-utils')
 
 async function check() {
   const docId = new ObjectId()
@@ -30,6 +33,7 @@ async function check() {
     throw new Error(`health check lines not equal ${body.lines} != ${lines}`)
   }
 }
-module.exports = {
+
+export default {
   check,
 }
@@ -1,10 +1,10 @@
-const DocManager = require('./DocManager')
-const logger = require('@overleaf/logger')
-const DocArchive = require('./DocArchiveManager')
-const HealthChecker = require('./HealthChecker')
-const Errors = require('./Errors')
-const Settings = require('@overleaf/settings')
-const { expressify } = require('@overleaf/promise-utils')
+import DocManager from './DocManager.js'
+import logger from '@overleaf/logger'
+import DocArchive from './DocArchiveManager.js'
+import HealthChecker from './HealthChecker.js'
+import Errors from './Errors.js'
+import Settings from '@overleaf/settings'
+import { expressify } from '@overleaf/promise-utils'
 
 async function getDoc(req, res) {
   const { doc_id: docId, project_id: projectId } = req.params
@@ -236,7 +236,7 @@ async function healthCheck(req, res) {
   res.sendStatus(200)
 }
 
-module.exports = {
+export default {
   getDoc: expressify(getDoc),
   peekDoc: expressify(peekDoc),
   isDocDeleted: expressify(isDocDeleted),
@@ -1,6 +1,8 @@
-const { db, ObjectId } = require('./mongodb')
-const Settings = require('@overleaf/settings')
-const Errors = require('./Errors')
+import mongodb from './mongodb.js'
+import Settings from '@overleaf/settings'
+import Errors from './Errors.js'
+
+const { db, ObjectId } = mongodb
 
 const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs
 
@@ -239,7 +241,7 @@ async function destroyProject(projectId) {
   await db.docs.deleteMany({ project_id: new ObjectId(projectId) })
 }
 
-module.exports = {
+export default {
   findDoc,
   getProjectsDeletedDocs,
   getProjectsDocs,
@@ -1,12 +1,13 @@
-const settings = require('@overleaf/settings')
+import settings from '@overleaf/settings'
+import ObjectPersistor from '@overleaf/object-persistor'
+import AbstractPersistor from '@overleaf/object-persistor/src/AbstractPersistor.js'
+import Metrics from '@overleaf/metrics'
 
 const persistorSettings = settings.docstore
-persistorSettings.Metrics = require('@overleaf/metrics')
+persistorSettings.Metrics = Metrics
 
-const ObjectPersistor = require('@overleaf/object-persistor')
-const AbstractPersistor = require('@overleaf/object-persistor/src/AbstractPersistor')
 const persistor = settings.docstore.backend
   ? ObjectPersistor(persistorSettings)
   : new AbstractPersistor()
 
-module.exports = persistor
+export default persistor
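// With no docstore backend configured, the AbstractPersistor fallback acts as
// a stub persistor: its methods are expected to reject as unimplemented, so
// object storage is effectively a no-op in that configuration.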
@@ -10,11 +10,14 @@
  * DS207: Consider shorter variations of null checks
  * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
  */
-let RangeManager
-const _ = require('lodash')
-const { ObjectId } = require('./mongodb')
+import _ from 'lodash'
+import mongodb from './mongodb.js'
 
-module.exports = RangeManager = {
+const { ObjectId } = mongodb
+
+let RangeManager
+
+export default RangeManager = {
   shouldUpdateRanges(docRanges, incomingRanges) {
     if (incomingRanges == null) {
       throw new Error('expected incoming_ranges')
@@ -1,13 +1,9 @@
-const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils')
-const Settings = require('@overleaf/settings')
-const logger = require('@overleaf/logger/logging-manager')
-const { pipeline } = require('node:stream/promises')
+import { LoggerStream, WritableBuffer } from '@overleaf/stream-utils'
+import Settings from '@overleaf/settings'
+import logger from '@overleaf/logger/logging-manager.js'
+import { pipeline } from 'node:stream/promises'
 
-module.exports = {
-  streamToBuffer,
-}
-
-async function streamToBuffer(projectId, docId, stream) {
+export async function streamToBuffer(projectId, docId, stream) {
   const loggerTransform = new LoggerStream(
     Settings.max_doc_length,
     (size, isFlush) => {
@@ -1,9 +1,12 @@
 // @ts-check
 
-const Metrics = require('@overleaf/metrics')
-const Settings = require('@overleaf/settings')
-const MongoUtils = require('@overleaf/mongo-utils')
-const { MongoClient, ObjectId } = require('mongodb-legacy')
+import Metrics from '@overleaf/metrics'
+
+import Settings from '@overleaf/settings'
+import MongoUtils from '@overleaf/mongo-utils'
+import mongodb from 'mongodb-legacy'
+
+const { MongoClient, ObjectId } = mongodb
 
 const mongoClient = new MongoClient(Settings.mongo.url, Settings.mongo.options)
 const mongoDb = mongoClient.db()
@@ -18,7 +21,7 @@ async function cleanupTestDatabase() {
   await MongoUtils.cleanupTestDatabase(mongoClient)
 }
 
-module.exports = {
+export default {
   db,
   mongoClient,
   ObjectId,
@@ -6,3 +6,5 @@ docstore
 --node-version=22.18.0
 --pipeline-owner=🚉 Platform
 --public-repo=True
+--test-unit-vitest=True
+--tsconfig-extra-includes=vitest.config.unit.cjs
@@ -9,6 +9,7 @@ services:
     volumes:
       - ./reports:/overleaf/services/docstore/reports
      - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
+      - ../../tsconfig.backend.json:/overleaf/tsconfig.backend.json
     entrypoint: /overleaf/bin/shared/wait_for_it mongo:27017 --timeout=60 --
     command: npm run test:unit:_run
     environment:
@@ -16,6 +17,7 @@ services:
       MONGO_CONNECTION_STRING: mongodb://mongo/test-overleaf
       NODE_ENV: test
       NODE_OPTIONS: "--unhandled-rejections=strict"
+      VITEST_NO_CACHE: true
     depends_on:
       mongo:
         condition: service_started
@@ -11,6 +11,7 @@ services:
       - ../../libraries:/overleaf/libraries
       - ../../bin/shared/wait_for_it:/overleaf/bin/shared/wait_for_it
       - ../../tools/migrations:/overleaf/tools/migrations
+      - ../../tsconfig.backend.json:/overleaf/tsconfig.backend.json
     working_dir: /overleaf/services/docstore
     environment:
       MOCHA_GREP: ${MOCHA_GREP}
@@ -2,13 +2,14 @@
   "name": "@overleaf/docstore",
   "description": "A CRUD API for handling text documents in projects",
   "private": true,
+  "type": "module",
   "main": "app.js",
   "scripts": {
     "start": "node app.js",
     "test:acceptance:_run": "mocha --recursive --timeout 15000 --exit $@ test/acceptance/js",
     "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
-    "test:unit:_run": "mocha --recursive --exit $@ test/unit/js",
-    "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
+    "test:unit:_run": "vitest --config ./vitest.config.unit.cjs",
+    "test:unit": "npm run test:unit:_run",
     "nodemon": "node --watch app.js",
     "lint": "eslint --max-warnings 0 --format unix .",
     "format": "prettier --list-different $PWD/'**/{*.*js,*.ts}'",
@@ -46,6 +47,7 @@
     "sandboxed-module": "~2.0.4",
     "sinon": "~9.0.2",
     "sinon-chai": "^3.7.0",
-    "typescript": "^5.0.4"
+    "typescript": "^5.0.4",
+    "vitest": "^3.2.4"
   }
 }
@@ -1,13 +1,16 @@
-const Settings = require('@overleaf/settings')
-const { expect } = require('chai')
-const { db, ObjectId } = require('../../../app/js/mongodb')
-const async = require('async')
-const DocstoreApp = require('./helpers/DocstoreApp')
-const DocstoreClient = require('./helpers/DocstoreClient')
-const { Storage } = require('@google-cloud/storage')
-const Persistor = require('../../../app/js/PersistorManager')
-const { ReadableString } = require('@overleaf/stream-utils')
-const { callbackify } = require('node:util')
+import Settings from '@overleaf/settings'
+import { expect } from 'chai'
+import mongodb from '../../../app/js/mongodb.js'
+import async from 'async'
+import DocstoreApp from './helpers/DocstoreApp.js'
+import DocstoreClient from './helpers/DocstoreClient.js'
+import { Storage } from '@google-cloud/storage'
+import Persistor from '../../../app/js/PersistorManager.js'
+import { ReadableString } from '@overleaf/stream-utils'
+import { callbackify } from 'node:util'
+import Crypto from 'node:crypto'
+
+const { db, ObjectId } = mongodb
 
 async function uploadContent(path, json) {
   const stream = new ReadableString(JSON.stringify(json))
@@ -275,9 +278,7 @@ describe('Archiving', function () {
       this.project_id = new ObjectId()
       this.timeout(1000 * 30)
       const quarterMegInBytes = 250000
-      const bigLine = require('node:crypto')
-        .randomBytes(quarterMegInBytes)
-        .toString('hex')
+      const bigLine = Crypto.randomBytes(quarterMegInBytes).toString('hex')
       this.doc = {
         _id: new ObjectId(),
         lines: [bigLine, bigLine, bigLine, bigLine],
@@ -1,14 +1,14 @@
-const { db, ObjectId } = require('../../../app/js/mongodb')
-const { expect } = require('chai')
-const DocstoreApp = require('./helpers/DocstoreApp')
-const Errors = require('../../../app/js/Errors')
-const Settings = require('@overleaf/settings')
-const { Storage } = require('@google-cloud/storage')
-const { promisify } = require('node:util')
+import mongodb from '../../../app/js/mongodb.js'
+import { expect } from 'chai'
+import DocstoreApp from './helpers/DocstoreApp.js'
+import Errors from '../../../app/js/Errors.js'
+import Settings from '@overleaf/settings'
+import { Storage } from '@google-cloud/storage'
+import { setTimeout as sleep } from 'node:timers/promises'
 
-const sleep = promisify(setTimeout)
+import DocstoreClient from './helpers/DocstoreClient.js'
 
-const DocstoreClient = require('./helpers/DocstoreClient')
+const { db, ObjectId } = mongodb
 
 function deleteTestSuite(deleteDoc) {
   before(async function () {
@@ -1,9 +1,10 @@
-const { ObjectId } = require('mongodb-legacy')
-const async = require('async')
-const DocstoreApp = require('./helpers/DocstoreApp')
-const { callbackify } = require('node:util')
+import mongodb from 'mongodb-legacy'
+import async from 'async'
+import DocstoreApp from './helpers/DocstoreApp.js'
+import { callbackify } from 'node:util'
+import DocstoreClient from './helpers/DocstoreClient.js'
 
-const DocstoreClient = require('./helpers/DocstoreClient')
+const { ObjectId } = mongodb
 
 describe('Getting all docs', function () {
   beforeEach(function (done) {
@@ -1,8 +1,10 @@
-const Settings = require('@overleaf/settings')
-const { ObjectId } = require('../../../app/js/mongodb')
-const DocstoreApp = require('./helpers/DocstoreApp')
-const DocstoreClient = require('./helpers/DocstoreClient')
-const { Storage } = require('@google-cloud/storage')
+import Settings from '@overleaf/settings'
+import mongodb from '../../../app/js/mongodb.js'
+import DocstoreApp from './helpers/DocstoreApp.js'
+import DocstoreClient from './helpers/DocstoreClient.js'
+import { Storage } from '@google-cloud/storage'
+
+const { ObjectId } = mongodb
 
 describe('Getting A Doc from Archive', function () {
   before(async function () {
@@ -1,8 +1,9 @@
-const { ObjectId } = require('mongodb-legacy')
-const { expect } = require('chai')
-const DocstoreApp = require('./helpers/DocstoreApp')
+import mongodb from 'mongodb-legacy'
+import { expect } from 'chai'
+import DocstoreApp from './helpers/DocstoreApp.js'
+import DocstoreClient from './helpers/DocstoreClient.js'
 
-const DocstoreClient = require('./helpers/DocstoreClient')
+const { ObjectId } = mongodb
 
 describe('Getting a doc', function () {
   beforeEach(async function () {
@@ -1,7 +1,9 @@
-const { db } = require('../../../app/js/mongodb')
-const DocstoreApp = require('./helpers/DocstoreApp')
-const DocstoreClient = require('./helpers/DocstoreClient')
-const { expect } = require('chai')
+import mongodb from '../../../app/js/mongodb.js'
+import DocstoreApp from './helpers/DocstoreApp.js'
+import DocstoreClient from './helpers/DocstoreClient.js'
+import { expect } from 'chai'
+
+const { db } = mongodb
 
 describe('HealthChecker', function () {
   beforeEach('start', async function () {
@@ -1,7 +1,8 @@
-const { ObjectId } = require('mongodb-legacy')
-const DocstoreApp = require('./helpers/DocstoreApp')
+import mongodb from 'mongodb-legacy'
+import DocstoreApp from './helpers/DocstoreApp.js'
+import DocstoreClient from './helpers/DocstoreClient.js'
 
-const DocstoreClient = require('./helpers/DocstoreClient')
+const { ObjectId } = mongodb
 
 describe('Applying updates to a doc', function () {
   beforeEach(async function () {
@@ -1,6 +1,6 @@
-const app = require('../../../../app')
-const Settings = require('@overleaf/settings')
-require('./MongoHelper')
+import app from '../../../../app.js'
+import Settings from '@overleaf/settings'
+import './MongoHelper.js'
 
 function startApp() {
   return new Promise((resolve, reject) => {
@@ -27,6 +27,6 @@ async function ensureRunning() {
   await appStartedPromise
 }
 
-module.exports = {
+export default {
   ensureRunning,
 }
@@ -1,11 +1,12 @@
-let DocstoreClient
-const {
+import {
   fetchNothing,
   fetchJson,
   fetchJsonWithResponse,
-} = require('@overleaf/fetch-utils')
-const settings = require('@overleaf/settings')
-const Persistor = require('../../../../app/js/PersistorManager')
+} from '@overleaf/fetch-utils'
+import settings from '@overleaf/settings'
+import Persistor from '../../../../app/js/PersistorManager.js'
+
+let DocstoreClient
 
 async function streamToString(stream) {
   const chunks = []
@@ -22,7 +23,7 @@ async function getStringFromPersistor(persistor, bucket, key) {
   return await streamToString(stream)
 }
 
-module.exports = DocstoreClient = {
+export default DocstoreClient = {
   async createDoc(projectId, docId, lines, version, ranges) {
     return await DocstoreClient.updateDoc(
       projectId,
@@ -1,12 +1,10 @@
-const chai = require('chai')
-const sinon = require('sinon')
-const sinonChai = require('sinon-chai')
-const chaiAsPromised = require('chai-as-promised')
-const SandboxedModule = require('sandboxed-module')
-const timersPromises = require('node:timers/promises')
+import sinonChai from 'sinon-chai'
+import chaiAsPromised from 'chai-as-promised'
+import mongodb from 'mongodb-legacy'
+import chai from 'chai'
 
 // ensure every ObjectId has the id string as a property for correct comparisons
-require('mongodb-legacy').ObjectId.cacheHexString = true
+mongodb.ObjectId.cacheHexString = true
 
 process.env.BACKEND = 'gcs'
 
@@ -14,42 +12,3 @@ process.env.BACKEND = 'gcs'
 chai.should()
 chai.use(sinonChai)
 chai.use(chaiAsPromised)
-
-// Global stubs
-const sandbox = sinon.createSandbox()
-const stubs = {
-  logger: {
-    debug: sandbox.stub(),
-    log: sandbox.stub(),
-    info: sandbox.stub(),
-    warn: sandbox.stub(),
-    err: sandbox.stub(),
-    error: sandbox.stub(),
-    fatal: sandbox.stub(),
-  },
-}
-
-// SandboxedModule configuration
-SandboxedModule.configure({
-  requires: {
-    '@overleaf/logger': stubs.logger,
-    'timers/promises': timersPromises,
-    'mongodb-legacy': require('mongodb-legacy'),
-  },
-  globals: { Buffer, JSON, Math, console, process },
-  sourceTransformers: {
-    removeNodePrefix: function (source) {
-      return source.replace(/require\(['"]node:/g, "require('")
-    },
-  },
-})
-
-exports.mochaHooks = {
-  beforeEach() {
-    this.logger = stubs.logger
-  },
-
-  afterEach() {
-    sandbox.reset()
-  },
-}
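// None of the global SandboxedModule stubbing above survives the migration:
// under vitest each test file registers the mocks it needs itself via
// vi.doMock (see the DocArchiveManager tests below), so the shared mocha
// bootstrap can be deleted outright.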
@@ -1,12 +1,12 @@
-const sinon = require('sinon')
-const { expect } = require('chai')
-const modulePath = '../../../app/js/DocArchiveManager.js'
-const SandboxedModule = require('sandboxed-module')
-const { ObjectId } = require('mongodb-legacy')
-const Errors = require('../../../app/js/Errors')
-const StreamToBuffer = require('../../../app/js/StreamToBuffer')
+import sinon from 'sinon'
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
+import { ObjectId } from 'mongodb-legacy'
+import Errors from '../../../app/js/Errors.js'
+import * as StreamToBuffer from '../../../app/js/StreamToBuffer.js'
 
-describe('DocArchiveManager', function () {
+const modulePath = '../../../app/js/DocArchiveManager.js'
+
+describe('DocArchiveManager', () => {
   let DocArchiveManager,
     PersistorManager,
     MongoManager,
@@ -26,7 +26,7 @@ describe('DocArchiveManager', function () {
     stream,
     streamToBuffer
 
-  beforeEach(function () {
+  beforeEach(async () => {
    md5Sum = 'decafbad'
 
    RangeManager = {
@@ -173,32 +173,49 @@ describe('DocArchiveManager', function () {
       },
     }
 
-    DocArchiveManager = SandboxedModule.require(modulePath, {
-      requires: {
-        '@overleaf/settings': Settings,
-        crypto: Crypto,
-        '@overleaf/stream-utils': StreamUtils,
-        './MongoManager': MongoManager,
-        './RangeManager': RangeManager,
-        './PersistorManager': PersistorManager,
-        './Errors': Errors,
-        './StreamToBuffer': streamToBuffer,
-      },
-    })
+    vi.doMock('@overleaf/settings', () => ({
+      default: Settings,
+    }))
+
+    vi.doMock('crypto', () => ({
+      default: Crypto,
+    }))
+
+    vi.doMock('@overleaf/stream-utils', () => StreamUtils)
+
+    vi.doMock('../../../app/js/MongoManager', () => ({
+      default: MongoManager,
+    }))
+
+    vi.doMock('../../../app/js/RangeManager', () => ({
+      default: RangeManager,
+    }))
+
+    vi.doMock('../../../app/js/PersistorManager', () => ({
+      default: PersistorManager,
+    }))
+
+    vi.doMock('../../../app/js/Errors', () => ({
+      default: Errors,
+    }))
+
+    vi.doMock('../../../app/js/StreamToBuffer', () => streamToBuffer)
+
+    DocArchiveManager = (await import(modulePath)).default
   })
 
-  describe('archiveDoc', function () {
-    it('should resolve when passed a valid document', async function () {
+  describe('archiveDoc', () => {
+    it('should resolve when passed a valid document', async () => {
       await expect(DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)).to
         .eventually.be.fulfilled
     })
 
-    it('should fix comment ids', async function () {
+    it('should fix comment ids', async () => {
       await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id)
       expect(RangeManager.fixCommentIds).to.have.been.called
     })
 
-    it('should throw an error if the doc has no lines', async function () {
+    it('should throw an error if the doc has no lines', async () => {
      const doc = mongoDocs[0]
      doc.lines = null
 
@@ -207,21 +224,21 @@ describe('DocArchiveManager', function () {
|
||||
).to.eventually.be.rejectedWith('doc has no lines')
|
||||
})
|
||||
|
||||
it('should add the schema version', async function () {
|
||||
it('should add the schema version', async () => {
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[1]._id)
|
||||
expect(StreamUtils.ReadableString).to.have.been.calledWith(
|
||||
sinon.match(/"schema_v":1/)
|
||||
)
|
||||
})
|
||||
|
||||
it('should calculate the hex md5 sum of the content', async function () {
|
||||
it('should calculate the hex md5 sum of the content', async () => {
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(Crypto.createHash).to.have.been.calledWith('md5')
|
||||
expect(HashUpdate).to.have.been.calledWith(archivedDocJson)
|
||||
expect(HashDigest).to.have.been.calledWith('hex')
|
||||
})
|
||||
|
||||
it('should pass the md5 hash to the object persistor for verification', async function () {
|
||||
it('should pass the md5 hash to the object persistor for verification', async () => {
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
|
||||
expect(PersistorManager.sendStream).to.have.been.calledWith(
|
||||
@@ -232,26 +249,26 @@ describe('DocArchiveManager', function () {
|
||||
)
|
||||
})
|
||||
|
||||
describe('with S3 persistor', function () {
|
||||
beforeEach(async function () {
|
||||
describe('with S3 persistor', () => {
|
||||
beforeEach(async () => {
|
||||
Settings.docstore.backend = 's3'
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
})
|
||||
|
||||
it('should not calculate the hex md5 sum of the content', function () {
|
||||
it('should not calculate the hex md5 sum of the content', () => {
|
||||
expect(Crypto.createHash).not.to.have.been.called
|
||||
expect(HashUpdate).not.to.have.been.called
|
||||
expect(HashDigest).not.to.have.been.called
|
||||
})
|
||||
|
||||
it('should not pass an md5 hash to the object persistor for verification', function () {
|
||||
it('should not pass an md5 hash to the object persistor for verification', () => {
|
||||
expect(PersistorManager.sendStream).not.to.have.been.calledWithMatch({
|
||||
sourceMd5: sinon.match.any,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it('should pass the correct bucket and key to the persistor', async function () {
|
||||
it('should pass the correct bucket and key to the persistor', async () => {
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
|
||||
expect(PersistorManager.sendStream).to.have.been.calledWith(
|
||||
@@ -260,7 +277,7 @@ describe('DocArchiveManager', function () {
|
||||
)
|
||||
})
|
||||
|
||||
it('should create a stream from the encoded json and send it', async function () {
|
||||
it('should create a stream from the encoded json and send it', async () => {
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(StreamUtils.ReadableString).to.have.been.calledWith(
|
||||
archivedDocJson
|
||||
@@ -272,7 +289,7 @@ describe('DocArchiveManager', function () {
|
||||
)
|
||||
})
|
||||
|
||||
it('should mark the doc as archived', async function () {
|
||||
it('should mark the doc as archived', async () => {
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
|
||||
projectId,
|
||||
@@ -281,29 +298,29 @@ describe('DocArchiveManager', function () {
|
||||
)
|
||||
})
|
||||
|
||||
describe('when archiving is not configured', function () {
|
||||
beforeEach(function () {
|
||||
describe('when archiving is not configured', () => {
|
||||
beforeEach(() => {
|
||||
Settings.docstore.backend = undefined
|
||||
})
|
||||
|
||||
it('should bail out early', async function () {
|
||||
it('should bail out early', async () => {
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(MongoManager.getDocForArchiving).to.not.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('with null bytes in the result', function () {
|
||||
describe('with null bytes in the result', () => {
|
||||
const _stringify = JSON.stringify
|
||||
|
||||
beforeEach(function () {
|
||||
beforeEach(() => {
|
||||
JSON.stringify = sinon.stub().returns('{"bad": "\u0000"}')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
afterEach(() => {
|
||||
JSON.stringify = _stringify
|
||||
})
|
||||
|
||||
it('should return an error', async function () {
|
||||
it('should return an error', async () => {
|
||||
await expect(
|
||||
DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
).to.eventually.be.rejectedWith('null bytes detected')
|
||||
@@ -311,37 +328,37 @@ describe('DocArchiveManager', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('unarchiveDoc', function () {
|
||||
describe('unarchiveDoc', () => {
|
||||
let docId, lines, rev
|
||||
|
||||
describe('when the doc is in S3', function () {
|
||||
beforeEach(function () {
|
||||
describe('when the doc is in S3', () => {
|
||||
beforeEach(() => {
|
||||
MongoManager.findDoc = sinon.stub().resolves({ inS3: true, rev })
|
||||
docId = mongoDocs[0]._id
|
||||
lines = ['doc', 'lines']
|
||||
rev = 123
|
||||
})
|
||||
|
||||
it('should resolve when passed a valid document', async function () {
|
||||
it('should resolve when passed a valid document', async () => {
|
||||
await expect(DocArchiveManager.unarchiveDoc(projectId, docId)).to
|
||||
.eventually.be.fulfilled
|
||||
})
|
||||
|
||||
it('should test md5 validity with the raw buffer', async function () {
|
||||
it('should test md5 validity with the raw buffer', async () => {
|
||||
await DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
expect(HashUpdate).to.have.been.calledWith(
|
||||
sinon.match.instanceOf(Buffer)
|
||||
)
|
||||
})
|
||||
|
||||
it('should throw an error if the md5 does not match', async function () {
|
||||
it('should throw an error if the md5 does not match', async () => {
|
||||
PersistorManager.getObjectMd5Hash.resolves('badf00d')
|
||||
await expect(
|
||||
DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
|
||||
})
|
||||
|
||||
it('should restore the doc in Mongo', async function () {
|
||||
it('should restore the doc in Mongo', async () => {
|
||||
await DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
|
||||
projectId,
|
||||
@@ -350,12 +367,12 @@ describe('DocArchiveManager', function () {
|
||||
)
|
||||
})
|
||||
|
||||
describe('when archiving is not configured', function () {
|
||||
beforeEach(function () {
|
||||
describe('when archiving is not configured', () => {
|
||||
beforeEach(() => {
|
||||
Settings.docstore.backend = undefined
|
||||
})
|
||||
|
||||
it('should error out on archived doc', async function () {
|
||||
it('should error out on archived doc', async () => {
|
||||
await expect(
|
||||
DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
).to.eventually.be.rejected.and.match(
|
||||
@@ -363,18 +380,18 @@ describe('DocArchiveManager', function () {
|
||||
)
|
||||
})
|
||||
|
||||
it('should return early on non-archived doc', async function () {
|
||||
it('should return early on non-archived doc', async () => {
|
||||
MongoManager.findDoc = sinon.stub().resolves({ rev })
|
||||
await DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('doc contents', function () {
|
||||
describe('doc contents', () => {
|
||||
let archivedDoc
|
||||
|
||||
describe('when the doc has the old schema', function () {
|
||||
beforeEach(function () {
|
||||
describe('when the doc has the old schema', () => {
|
||||
beforeEach(() => {
|
||||
archivedDoc = lines
|
||||
archivedDocJson = JSON.stringify(archivedDoc)
|
||||
stream.on
|
||||
@@ -382,7 +399,7 @@ describe('DocArchiveManager', function () {
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should return the docs lines', async function () {
|
||||
it('should return the docs lines', async () => {
|
||||
await DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
|
||||
projectId,
|
||||
@@ -392,8 +409,8 @@ describe('DocArchiveManager', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('with the new schema and ranges', function () {
|
||||
beforeEach(function () {
|
||||
describe('with the new schema and ranges', () => {
|
||||
beforeEach(() => {
|
||||
archivedDoc = {
|
||||
lines,
|
||||
ranges: { json: 'ranges' },
|
||||
@@ -406,7 +423,7 @@ describe('DocArchiveManager', function () {
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should return the doc lines and ranges', async function () {
|
||||
it('should return the doc lines and ranges', async () => {
|
||||
await DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
|
||||
projectId,
|
||||
@@ -420,8 +437,8 @@ describe('DocArchiveManager', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('with the new schema and no ranges', function () {
|
||||
beforeEach(function () {
|
||||
describe('with the new schema and no ranges', () => {
|
||||
beforeEach(() => {
|
||||
archivedDoc = { lines, rev: 456, schema_v: 1 }
|
||||
archivedDocJson = JSON.stringify(archivedDoc)
|
||||
stream.on
|
||||
@@ -429,7 +446,7 @@ describe('DocArchiveManager', function () {
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should return only the doc lines', async function () {
|
||||
it('should return only the doc lines', async () => {
|
||||
await DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
|
||||
projectId,
|
||||
@@ -439,8 +456,8 @@ describe('DocArchiveManager', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('with the new schema and no rev', function () {
|
||||
beforeEach(function () {
|
||||
describe('with the new schema and no rev', () => {
|
||||
beforeEach(() => {
|
||||
archivedDoc = { lines, schema_v: 1 }
|
||||
archivedDocJson = JSON.stringify(archivedDoc)
|
||||
stream.on
|
||||
@@ -448,7 +465,7 @@ describe('DocArchiveManager', function () {
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should use the rev obtained from Mongo', async function () {
|
||||
it('should use the rev obtained from Mongo', async () => {
|
||||
await DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
expect(MongoManager.restoreArchivedDoc).to.have.been.calledWith(
|
||||
projectId,
|
||||
@@ -458,8 +475,8 @@ describe('DocArchiveManager', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('with an unrecognised schema', function () {
|
||||
beforeEach(function () {
|
||||
describe('with an unrecognised schema', () => {
|
||||
beforeEach(() => {
|
||||
archivedDoc = { lines, schema_v: 2 }
|
||||
archivedDocJson = JSON.stringify(archivedDoc)
|
||||
stream.on
|
||||
@@ -467,7 +484,7 @@ describe('DocArchiveManager', function () {
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should throw an error', async function () {
|
||||
it('should throw an error', async () => {
|
||||
await expect(
|
||||
DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
).to.eventually.be.rejectedWith(
|
||||
@@ -478,13 +495,13 @@ describe('DocArchiveManager', function () {
|
||||
})
|
||||
})
|
||||
|
||||
it('should not do anything if the file is already unarchived', async function () {
|
||||
it('should not do anything if the file is already unarchived', async () => {
|
||||
MongoManager.findDoc.resolves({ inS3: false })
|
||||
await DocArchiveManager.unarchiveDoc(projectId, docId)
|
||||
expect(PersistorManager.getObjectStream).not.to.have.been.called
|
||||
})
|
||||
|
||||
it('should throw an error if the file is not found', async function () {
|
||||
it('should throw an error if the file is not found', async () => {
|
||||
PersistorManager.getObjectStream = sinon
|
||||
.stub()
|
||||
.rejects(new Errors.NotFoundError())
|
||||
@@ -494,17 +511,17 @@ describe('DocArchiveManager', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('destroyProject', function () {
|
||||
describe('when archiving is enabled', function () {
|
||||
beforeEach(async function () {
|
||||
describe('destroyProject', () => {
|
||||
describe('when archiving is enabled', () => {
|
||||
beforeEach(async () => {
|
||||
await DocArchiveManager.destroyProject(projectId)
|
||||
})
|
||||
|
||||
it('should delete the project in Mongo', function () {
|
||||
it('should delete the project in Mongo', () => {
|
||||
expect(MongoManager.destroyProject).to.have.been.calledWith(projectId)
|
||||
})
|
||||
|
||||
it('should delete the project in the persistor', function () {
|
||||
it('should delete the project in the persistor', () => {
|
||||
expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
|
||||
Settings.docstore.bucket,
|
||||
projectId
|
||||
@@ -512,29 +529,29 @@ describe('DocArchiveManager', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('when archiving is disabled', function () {
|
||||
beforeEach(async function () {
|
||||
describe('when archiving is disabled', () => {
|
||||
beforeEach(async () => {
|
||||
Settings.docstore.backend = ''
|
||||
await DocArchiveManager.destroyProject(projectId)
|
||||
})
|
||||
|
||||
it('should delete the project in Mongo', function () {
|
||||
it('should delete the project in Mongo', () => {
|
||||
expect(MongoManager.destroyProject).to.have.been.calledWith(projectId)
|
||||
})
|
||||
|
||||
it('should not delete the project in the persistor', function () {
|
||||
it('should not delete the project in the persistor', () => {
|
||||
expect(PersistorManager.deleteDirectory).not.to.have.been.called
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('archiveAllDocs', function () {
|
||||
it('should resolve with valid arguments', async function () {
|
||||
describe('archiveAllDocs', () => {
|
||||
it('should resolve with valid arguments', async () => {
|
||||
await expect(DocArchiveManager.archiveAllDocs(projectId)).to.eventually.be
|
||||
.fulfilled
|
||||
})
|
||||
|
||||
it('should archive all project docs which are not in s3', async function () {
|
||||
it('should archive all project docs which are not in s3', async () => {
|
||||
await DocArchiveManager.archiveAllDocs(projectId)
|
||||
// not inS3
|
||||
expect(MongoManager.markDocAsArchived).to.have.been.calledWith(
|
||||
@@ -561,25 +578,25 @@ describe('DocArchiveManager', function () {
|
||||
)
|
||||
})
|
||||
|
||||
describe('when archiving is not configured', function () {
|
||||
beforeEach(function () {
|
||||
describe('when archiving is not configured', () => {
|
||||
beforeEach(() => {
|
||||
Settings.docstore.backend = undefined
|
||||
})
|
||||
|
||||
it('should bail out early', async function () {
|
||||
it('should bail out early', async () => {
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(MongoManager.getNonArchivedProjectDocIds).to.not.have.been.called
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('unArchiveAllDocs', function () {
|
||||
it('should resolve with valid arguments', async function () {
|
||||
describe('unArchiveAllDocs', () => {
|
||||
it('should resolve with valid arguments', async () => {
|
||||
await expect(DocArchiveManager.unArchiveAllDocs(projectId)).to.eventually
|
||||
.be.fulfilled
|
||||
})
|
||||
|
||||
it('should unarchive all inS3 docs', async function () {
|
||||
it('should unarchive all inS3 docs', async () => {
|
||||
await DocArchiveManager.unArchiveAllDocs(projectId)
|
||||
|
||||
for (const doc of archivedDocs) {
|
||||
@@ -590,12 +607,12 @@ describe('DocArchiveManager', function () {
|
||||
}
|
||||
})
|
||||
|
||||
describe('when archiving is not configured', function () {
|
||||
beforeEach(function () {
|
||||
describe('when archiving is not configured', () => {
|
||||
beforeEach(() => {
|
||||
Settings.docstore.backend = undefined
|
||||
})
|
||||
|
||||
it('should bail out early', async function () {
|
||||
it('should bail out early', async () => {
|
||||
await DocArchiveManager.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(MongoManager.getNonDeletedArchivedProjectDocs).to.not.have.been
|
||||
.called
|
||||
762 services/docstore/test/unit/js/DocManager.test.js Normal file
@@ -0,0 +1,762 @@
import sinon from 'sinon'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ObjectId } from 'mongodb-legacy'
import Errors from '../../../app/js/Errors.js'
import path from 'node:path'

const modulePath = path.join(import.meta.dirname, '../../../app/js/DocManager')

describe('DocManager', () => {
  beforeEach(async ctx => {
    ctx.doc_id = new ObjectId().toString()
    ctx.project_id = new ObjectId().toString()
    ctx.another_project_id = new ObjectId().toString()
    ctx.stubbedError = new Error('blew up')
    ctx.version = 42

    ctx.MongoManager = {
      findDoc: sinon.stub(),
      getProjectsDocs: sinon.stub(),
      patchDoc: sinon.stub().resolves(),
      upsertIntoDocCollection: sinon.stub().resolves(),
    }
    ctx.DocArchiveManager = {
      unarchiveDoc: sinon.stub(),
      unArchiveAllDocs: sinon.stub(),
      archiveDoc: sinon.stub().resolves(),
    }
    ctx.RangeManager = {
      jsonRangesToMongo(r) {
        return r
      },
      shouldUpdateRanges: sinon.stub().returns(false),
      fixCommentIds: sinon.stub(),
    }
    ctx.settings = { docstore: {} }

    vi.doMock('../../../app/js/MongoManager', () => ({
      default: ctx.MongoManager,
    }))

    vi.doMock('../../../app/js/DocArchiveManager', () => ({
      default: ctx.DocArchiveManager,
    }))

    vi.doMock('../../../app/js/RangeManager', () => ({
      default: ctx.RangeManager,
    }))

    vi.doMock('@overleaf/settings', () => ({
      default: ctx.settings,
    }))

    vi.doMock('../../../app/js/Errors', () => ({
      default: Errors,
    }))

    ctx.DocManager = (await import(modulePath)).default
  })

  describe('getFullDoc', () => {
    beforeEach(ctx => {
      ctx.DocManager._getDoc = sinon.stub()
      ctx.doc = {
        _id: ctx.doc_id,
        lines: ['2134'],
      }
    })

    it('should call get doc with a quick filter', async ctx => {
      ctx.DocManager._getDoc.resolves(ctx.doc)
      const doc = await ctx.DocManager.getFullDoc(ctx.project_id, ctx.doc_id)
      doc.should.equal(ctx.doc)
      ctx.DocManager._getDoc
        .calledWith(ctx.project_id, ctx.doc_id, {
          lines: true,
          rev: true,
          deleted: true,
          version: true,
          ranges: true,
          inS3: true,
        })
        .should.equal(true)
    })

    it('should return error when get doc errors', async ctx => {
      ctx.DocManager._getDoc.rejects(ctx.stubbedError)
      await expect(
        ctx.DocManager.getFullDoc(ctx.project_id, ctx.doc_id)
      ).to.be.rejectedWith(ctx.stubbedError)
    })
  })

  describe('getRawDoc', () => {
    beforeEach(ctx => {
      ctx.DocManager._getDoc = sinon.stub()
      ctx.doc = { lines: ['2134'] }
    })

    it('should call get doc with a quick filter', async ctx => {
      ctx.DocManager._getDoc.resolves(ctx.doc)
      const content = await ctx.DocManager.getDocLines(
        ctx.project_id,
        ctx.doc_id
      )
      content.should.equal(ctx.doc.lines.join('\n'))
      ctx.DocManager._getDoc
        .calledWith(ctx.project_id, ctx.doc_id, {
          lines: true,
          inS3: true,
        })
        .should.equal(true)
    })

    it('should return error when get doc errors', async ctx => {
      ctx.DocManager._getDoc.rejects(ctx.stubbedError)
      await expect(
        ctx.DocManager.getDocLines(ctx.project_id, ctx.doc_id)
      ).to.be.rejectedWith(ctx.stubbedError)
    })

    it('should return error when get doc does not exist', async ctx => {
      ctx.DocManager._getDoc.resolves(null)
      await expect(
        ctx.DocManager.getDocLines(ctx.project_id, ctx.doc_id)
      ).to.be.rejectedWith(Errors.NotFoundError)
    })

    it('should return error when get doc has no lines', async ctx => {
      ctx.DocManager._getDoc.resolves({})
      await expect(
        ctx.DocManager.getDocLines(ctx.project_id, ctx.doc_id)
      ).to.be.rejectedWith(Errors.DocWithoutLinesError)
    })
  })

  describe('_getDoc', () => {
    it('should return error when get doc does not exist', async ctx => {
      ctx.MongoManager.findDoc.resolves(null)
      await expect(
        ctx.DocManager._getDoc(ctx.project_id, ctx.doc_id, { inS3: true })
      ).to.be.rejectedWith(Errors.NotFoundError)
    })

    it('should fix comment ids', async ctx => {
      ctx.MongoManager.findDoc.resolves({
        _id: ctx.doc_id,
        ranges: {},
      })
      await ctx.DocManager._getDoc(ctx.project_id, ctx.doc_id, {
        inS3: true,
        ranges: true,
      })
      expect(ctx.RangeManager.fixCommentIds).to.have.been.called
    })
  })

  describe('getDoc', () => {
    beforeEach(ctx => {
      ctx.project = { name: 'mock-project' }
      ctx.doc = {
        _id: ctx.doc_id,
        project_id: ctx.project_id,
        lines: ['mock-lines'],
        version: ctx.version,
      }
    })

    describe('when using a filter', () => {
      beforeEach(ctx => {
        ctx.MongoManager.findDoc.resolves(ctx.doc)
      })

      it('should error if inS3 is not set to true', async ctx => {
        await expect(
          ctx.DocManager._getDoc(ctx.project_id, ctx.doc_id, {
            inS3: false,
          })
        ).to.be.rejected
      })

      it('should always get inS3 even when no filter is passed', async ctx => {
        await expect(ctx.DocManager._getDoc(ctx.project_id, ctx.doc_id)).to.be
          .rejected
        ctx.MongoManager.findDoc.called.should.equal(false)
      })

      it('should not error if inS3 is set to true', async ctx => {
        await ctx.DocManager._getDoc(ctx.project_id, ctx.doc_id, {
          inS3: true,
        })
      })
    })

    describe('when the doc is in the doc collection', () => {
      beforeEach(async ctx => {
        ctx.MongoManager.findDoc.resolves(ctx.doc)
        ctx.result = await ctx.DocManager._getDoc(ctx.project_id, ctx.doc_id, {
          version: true,
          inS3: true,
        })
      })

      it('should get the doc from the doc collection', ctx => {
        ctx.MongoManager.findDoc
          .calledWith(ctx.project_id, ctx.doc_id)
          .should.equal(true)
      })

      it('should return the doc with the version', ctx => {
        ctx.result.lines.should.equal(ctx.doc.lines)
        ctx.result.version.should.equal(ctx.version)
      })
    })

    describe('when MongoManager.findDoc errors', () => {
      it('should return the error', async ctx => {
        ctx.MongoManager.findDoc.rejects(ctx.stubbedError)
        await expect(
          ctx.DocManager._getDoc(ctx.project_id, ctx.doc_id, {
            version: true,
            inS3: true,
          })
        ).to.be.rejectedWith(ctx.stubbedError)
      })
    })

    describe('when the doc is archived', () => {
      beforeEach(async ctx => {
        ctx.doc = {
          _id: ctx.doc_id,
          project_id: ctx.project_id,
          version: 2,
          inS3: true,
        }
        ctx.unarchivedDoc = {
          _id: ctx.doc_id,
          project_id: ctx.project_id,
          lines: ['mock-lines'],
          version: 2,
          inS3: false,
        }
        ctx.MongoManager.findDoc.resolves(ctx.doc)
        ctx.DocArchiveManager.unarchiveDoc.callsFake(
          async (projectId, docId) => {
            ctx.MongoManager.findDoc.resolves({
              ...ctx.unarchivedDoc,
            })
          }
        )
        ctx.result = await ctx.DocManager._getDoc(ctx.project_id, ctx.doc_id, {
          version: true,
          inS3: true,
        })
      })

      it('should call the DocArchive to unarchive the doc', ctx => {
        ctx.DocArchiveManager.unarchiveDoc
          .calledWith(ctx.project_id, ctx.doc_id)
          .should.equal(true)
      })

      it('should look up the doc twice', ctx => {
        ctx.MongoManager.findDoc.calledTwice.should.equal(true)
      })

      it('should return the doc', ctx => {
        expect(ctx.result).to.deep.equal({
          ...ctx.unarchivedDoc,
        })
      })
    })

    describe('when the doc does not exist in the docs collection', () => {
      it('should return a NotFoundError', async ctx => {
        ctx.MongoManager.findDoc.resolves(null)
        await expect(
          ctx.DocManager._getDoc(ctx.project_id, ctx.doc_id, {
            version: true,
            inS3: true,
          })
        ).to.be.rejectedWith(
          `No such doc: ${ctx.doc_id} in project ${ctx.project_id}`
        )
      })
    })
  })

  describe('getAllNonDeletedDocs', () => {
    describe('when the project exists', () => {
      beforeEach(async ctx => {
        ctx.docs = [
          {
            _id: ctx.doc_id,
            project_id: ctx.project_id,
            lines: ['mock-lines'],
          },
        ]
        ctx.MongoManager.getProjectsDocs.resolves(ctx.docs)
        ctx.DocArchiveManager.unArchiveAllDocs.resolves(ctx.docs)
        ctx.filter = { lines: true, ranges: true }
        ctx.result = await ctx.DocManager.getAllNonDeletedDocs(
          ctx.project_id,
          ctx.filter
        )
      })

      it('should get the project from the database', ctx => {
        ctx.MongoManager.getProjectsDocs.should.have.been.calledWith(
          ctx.project_id,
          { include_deleted: false },
          ctx.filter
        )
      })

      it('should fix comment ids', async ctx => {
        expect(ctx.RangeManager.fixCommentIds).to.have.been.called
      })

      it('should return the docs', ctx => {
        expect(ctx.result).to.deep.equal(ctx.docs)
      })
    })

    describe('when there are no docs for the project', () => {
      it('should return a NotFoundError', async ctx => {
        ctx.MongoManager.getProjectsDocs.resolves(null)
        ctx.DocArchiveManager.unArchiveAllDocs.resolves(null)
        await expect(
          ctx.DocManager.getAllNonDeletedDocs(ctx.project_id, ctx.filter)
        ).to.be.rejectedWith(`No docs for project ${ctx.project_id}`)
      })
    })
  })

  describe('patchDoc', () => {
    describe('when the doc exists', () => {
      beforeEach(ctx => {
        ctx.lines = ['mock', 'doc', 'lines']
        ctx.rev = 77
        ctx.MongoManager.findDoc.resolves({
          _id: new ObjectId(ctx.doc_id),
        })
        ctx.meta = {}
      })

      describe('standard path', () => {
        beforeEach(async ctx => {
          await ctx.DocManager.patchDoc(ctx.project_id, ctx.doc_id, ctx.meta)
        })

        it('should get the doc', ctx => {
          expect(ctx.MongoManager.findDoc).to.have.been.calledWith(
            ctx.project_id,
            ctx.doc_id
          )
        })

        it('should persist the meta', ctx => {
          expect(ctx.MongoManager.patchDoc).to.have.been.calledWith(
            ctx.project_id,
            ctx.doc_id,
            ctx.meta
          )
        })
      })

      describe('background flush disabled and deleting a doc', () => {
        beforeEach(async ctx => {
          ctx.settings.docstore.archiveOnSoftDelete = false
          ctx.meta.deleted = true

          await ctx.DocManager.patchDoc(ctx.project_id, ctx.doc_id, ctx.meta)
        })

        it('should not flush the doc out of mongo', ctx => {
          expect(ctx.DocArchiveManager.archiveDoc).to.not.have.been.called
        })
      })

      describe('background flush enabled and not deleting a doc', () => {
        beforeEach(async ctx => {
          ctx.settings.docstore.archiveOnSoftDelete = false
          ctx.meta.deleted = false
          await ctx.DocManager.patchDoc(ctx.project_id, ctx.doc_id, ctx.meta)
        })

        it('should not flush the doc out of mongo', ctx => {
          expect(ctx.DocArchiveManager.archiveDoc).to.not.have.been.called
        })
      })

      describe('background flush enabled and deleting a doc', () => {
        beforeEach(ctx => {
          ctx.settings.docstore.archiveOnSoftDelete = true
          ctx.meta.deleted = true
        })

        describe('when the background flush succeeds', () => {
          beforeEach(async ctx => {
            await ctx.DocManager.patchDoc(ctx.project_id, ctx.doc_id, ctx.meta)
          })

          it('should not log a warning', ctx => {
            expect(ctx.logger.warn).to.not.have.been.called
          })

          it('should flush the doc out of mongo', ctx => {
            expect(ctx.DocArchiveManager.archiveDoc).to.have.been.calledWith(
              ctx.project_id,
              ctx.doc_id
            )
          })
        })

        describe('when the background flush fails', () => {
          beforeEach(async ctx => {
            ctx.err = new Error('foo')
            ctx.DocArchiveManager.archiveDoc.rejects(ctx.err)
            await ctx.DocManager.patchDoc(ctx.project_id, ctx.doc_id, ctx.meta)
          })

          it('should log a warning', ctx => {
            expect(ctx.logger.warn).to.have.been.calledWith(
              sinon.match({
                projectId: ctx.project_id,
                docId: ctx.doc_id,
                err: ctx.err,
              }),
              'archiving a single doc in the background failed'
            )
          })
        })
      })
    })

    describe('when the doc does not exist', () => {
      it('should return a NotFoundError', async ctx => {
        ctx.MongoManager.findDoc.resolves(null)
        await expect(
          ctx.DocManager.patchDoc(ctx.project_id, ctx.doc_id, {})
        ).to.be.rejectedWith(
          `No such project/doc to delete: ${ctx.project_id}/${ctx.doc_id}`
        )
      })
    })
  })

  describe('updateDoc', () => {
    beforeEach(ctx => {
      ctx.oldDocLines = ['old', 'doc', 'lines']
      ctx.newDocLines = ['new', 'doc', 'lines']
      ctx.originalRanges = {
        changes: [
          {
            id: new ObjectId().toString(),
            op: { i: 'foo', p: 3 },
            meta: {
              user_id: new ObjectId().toString(),
              ts: new Date().toString(),
            },
          },
        ],
      }
      ctx.newRanges = {
        changes: [
          {
            id: new ObjectId().toString(),
            op: { i: 'bar', p: 6 },
            meta: {
              user_id: new ObjectId().toString(),
              ts: new Date().toString(),
            },
          },
        ],
      }
      ctx.version = 42
      ctx.doc = {
        _id: ctx.doc_id,
        project_id: ctx.project_id,
        lines: ctx.oldDocLines,
        rev: (ctx.rev = 5),
        version: ctx.version,
        ranges: ctx.originalRanges,
      }

      ctx.DocManager._getDoc = sinon.stub()
    })

    describe('when only the doc lines have changed', () => {
      beforeEach(async ctx => {
        ctx.DocManager._getDoc = sinon.stub().resolves(ctx.doc)
        ctx.result = await ctx.DocManager.updateDoc(
          ctx.project_id,
          ctx.doc_id,
          ctx.newDocLines,
          ctx.version,
          ctx.originalRanges
        )
      })

      it('should get the existing doc', ctx => {
        ctx.DocManager._getDoc
          .calledWith(ctx.project_id, ctx.doc_id, {
            version: true,
            rev: true,
            lines: true,
            ranges: true,
            inS3: true,
          })
          .should.equal(true)
      })

      it('should upsert the document to the doc collection', ctx => {
        ctx.MongoManager.upsertIntoDocCollection
          .calledWith(ctx.project_id, ctx.doc_id, ctx.rev, {
            lines: ctx.newDocLines,
          })
          .should.equal(true)
      })

      it('should return the new rev', ctx => {
        expect(ctx.result).to.deep.equal({ modified: true, rev: ctx.rev + 1 })
      })
    })

    describe('when the doc ranges have changed', () => {
      beforeEach(async ctx => {
        ctx.DocManager._getDoc = sinon.stub().resolves(ctx.doc)
        ctx.RangeManager.shouldUpdateRanges.returns(true)
        ctx.result = await ctx.DocManager.updateDoc(
          ctx.project_id,
          ctx.doc_id,
          ctx.oldDocLines,
          ctx.version,
          ctx.newRanges
        )
      })

      it('should upsert the ranges', ctx => {
        ctx.MongoManager.upsertIntoDocCollection
          .calledWith(ctx.project_id, ctx.doc_id, ctx.rev, {
            ranges: ctx.newRanges,
          })
          .should.equal(true)
      })

      it('should return the new rev', ctx => {
        expect(ctx.result).to.deep.equal({ modified: true, rev: ctx.rev + 1 })
      })
    })

    describe('when only the version has changed', () => {
      beforeEach(async ctx => {
        ctx.DocManager._getDoc = sinon.stub().resolves(ctx.doc)
        ctx.result = await ctx.DocManager.updateDoc(
          ctx.project_id,
          ctx.doc_id,
          ctx.oldDocLines,
          ctx.version + 1,
          ctx.originalRanges
        )
      })

      it('should update the version', ctx => {
        ctx.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
          ctx.project_id,
          ctx.doc_id,
          ctx.rev,
          { version: ctx.version + 1 }
        )
      })

      it('should return the old rev', ctx => {
        expect(ctx.result).to.deep.equal({ modified: true, rev: ctx.rev })
      })
    })

    describe('when the doc has not changed at all', () => {
      beforeEach(async ctx => {
        ctx.DocManager._getDoc = sinon.stub().resolves(ctx.doc)
        ctx.result = await ctx.DocManager.updateDoc(
          ctx.project_id,
          ctx.doc_id,
          ctx.oldDocLines,
          ctx.version,
          ctx.originalRanges
        )
      })

      it('should not update the ranges or lines or version', ctx => {
        ctx.MongoManager.upsertIntoDocCollection.called.should.equal(false)
      })

      it('should return the old rev and modified == false', ctx => {
        expect(ctx.result).to.deep.equal({ modified: false, rev: ctx.rev })
      })
    })

    describe('when the version is null', () => {
      it('should return an error', async ctx => {
        await expect(
          ctx.DocManager.updateDoc(
            ctx.project_id,
            ctx.doc_id,
            ctx.newDocLines,
            null,
            ctx.originalRanges
          )
        ).to.be.rejectedWith('no lines, version or ranges provided')
      })
    })

    describe('when the lines are null', () => {
      it('should return an error', async ctx => {
        await expect(
          ctx.DocManager.updateDoc(
            ctx.project_id,
            ctx.doc_id,
            null,
            ctx.version,
            ctx.originalRanges
          )
        ).to.be.rejectedWith('no lines, version or ranges provided')
      })
    })

    describe('when the ranges are null', () => {
      it('should return an error', async ctx => {
        await expect(
          ctx.DocManager.updateDoc(
            ctx.project_id,
            ctx.doc_id,
            ctx.newDocLines,
            ctx.version,
            null
          )
        ).to.be.rejectedWith('no lines, version or ranges provided')
      })
    })

    describe('when there is a generic error getting the doc', () => {
      beforeEach(async ctx => {
        ctx.error = new Error('doc could not be found')
        ctx.DocManager._getDoc = sinon.stub().rejects(ctx.error)
        await expect(
          ctx.DocManager.updateDoc(
            ctx.project_id,
            ctx.doc_id,
            ctx.newDocLines,
            ctx.version,
            ctx.originalRanges
          )
        ).to.be.rejectedWith(ctx.error)
      })

      it('should not upsert the document to the doc collection', ctx => {
        ctx.MongoManager.upsertIntoDocCollection.should.not.have.been.called
      })
    })

    describe('when the version was decremented', () => {
      it('should return an error', async ctx => {
        ctx.DocManager._getDoc = sinon.stub().resolves(ctx.doc)
        await expect(
          ctx.DocManager.updateDoc(
            ctx.project_id,
            ctx.doc_id,
            ctx.newDocLines,
            ctx.version - 1,
            ctx.originalRanges
          )
        ).to.be.rejectedWith(Errors.DocVersionDecrementedError)
      })
    })

    describe('when the doc lines have not changed', () => {
      beforeEach(async ctx => {
        ctx.DocManager._getDoc = sinon.stub().resolves(ctx.doc)
        ctx.result = await ctx.DocManager.updateDoc(
          ctx.project_id,
          ctx.doc_id,
          ctx.oldDocLines.slice(),
          ctx.version,
          ctx.originalRanges
        )
      })

      it('should not update the doc', ctx => {
        ctx.MongoManager.upsertIntoDocCollection.called.should.equal(false)
      })

      it('should return the existing rev', ctx => {
        expect(ctx.result).to.deep.equal({ modified: false, rev: ctx.rev })
      })
    })

    describe('when the doc does not exist', () => {
      beforeEach(async ctx => {
        ctx.DocManager._getDoc = sinon.stub().resolves(null)
        ctx.result = await ctx.DocManager.updateDoc(
          ctx.project_id,
          ctx.doc_id,
          ctx.newDocLines,
          ctx.version,
          ctx.originalRanges
        )
      })

      it('should upsert the document to the doc collection', ctx => {
        ctx.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
          ctx.project_id,
          ctx.doc_id,
          undefined,
          {
            lines: ctx.newDocLines,
            ranges: ctx.originalRanges,
            version: ctx.version,
          }
        )
      })

      it('should return the new rev', ctx => {
        expect(ctx.result).to.deep.equal({ modified: true, rev: 1 })
      })
    })

    describe('when another update is racing', () => {
      beforeEach(async ctx => {
        ctx.DocManager._getDoc = sinon.stub().resolves(ctx.doc)
        ctx.MongoManager.upsertIntoDocCollection
          .onFirstCall()
          .rejects(new Errors.DocRevValueError())
        ctx.RangeManager.shouldUpdateRanges.returns(true)
        ctx.result = await ctx.DocManager.updateDoc(
          ctx.project_id,
          ctx.doc_id,
          ctx.newDocLines,
          ctx.version + 1,
          ctx.newRanges
        )
      })

      it('should upsert the doc twice', ctx => {
        ctx.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
          ctx.project_id,
          ctx.doc_id,
          ctx.rev,
          {
            ranges: ctx.newRanges,
            lines: ctx.newDocLines,
            version: ctx.version + 1,
          }
        )
        ctx.MongoManager.upsertIntoDocCollection.should.have.been.calledTwice
      })

      it('should return the new rev', ctx => {
        expect(ctx.result).to.deep.equal({ modified: true, rev: ctx.rev + 1 })
      })
    })
  })
})

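The new test file above relies on vitest's per-test context, where the deleted mocha file below used `this`. A minimal sketch of the correspondence, assuming vitest's documented test-context behaviour (hooks and tests receive the same context object for a given test, so state attached in beforeEach is visible to that test); the names here are illustrative only:

import { beforeEach, describe, expect, it } from 'vitest'

describe('context migration', () => {
  // mocha equivalent: beforeEach(function () { this.value = 42 })
  beforeEach(ctx => {
    ctx.value = 42 // state scoped to the current test only
  })

  // mocha equivalent: it('...', function () { expect(this.value)... })
  it('sees state set in beforeEach', ctx => {
    expect(ctx.value).toBe(42)
  })
})
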
@@ -1,777 +0,0 @@
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const sinon = require('sinon')
|
||||
const { expect } = require('chai')
|
||||
const modulePath = require('node:path').join(
|
||||
__dirname,
|
||||
'../../../app/js/DocManager'
|
||||
)
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const Errors = require('../../../app/js/Errors')
|
||||
|
||||
describe('DocManager', function () {
|
||||
beforeEach(function () {
|
||||
this.doc_id = new ObjectId().toString()
|
||||
this.project_id = new ObjectId().toString()
|
||||
this.another_project_id = new ObjectId().toString()
|
||||
this.stubbedError = new Error('blew up')
|
||||
this.version = 42
|
||||
|
||||
this.MongoManager = {
|
||||
findDoc: sinon.stub(),
|
||||
getProjectsDocs: sinon.stub(),
|
||||
patchDoc: sinon.stub().resolves(),
|
||||
upsertIntoDocCollection: sinon.stub().resolves(),
|
||||
}
|
||||
this.DocArchiveManager = {
|
||||
unarchiveDoc: sinon.stub(),
|
||||
unArchiveAllDocs: sinon.stub(),
|
||||
archiveDoc: sinon.stub().resolves(),
|
||||
}
|
||||
this.RangeManager = {
|
||||
jsonRangesToMongo(r) {
|
||||
return r
|
||||
},
|
||||
shouldUpdateRanges: sinon.stub().returns(false),
|
||||
fixCommentIds: sinon.stub(),
|
||||
}
|
||||
this.settings = { docstore: {} }
|
||||
|
||||
this.DocManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'./MongoManager': this.MongoManager,
|
||||
'./DocArchiveManager': this.DocArchiveManager,
|
||||
'./RangeManager': this.RangeManager,
|
||||
'@overleaf/settings': this.settings,
|
||||
'./Errors': Errors,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
describe('getFullDoc', function () {
|
||||
beforeEach(function () {
|
||||
this.DocManager._getDoc = sinon.stub()
|
||||
this.doc = {
|
||||
_id: this.doc_id,
|
||||
lines: ['2134'],
|
||||
}
|
||||
})
|
||||
|
||||
it('should call get doc with a quick filter', async function () {
|
||||
this.DocManager._getDoc.resolves(this.doc)
|
||||
const doc = await this.DocManager.getFullDoc(this.project_id, this.doc_id)
|
||||
doc.should.equal(this.doc)
|
||||
this.DocManager._getDoc
|
||||
.calledWith(this.project_id, this.doc_id, {
|
||||
lines: true,
|
||||
rev: true,
|
||||
deleted: true,
|
||||
version: true,
|
||||
ranges: true,
|
||||
inS3: true,
|
||||
})
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return error when get doc errors', async function () {
|
||||
this.DocManager._getDoc.rejects(this.stubbedError)
|
||||
await expect(
|
||||
this.DocManager.getFullDoc(this.project_id, this.doc_id)
|
||||
).to.be.rejectedWith(this.stubbedError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getRawDoc', function () {
|
||||
beforeEach(function () {
|
||||
this.DocManager._getDoc = sinon.stub()
|
||||
this.doc = { lines: ['2134'] }
|
||||
})
|
||||
|
||||
it('should call get doc with a quick filter', async function () {
|
||||
this.DocManager._getDoc.resolves(this.doc)
|
||||
const content = await this.DocManager.getDocLines(
|
||||
this.project_id,
|
||||
this.doc_id
|
||||
)
|
||||
content.should.equal(this.doc.lines.join('\n'))
|
||||
this.DocManager._getDoc
|
||||
.calledWith(this.project_id, this.doc_id, {
|
||||
lines: true,
|
||||
inS3: true,
|
||||
})
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return error when get doc errors', async function () {
|
||||
this.DocManager._getDoc.rejects(this.stubbedError)
|
||||
await expect(
|
||||
this.DocManager.getDocLines(this.project_id, this.doc_id)
|
||||
).to.be.rejectedWith(this.stubbedError)
|
||||
})
|
||||
|
||||
it('should return error when get doc does not exist', async function () {
|
||||
this.DocManager._getDoc.resolves(null)
|
||||
await expect(
|
||||
this.DocManager.getDocLines(this.project_id, this.doc_id)
|
||||
).to.be.rejectedWith(Errors.NotFoundError)
|
||||
})
|
||||
|
||||
it('should return error when get doc has no lines', async function () {
|
||||
this.DocManager._getDoc.resolves({})
|
||||
await expect(
|
||||
this.DocManager.getDocLines(this.project_id, this.doc_id)
|
||||
).to.be.rejectedWith(Errors.DocWithoutLinesError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('_getDoc', function () {
|
||||
it('should return error when get doc does not exist', async function () {
|
||||
this.MongoManager.findDoc.resolves(null)
|
||||
await expect(
|
||||
this.DocManager._getDoc(this.project_id, this.doc_id, { inS3: true })
|
||||
).to.be.rejectedWith(Errors.NotFoundError)
|
||||
})
|
||||
|
||||
it('should fix comment ids', async function () {
|
||||
this.MongoManager.findDoc.resolves({
|
||||
_id: this.doc_id,
|
||||
ranges: {},
|
||||
})
|
||||
await this.DocManager._getDoc(this.project_id, this.doc_id, {
|
||||
inS3: true,
|
||||
ranges: true,
|
||||
})
|
||||
expect(this.RangeManager.fixCommentIds).to.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('getDoc', function () {
|
||||
beforeEach(function () {
|
||||
this.project = { name: 'mock-project' }
|
||||
this.doc = {
|
||||
_id: this.doc_id,
|
||||
project_id: this.project_id,
|
||||
lines: ['mock-lines'],
|
||||
version: this.version,
|
||||
}
|
||||
})
|
||||
|
||||
describe('when using a filter', function () {
|
||||
beforeEach(function () {
|
||||
this.MongoManager.findDoc.resolves(this.doc)
|
||||
})
|
||||
|
||||
it('should error if inS3 is not set to true', async function () {
|
||||
await expect(
|
||||
this.DocManager._getDoc(this.project_id, this.doc_id, {
|
||||
inS3: false,
|
||||
})
|
||||
).to.be.rejected
|
||||
})
|
||||
|
||||
it('should always get inS3 even when no filter is passed', async function () {
|
||||
await expect(this.DocManager._getDoc(this.project_id, this.doc_id)).to
|
||||
.be.rejected
|
||||
this.MongoManager.findDoc.called.should.equal(false)
|
||||
})
|
||||
|
||||
it('should not error if inS3 is set to true', async function () {
|
||||
await this.DocManager._getDoc(this.project_id, this.doc_id, {
|
||||
inS3: true,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc is in the doc collection', function () {
|
||||
beforeEach(async function () {
|
||||
this.MongoManager.findDoc.resolves(this.doc)
|
||||
this.result = await this.DocManager._getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{ version: true, inS3: true }
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the doc from the doc collection', function () {
|
||||
this.MongoManager.findDoc
|
||||
.calledWith(this.project_id, this.doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc with the version', function () {
|
||||
this.result.lines.should.equal(this.doc.lines)
|
||||
this.result.version.should.equal(this.version)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when MongoManager.findDoc errors', function () {
|
||||
it('should return the error', async function () {
|
||||
this.MongoManager.findDoc.rejects(this.stubbedError)
|
||||
await expect(
|
||||
this.DocManager._getDoc(this.project_id, this.doc_id, {
|
||||
version: true,
|
||||
inS3: true,
|
||||
})
|
||||
).to.be.rejectedWith(this.stubbedError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc is archived', function () {
|
||||
beforeEach(async function () {
|
||||
this.doc = {
|
||||
_id: this.doc_id,
|
||||
project_id: this.project_id,
|
||||
version: 2,
|
||||
inS3: true,
|
||||
}
|
||||
this.unarchivedDoc = {
|
||||
_id: this.doc_id,
|
||||
project_id: this.project_id,
|
||||
lines: ['mock-lines'],
|
||||
version: 2,
|
||||
inS3: false,
|
||||
}
|
||||
this.MongoManager.findDoc.resolves(this.doc)
|
||||
this.DocArchiveManager.unarchiveDoc.callsFake(
|
||||
async (projectId, docId) => {
|
||||
this.MongoManager.findDoc.resolves({
|
||||
...this.unarchivedDoc,
|
||||
})
|
||||
}
|
||||
)
|
||||
this.result = await this.DocManager._getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{
|
||||
version: true,
|
||||
inS3: true,
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should call the DocArchive to unarchive the doc', function () {
|
||||
this.DocArchiveManager.unarchiveDoc
|
||||
.calledWith(this.project_id, this.doc_id)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should look up the doc twice', function () {
|
||||
this.MongoManager.findDoc.calledTwice.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc', function () {
|
||||
expect(this.result).to.deep.equal({
|
||||
...this.unarchivedDoc,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc does not exist in the docs collection', function () {
|
||||
it('should return a NotFoundError', async function () {
|
||||
this.MongoManager.findDoc.resolves(null)
|
||||
await expect(
|
||||
this.DocManager._getDoc(this.project_id, this.doc_id, {
|
||||
version: true,
|
||||
inS3: true,
|
||||
})
|
||||
).to.be.rejectedWith(
|
||||
`No such doc: ${this.doc_id} in project ${this.project_id}`
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('getAllNonDeletedDocs', function () {
|
||||
describe('when the project exists', function () {
|
||||
beforeEach(async function () {
|
||||
this.docs = [
|
||||
{
|
||||
_id: this.doc_id,
|
||||
project_id: this.project_id,
|
||||
lines: ['mock-lines'],
|
||||
},
|
||||
]
|
||||
this.MongoManager.getProjectsDocs.resolves(this.docs)
|
||||
this.DocArchiveManager.unArchiveAllDocs.resolves(this.docs)
|
||||
this.filter = { lines: true, ranges: true }
|
||||
this.result = await this.DocManager.getAllNonDeletedDocs(
|
||||
this.project_id,
|
||||
this.filter
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the project from the database', function () {
|
||||
this.MongoManager.getProjectsDocs.should.have.been.calledWith(
|
||||
this.project_id,
|
||||
{ include_deleted: false },
|
||||
this.filter
|
||||
)
|
||||
})
|
||||
|
||||
it('should fix comment ids', async function () {
|
||||
expect(this.RangeManager.fixCommentIds).to.have.been.called
|
||||
})
|
||||
|
||||
it('should return the docs', function () {
|
||||
expect(this.result).to.deep.equal(this.docs)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when there are no docs for the project', function () {
|
||||
it('should return a NotFoundError', async function () {
|
||||
this.MongoManager.getProjectsDocs.resolves(null)
|
||||
this.DocArchiveManager.unArchiveAllDocs.resolves(null)
|
||||
await expect(
|
||||
this.DocManager.getAllNonDeletedDocs(this.project_id, this.filter)
|
||||
).to.be.rejectedWith(`No docs for project ${this.project_id}`)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('patchDoc', function () {
|
||||
describe('when the doc exists', function () {
|
||||
beforeEach(function () {
|
||||
this.lines = ['mock', 'doc', 'lines']
|
||||
this.rev = 77
|
||||
this.MongoManager.findDoc.resolves({
|
||||
_id: new ObjectId(this.doc_id),
|
||||
})
|
||||
this.meta = {}
|
||||
})
|
||||
|
||||
describe('standard path', function () {
|
||||
beforeEach(async function () {
|
||||
await this.DocManager.patchDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.meta
|
||||
)
|
||||
})
|
||||
|
||||
it('should get the doc', function () {
|
||||
expect(this.MongoManager.findDoc).to.have.been.calledWith(
|
||||
this.project_id,
|
||||
this.doc_id
|
||||
)
|
||||
})
|
||||
|
||||
it('should persist the meta', function () {
|
||||
expect(this.MongoManager.patchDoc).to.have.been.calledWith(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.meta
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('background flush disabled and deleting a doc', function () {
|
||||
beforeEach(async function () {
|
||||
this.settings.docstore.archiveOnSoftDelete = false
|
||||
this.meta.deleted = true
|
||||
|
||||
await this.DocManager.patchDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.meta
|
||||
)
|
||||
})
|
||||
|
||||
it('should not flush the doc out of mongo', function () {
|
||||
expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('background flush enabled and not deleting a doc', function () {
|
||||
beforeEach(async function () {
|
||||
this.settings.docstore.archiveOnSoftDelete = false
|
||||
this.meta.deleted = false
|
||||
await this.DocManager.patchDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.meta
|
||||
)
|
||||
})
|
||||
|
||||
it('should not flush the doc out of mongo', function () {
|
||||
expect(this.DocArchiveManager.archiveDoc).to.not.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
      describe('background flush enabled and deleting a doc', function () {
        beforeEach(function () {
          this.settings.docstore.archiveOnSoftDelete = true
          this.meta.deleted = true
        })

        describe('when the background flush succeeds', function () {
          beforeEach(async function () {
            await this.DocManager.patchDoc(
              this.project_id,
              this.doc_id,
              this.meta
            )
          })

          it('should not log a warning', function () {
            expect(this.logger.warn).to.not.have.been.called
          })

          it('should flush the doc out of mongo', function () {
            expect(this.DocArchiveManager.archiveDoc).to.have.been.calledWith(
              this.project_id,
              this.doc_id
            )
          })
        })

        describe('when the background flush fails', function () {
          beforeEach(async function () {
            this.err = new Error('foo')
            this.DocArchiveManager.archiveDoc.rejects(this.err)
            await this.DocManager.patchDoc(
              this.project_id,
              this.doc_id,
              this.meta
            )
          })

          it('should log a warning', function () {
            expect(this.logger.warn).to.have.been.calledWith(
              sinon.match({
                projectId: this.project_id,
                docId: this.doc_id,
                err: this.err,
              }),
              'archiving a single doc in the background failed'
            )
          })
        })
      })
    })

    describe('when the doc does not exist', function () {
      it('should return a NotFoundError', async function () {
        this.MongoManager.findDoc.resolves(null)
        await expect(
          this.DocManager.patchDoc(this.project_id, this.doc_id, {})
        ).to.be.rejectedWith(
          `No such project/doc to delete: ${this.project_id}/${this.doc_id}`
        )
      })
    })
  })

  describe('updateDoc', function () {
    beforeEach(function () {
      this.oldDocLines = ['old', 'doc', 'lines']
      this.newDocLines = ['new', 'doc', 'lines']
      this.originalRanges = {
        changes: [
          {
            id: new ObjectId().toString(),
            op: { i: 'foo', p: 3 },
            meta: {
              user_id: new ObjectId().toString(),
              ts: new Date().toString(),
            },
          },
        ],
      }
      this.newRanges = {
        changes: [
          {
            id: new ObjectId().toString(),
            op: { i: 'bar', p: 6 },
            meta: {
              user_id: new ObjectId().toString(),
              ts: new Date().toString(),
            },
          },
        ],
      }
      this.version = 42
      this.doc = {
        _id: this.doc_id,
        project_id: this.project_id,
        lines: this.oldDocLines,
        rev: (this.rev = 5),
        version: this.version,
        ranges: this.originalRanges,
      }

      this.DocManager._getDoc = sinon.stub()
    })

    describe('when only the doc lines have changed', function () {
      beforeEach(async function () {
        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
        this.result = await this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version,
          this.originalRanges
        )
      })

      it('should get the existing doc', function () {
        this.DocManager._getDoc
          .calledWith(this.project_id, this.doc_id, {
            version: true,
            rev: true,
            lines: true,
            ranges: true,
            inS3: true,
          })
          .should.equal(true)
      })

      it('should upsert the document to the doc collection', function () {
        this.MongoManager.upsertIntoDocCollection
          .calledWith(this.project_id, this.doc_id, this.rev, {
            lines: this.newDocLines,
          })
          .should.equal(true)
      })

      it('should return the new rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
      })
    })

    describe('when the doc ranges have changed', function () {
      beforeEach(async function () {
        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
        this.RangeManager.shouldUpdateRanges.returns(true)
        this.result = await this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines,
          this.version,
          this.newRanges
        )
      })

      it('should upsert the ranges', function () {
        this.MongoManager.upsertIntoDocCollection
          .calledWith(this.project_id, this.doc_id, this.rev, {
            ranges: this.newRanges,
          })
          .should.equal(true)
      })

      it('should return the new rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
      })
    })

    describe('when only the version has changed', function () {
      beforeEach(async function () {
        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
        this.result = await this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines,
          this.version + 1,
          this.originalRanges
        )
      })

      it('should update the version', function () {
        this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
          this.project_id,
          this.doc_id,
          this.rev,
          { version: this.version + 1 }
        )
      })

      it('should return the old rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: this.rev })
      })
    })

    describe('when the doc has not changed at all', function () {
      beforeEach(async function () {
        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
        this.result = await this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines,
          this.version,
          this.originalRanges
        )
      })

      it('should not update the ranges or lines or version', function () {
        this.MongoManager.upsertIntoDocCollection.called.should.equal(false)
      })

      it('should return the old rev and modified == false', function () {
        expect(this.result).to.deep.equal({ modified: false, rev: this.rev })
      })
    })

    describe('when the version is null', function () {
      it('should return an error', async function () {
        await expect(
          this.DocManager.updateDoc(
            this.project_id,
            this.doc_id,
            this.newDocLines,
            null,
            this.originalRanges
          )
        ).to.be.rejectedWith('no lines, version or ranges provided')
      })
    })

    describe('when the lines are null', function () {
      it('should return an error', async function () {
        await expect(
          this.DocManager.updateDoc(
            this.project_id,
            this.doc_id,
            null,
            this.version,
            this.originalRanges
          )
        ).to.be.rejectedWith('no lines, version or ranges provided')
      })
    })

    describe('when the ranges are null', function () {
      it('should return an error', async function () {
        await expect(
          this.DocManager.updateDoc(
            this.project_id,
            this.doc_id,
            this.newDocLines,
            this.version,
            null
          )
        ).to.be.rejectedWith('no lines, version or ranges provided')
      })
    })

    describe('when there is a generic error getting the doc', function () {
      beforeEach(async function () {
        this.error = new Error('doc could not be found')
        this.DocManager._getDoc = sinon.stub().rejects(this.error)
        await expect(
          this.DocManager.updateDoc(
            this.project_id,
            this.doc_id,
            this.newDocLines,
            this.version,
            this.originalRanges
          )
        ).to.be.rejectedWith(this.error)
      })

      it('should not upsert the document to the doc collection', function () {
        this.MongoManager.upsertIntoDocCollection.should.not.have.been.called
      })
    })

    describe('when the version was decremented', function () {
      it('should return an error', async function () {
        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
        await expect(
          this.DocManager.updateDoc(
            this.project_id,
            this.doc_id,
            this.newDocLines,
            this.version - 1,
            this.originalRanges
          )
        ).to.be.rejectedWith(Errors.DocVersionDecrementedError)
      })
    })

    describe('when the doc lines have not changed', function () {
      beforeEach(async function () {
        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
        this.result = await this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines.slice(),
          this.version,
          this.originalRanges
        )
      })

      it('should not update the doc', function () {
        this.MongoManager.upsertIntoDocCollection.called.should.equal(false)
      })

      it('should return the existing rev', function () {
        expect(this.result).to.deep.equal({ modified: false, rev: this.rev })
      })
    })

    describe('when the doc does not exist', function () {
      beforeEach(async function () {
        this.DocManager._getDoc = sinon.stub().resolves(null)
        this.result = await this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version,
          this.originalRanges
        )
      })

      it('should upsert the document to the doc collection', function () {
        this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
          this.project_id,
          this.doc_id,
          undefined,
          {
            lines: this.newDocLines,
            ranges: this.originalRanges,
            version: this.version,
          }
        )
      })

      it('should return the new rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: 1 })
      })
    })

    describe('when another update is racing', function () {
      beforeEach(async function () {
        this.DocManager._getDoc = sinon.stub().resolves(this.doc)
        this.MongoManager.upsertIntoDocCollection
          .onFirstCall()
          .rejects(new Errors.DocRevValueError())
        this.RangeManager.shouldUpdateRanges.returns(true)
        this.result = await this.DocManager.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version + 1,
          this.newRanges
        )
      })

      it('should upsert the doc twice', function () {
        this.MongoManager.upsertIntoDocCollection.should.have.been.calledWith(
          this.project_id,
          this.doc_id,
          this.rev,
          {
            ranges: this.newRanges,
            lines: this.newDocLines,
            version: this.version + 1,
          }
        )
        this.MongoManager.upsertIntoDocCollection.should.have.been.calledTwice
      })

      it('should return the new rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
      })
    })
  })
})
595
services/docstore/test/unit/js/HttpController.test.js
Normal file
@@ -0,0 +1,595 @@
import sinon from 'sinon'
import { assert, beforeEach, describe, expect, it, vi } from 'vitest'
import path from 'node:path'
import { ObjectId } from 'mongodb-legacy'
import Errors from '../../../app/js/Errors.js'

const modulePath = path.join(
  import.meta.dirname,
  '../../../app/js/HttpController'
)

describe('HttpController', () => {
  beforeEach(async ctx => {
    const settings = {
      max_doc_length: 2 * 1024 * 1024,
    }
    ctx.DocArchiveManager = {
      unArchiveAllDocs: sinon.stub().returns(),
    }
    ctx.DocManager = {}

    vi.doMock('../../../app/js/DocManager', () => ({
      default: ctx.DocManager,
    }))

    vi.doMock('../../../app/js/DocArchiveManager', () => ({
      default: ctx.DocArchiveManager,
    }))

    vi.doMock('@overleaf/settings', () => ({
      default: settings,
    }))

    vi.doMock('../../../app/js/HealthChecker', () => ({
      default: {},
    }))

    vi.doMock('../../../app/js/Errors', () => ({
      default: Errors,
    }))

    ctx.HttpController = (await import(modulePath)).default
    ctx.res = {
      send: sinon.stub(),
      sendStatus: sinon.stub(),
      json: sinon.stub(),
      setHeader: sinon.stub(),
    }
    ctx.res.status = sinon.stub().returns(ctx.res)
    ctx.req = { query: {} }
    ctx.next = sinon.stub()
    ctx.projectId = 'mock-project-id'
    ctx.docId = 'mock-doc-id'
    ctx.doc = {
      _id: ctx.docId,
      lines: ['mock', 'lines', ' here', '', '', ' spaces '],
      version: 42,
      rev: 5,
    }
    ctx.deletedDoc = {
      deleted: true,
      _id: ctx.docId,
      lines: ['mock', 'lines', ' here', '', '', ' spaces '],
      version: 42,
      rev: 5,
    }
  })

  describe('getDoc', () => {
    describe('without deleted docs', () => {
      beforeEach(async ctx => {
        ctx.req.params = {
          project_id: ctx.projectId,
          doc_id: ctx.docId,
        }
        ctx.DocManager.getFullDoc = sinon.stub().resolves(ctx.doc)
        await ctx.HttpController.getDoc(ctx.req, ctx.res, ctx.next)
      })

      it('should get the document with the version (including deleted)', ctx => {
        ctx.DocManager.getFullDoc
          .calledWith(ctx.projectId, ctx.docId)
          .should.equal(true)
      })

      it('should return the doc as JSON', ctx => {
        ctx.res.json
          .calledWith({
            _id: ctx.docId,
            lines: ctx.doc.lines,
            rev: ctx.doc.rev,
            version: ctx.doc.version,
          })
          .should.equal(true)
      })
    })

    describe('which is deleted', () => {
      beforeEach(ctx => {
        ctx.req.params = {
          project_id: ctx.projectId,
          doc_id: ctx.docId,
        }
        ctx.DocManager.getFullDoc = sinon.stub().resolves(ctx.deletedDoc)
      })

      it('should get the doc from the doc manager', async ctx => {
        await ctx.HttpController.getDoc(ctx.req, ctx.res, ctx.next)
        ctx.DocManager.getFullDoc
          .calledWith(ctx.projectId, ctx.docId)
          .should.equal(true)
      })

      it('should return 404 if the query string delete is not set ', async ctx => {
        await ctx.HttpController.getDoc(ctx.req, ctx.res, ctx.next)
        ctx.res.sendStatus.calledWith(404).should.equal(true)
      })

      it('should return the doc as JSON if include_deleted is set to true', async ctx => {
        ctx.req.query.include_deleted = 'true'
        await ctx.HttpController.getDoc(ctx.req, ctx.res, ctx.next)
        ctx.res.json
          .calledWith({
            _id: ctx.docId,
            lines: ctx.doc.lines,
            rev: ctx.doc.rev,
            deleted: true,
            version: ctx.doc.version,
          })
          .should.equal(true)
      })
    })
  })

  describe('getRawDoc', () => {
    beforeEach(async ctx => {
      ctx.req.params = {
        project_id: ctx.projectId,
        doc_id: ctx.docId,
      }
      ctx.DocManager.getDocLines = sinon
        .stub()
        .resolves(ctx.doc.lines.join('\n'))
      await ctx.HttpController.getRawDoc(ctx.req, ctx.res, ctx.next)
    })

    it('should get the document without the version', ctx => {
      ctx.DocManager.getDocLines
        .calledWith(ctx.projectId, ctx.docId)
        .should.equal(true)
    })

    it('should set the content type header', ctx => {
      ctx.res.setHeader
        .calledWith('content-type', 'text/plain')
        .should.equal(true)
    })

    it('should send the raw version of the doc', ctx => {
      assert.deepEqual(
        ctx.res.send.args[0][0],
        `${ctx.doc.lines[0]}\n${ctx.doc.lines[1]}\n${ctx.doc.lines[2]}\n${ctx.doc.lines[3]}\n${ctx.doc.lines[4]}\n${ctx.doc.lines[5]}`
      )
    })
  })

  describe('getAllDocs', () => {
    describe('normally', () => {
      beforeEach(async ctx => {
        ctx.req.params = { project_id: ctx.projectId }
        ctx.docs = [
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'one'],
            rev: 2,
          },
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        ctx.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(ctx.docs)
        await ctx.HttpController.getAllDocs(ctx.req, ctx.res, ctx.next)
      })

      it('should get all the (non-deleted) docs', ctx => {
        ctx.DocManager.getAllNonDeletedDocs
          .calledWith(ctx.projectId, { lines: true, rev: true })
          .should.equal(true)
      })

      it('should return the doc as JSON', ctx => {
        ctx.res.json
          .calledWith([
            {
              _id: ctx.docs[0]._id.toString(),
              lines: ctx.docs[0].lines,
              rev: ctx.docs[0].rev,
            },
            {
              _id: ctx.docs[1]._id.toString(),
              lines: ctx.docs[1].lines,
              rev: ctx.docs[1].rev,
            },
          ])
          .should.equal(true)
      })
    })

    describe('with null lines', () => {
      beforeEach(async ctx => {
        ctx.req.params = { project_id: ctx.projectId }
        ctx.docs = [
          {
            _id: new ObjectId(),
            lines: null,
            rev: 2,
          },
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        ctx.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(ctx.docs)
        await ctx.HttpController.getAllDocs(ctx.req, ctx.res, ctx.next)
      })

      it('should return the doc with fallback lines', ctx => {
        ctx.res.json
          .calledWith([
            {
              _id: ctx.docs[0]._id.toString(),
              lines: [],
              rev: ctx.docs[0].rev,
            },
            {
              _id: ctx.docs[1]._id.toString(),
              lines: ctx.docs[1].lines,
              rev: ctx.docs[1].rev,
            },
          ])
          .should.equal(true)
      })
    })

    describe('with a null doc', () => {
      beforeEach(async ctx => {
        ctx.req.params = { project_id: ctx.projectId }
        ctx.docs = [
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'one'],
            rev: 2,
          },
          null,
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        ctx.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(ctx.docs)
        await ctx.HttpController.getAllDocs(ctx.req, ctx.res, ctx.next)
      })

      it('should return the non null docs as JSON', ctx => {
        ctx.res.json
          .calledWith([
            {
              _id: ctx.docs[0]._id.toString(),
              lines: ctx.docs[0].lines,
              rev: ctx.docs[0].rev,
            },
            {
              _id: ctx.docs[2]._id.toString(),
              lines: ctx.docs[2].lines,
              rev: ctx.docs[2].rev,
            },
          ])
          .should.equal(true)
      })

      it('should log out an error', ctx => {
        ctx.logger.error
          .calledWith(
            {
              err: sinon.match.has('message', 'null doc'),
              projectId: ctx.projectId,
            },
            'encountered null doc'
          )
          .should.equal(true)
      })
    })
  })

  describe('getAllRanges', () => {
    describe('normally', () => {
      beforeEach(async ctx => {
        ctx.req.params = { project_id: ctx.projectId }
        ctx.docs = [
          {
            _id: new ObjectId(),
            ranges: { mock_ranges: 'one' },
          },
          {
            _id: new ObjectId(),
            ranges: { mock_ranges: 'two' },
          },
        ]
        ctx.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(ctx.docs)
        await ctx.HttpController.getAllRanges(ctx.req, ctx.res, ctx.next)
      })

      it('should get all the (non-deleted) doc ranges', ctx => {
        ctx.DocManager.getAllNonDeletedDocs
          .calledWith(ctx.projectId, { ranges: true })
          .should.equal(true)
      })

      it('should return the doc as JSON', ctx => {
        ctx.res.json
          .calledWith([
            {
              _id: ctx.docs[0]._id.toString(),
              ranges: ctx.docs[0].ranges,
            },
            {
              _id: ctx.docs[1]._id.toString(),
              ranges: ctx.docs[1].ranges,
            },
          ])
          .should.equal(true)
      })
    })
  })

  describe('updateDoc', () => {
    beforeEach(ctx => {
      ctx.req.params = {
        project_id: ctx.projectId,
        doc_id: ctx.docId,
      }
    })

    describe('when the doc lines exist and were updated', () => {
      beforeEach(async ctx => {
        ctx.req.body = {
          lines: (ctx.lines = ['hello', 'world']),
          version: (ctx.version = 42),
          ranges: (ctx.ranges = { changes: 'mock' }),
        }
        ctx.rev = 5
        ctx.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: true, rev: ctx.rev })
        await ctx.HttpController.updateDoc(ctx.req, ctx.res, ctx.next)
      })

      it('should update the document', ctx => {
        ctx.DocManager.updateDoc
          .calledWith(
            ctx.projectId,
            ctx.docId,
            ctx.lines,
            ctx.version,
            ctx.ranges
          )
          .should.equal(true)
      })

      it('should return a modified status', ctx => {
        ctx.res.json
          .calledWith({ modified: true, rev: ctx.rev })
          .should.equal(true)
      })
    })

    describe('when the doc lines exist and were not updated', () => {
      beforeEach(async ctx => {
        ctx.req.body = {
          lines: (ctx.lines = ['hello', 'world']),
          version: (ctx.version = 42),
          ranges: {},
        }
        ctx.rev = 5
        ctx.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: ctx.rev })
        await ctx.HttpController.updateDoc(ctx.req, ctx.res, ctx.next)
      })

      it('should return a modified status', ctx => {
        ctx.res.json
          .calledWith({ modified: false, rev: ctx.rev })
          .should.equal(true)
      })
    })

    describe('when the doc lines are not provided', () => {
      beforeEach(async ctx => {
        ctx.req.body = { version: 42, ranges: {} }
        ctx.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: 0 })
        await ctx.HttpController.updateDoc(ctx.req, ctx.res, ctx.next)
      })

      it('should not update the document', ctx => {
        ctx.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 400 (bad request) response', ctx => {
        ctx.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc version are not provided', () => {
      beforeEach(async ctx => {
        ctx.req.body = { version: 42, lines: ['hello world'] }
        ctx.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: 0 })
        await ctx.HttpController.updateDoc(ctx.req, ctx.res, ctx.next)
      })

      it('should not update the document', ctx => {
        ctx.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 400 (bad request) response', ctx => {
        ctx.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc ranges is not provided', () => {
      beforeEach(async ctx => {
        ctx.req.body = { lines: ['foo'], version: 42 }
        ctx.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: 0 })
        await ctx.HttpController.updateDoc(ctx.req, ctx.res, ctx.next)
      })

      it('should not update the document', ctx => {
        ctx.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 400 (bad request) response', ctx => {
        ctx.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc body is too large', () => {
      beforeEach(async ctx => {
        ctx.req.body = {
          lines: (ctx.lines = Array(2049).fill('a'.repeat(1024))),
          version: (ctx.version = 42),
          ranges: (ctx.ranges = { changes: 'mock' }),
        }
        ctx.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: 0 })
        await ctx.HttpController.updateDoc(ctx.req, ctx.res, ctx.next)
      })

      it('should not update the document', ctx => {
        ctx.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 413 (too large) response', ctx => {
        sinon.assert.calledWith(ctx.res.status, 413)
      })

      it('should report that the document body is too large', ctx => {
        sinon.assert.calledWith(ctx.res.send, 'document body too large')
      })
    })
  })

  describe('patchDoc', () => {
    beforeEach(async ctx => {
      ctx.req.params = {
        project_id: ctx.projectId,
        doc_id: ctx.docId,
      }
      ctx.req.body = { name: 'foo.tex' }
      ctx.DocManager.patchDoc = sinon.stub().resolves()
      await ctx.HttpController.patchDoc(ctx.req, ctx.res, ctx.next)
    })

    it('should delete the document', ctx => {
      expect(ctx.DocManager.patchDoc).to.have.been.calledWith(
        ctx.projectId,
        ctx.docId
      )
    })

    it('should return a 204 (No Content)', ctx => {
      expect(ctx.res.sendStatus).to.have.been.calledWith(204)
    })

    describe('with an invalid payload', () => {
      beforeEach(async ctx => {
        ctx.req.body = { cannot: 'happen' }

        ctx.DocManager.patchDoc = sinon.stub().resolves()
        await ctx.HttpController.patchDoc(ctx.req, ctx.res, ctx.next)
      })

      it('should log a message', ctx => {
        expect(ctx.logger.fatal).to.have.been.calledWith(
          { field: 'cannot' },
          'joi validation for pathDoc is broken'
        )
      })

      it('should not pass the invalid field along', ctx => {
        expect(ctx.DocManager.patchDoc).to.have.been.calledWith(
          ctx.projectId,
          ctx.docId,
          {}
        )
      })
    })
  })

  describe('archiveAllDocs', () => {
    beforeEach(async ctx => {
      ctx.req.params = { project_id: ctx.projectId }
      ctx.DocArchiveManager.archiveAllDocs = sinon.stub().resolves()
      await ctx.HttpController.archiveAllDocs(ctx.req, ctx.res, ctx.next)
    })

    it('should archive the project', ctx => {
      ctx.DocArchiveManager.archiveAllDocs
        .calledWith(ctx.projectId)
        .should.equal(true)
    })

    it('should return a 204 (No Content)', ctx => {
      ctx.res.sendStatus.calledWith(204).should.equal(true)
    })
  })

  describe('unArchiveAllDocs', () => {
    beforeEach(ctx => {
      ctx.req.params = { project_id: ctx.projectId }
    })

    describe('on success', () => {
      beforeEach(async ctx => {
        await ctx.HttpController.unArchiveAllDocs(ctx.req, ctx.res, ctx.next)
      })

      it('returns a 200', ctx => {
        expect(ctx.res.sendStatus).to.have.been.calledWith(200)
      })
    })

    describe("when the archived rev doesn't match", () => {
      beforeEach(async ctx => {
        ctx.DocArchiveManager.unArchiveAllDocs.rejects(
          new Errors.DocRevValueError('bad rev')
        )
        await ctx.HttpController.unArchiveAllDocs(ctx.req, ctx.res, ctx.next)
      })

      it('returns a 409', ctx => {
        expect(ctx.res.sendStatus).to.have.been.calledWith(409)
      })
    })
  })

  describe('destroyProject', () => {
    beforeEach(async ctx => {
      ctx.req.params = { project_id: ctx.projectId }
      ctx.DocArchiveManager.destroyProject = sinon.stub().resolves()
      await ctx.HttpController.destroyProject(ctx.req, ctx.res, ctx.next)
    })

    it('should destroy the docs', ctx => {
      sinon.assert.calledWith(
        ctx.DocArchiveManager.destroyProject,
        ctx.projectId
      )
    })

    it('should return 204', ctx => {
      sinon.assert.calledWith(ctx.res.sendStatus, 204)
    })
  })
})
@@ -1,589 +0,0 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('node:path').join(
  __dirname,
  '../../../app/js/HttpController'
)
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors')

describe('HttpController', function () {
  beforeEach(function () {
    const settings = {
      max_doc_length: 2 * 1024 * 1024,
    }
    this.DocArchiveManager = {
      unArchiveAllDocs: sinon.stub().returns(),
    }
    this.DocManager = {}
    this.HttpController = SandboxedModule.require(modulePath, {
      requires: {
        './DocManager': this.DocManager,
        './DocArchiveManager': this.DocArchiveManager,
        '@overleaf/settings': settings,
        './HealthChecker': {},
        './Errors': Errors,
      },
    })
    this.res = {
      send: sinon.stub(),
      sendStatus: sinon.stub(),
      json: sinon.stub(),
      setHeader: sinon.stub(),
    }
    this.res.status = sinon.stub().returns(this.res)
    this.req = { query: {} }
    this.next = sinon.stub()
    this.projectId = 'mock-project-id'
    this.docId = 'mock-doc-id'
    this.doc = {
      _id: this.docId,
      lines: ['mock', 'lines', ' here', '', '', ' spaces '],
      version: 42,
      rev: 5,
    }
    this.deletedDoc = {
      deleted: true,
      _id: this.docId,
      lines: ['mock', 'lines', ' here', '', '', ' spaces '],
      version: 42,
      rev: 5,
    }
  })

  describe('getDoc', function () {
    describe('without deleted docs', function () {
      beforeEach(async function () {
        this.req.params = {
          project_id: this.projectId,
          doc_id: this.docId,
        }
        this.DocManager.getFullDoc = sinon.stub().resolves(this.doc)
        await this.HttpController.getDoc(this.req, this.res, this.next)
      })

      it('should get the document with the version (including deleted)', function () {
        this.DocManager.getFullDoc
          .calledWith(this.projectId, this.docId)
          .should.equal(true)
      })

      it('should return the doc as JSON', function () {
        this.res.json
          .calledWith({
            _id: this.docId,
            lines: this.doc.lines,
            rev: this.doc.rev,
            version: this.doc.version,
          })
          .should.equal(true)
      })
    })

    describe('which is deleted', function () {
      beforeEach(function () {
        this.req.params = {
          project_id: this.projectId,
          doc_id: this.docId,
        }
        this.DocManager.getFullDoc = sinon.stub().resolves(this.deletedDoc)
      })

      it('should get the doc from the doc manager', async function () {
        await this.HttpController.getDoc(this.req, this.res, this.next)
        this.DocManager.getFullDoc
          .calledWith(this.projectId, this.docId)
          .should.equal(true)
      })

      it('should return 404 if the query string delete is not set ', async function () {
        await this.HttpController.getDoc(this.req, this.res, this.next)
        this.res.sendStatus.calledWith(404).should.equal(true)
      })

      it('should return the doc as JSON if include_deleted is set to true', async function () {
        this.req.query.include_deleted = 'true'
        await this.HttpController.getDoc(this.req, this.res, this.next)
        this.res.json
          .calledWith({
            _id: this.docId,
            lines: this.doc.lines,
            rev: this.doc.rev,
            deleted: true,
            version: this.doc.version,
          })
          .should.equal(true)
      })
    })
  })

  describe('getRawDoc', function () {
    beforeEach(async function () {
      this.req.params = {
        project_id: this.projectId,
        doc_id: this.docId,
      }
      this.DocManager.getDocLines = sinon
        .stub()
        .resolves(this.doc.lines.join('\n'))
      await this.HttpController.getRawDoc(this.req, this.res, this.next)
    })

    it('should get the document without the version', function () {
      this.DocManager.getDocLines
        .calledWith(this.projectId, this.docId)
        .should.equal(true)
    })

    it('should set the content type header', function () {
      this.res.setHeader
        .calledWith('content-type', 'text/plain')
        .should.equal(true)
    })

    it('should send the raw version of the doc', function () {
      assert.deepEqual(
        this.res.send.args[0][0],
        `${this.doc.lines[0]}\n${this.doc.lines[1]}\n${this.doc.lines[2]}\n${this.doc.lines[3]}\n${this.doc.lines[4]}\n${this.doc.lines[5]}`
      )
    })
  })

  describe('getAllDocs', function () {
    describe('normally', function () {
      beforeEach(async function () {
        this.req.params = { project_id: this.projectId }
        this.docs = [
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'one'],
            rev: 2,
          },
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
        await this.HttpController.getAllDocs(this.req, this.res, this.next)
      })

      it('should get all the (non-deleted) docs', function () {
        this.DocManager.getAllNonDeletedDocs
          .calledWith(this.projectId, { lines: true, rev: true })
          .should.equal(true)
      })

      it('should return the doc as JSON', function () {
        this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              lines: this.docs[0].lines,
              rev: this.docs[0].rev,
            },
            {
              _id: this.docs[1]._id.toString(),
              lines: this.docs[1].lines,
              rev: this.docs[1].rev,
            },
          ])
          .should.equal(true)
      })
    })

    describe('with null lines', function () {
      beforeEach(async function () {
        this.req.params = { project_id: this.projectId }
        this.docs = [
          {
            _id: new ObjectId(),
            lines: null,
            rev: 2,
          },
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
        await this.HttpController.getAllDocs(this.req, this.res, this.next)
      })

      it('should return the doc with fallback lines', function () {
        this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              lines: [],
              rev: this.docs[0].rev,
            },
            {
              _id: this.docs[1]._id.toString(),
              lines: this.docs[1].lines,
              rev: this.docs[1].rev,
            },
          ])
          .should.equal(true)
      })
    })

    describe('with a null doc', function () {
      beforeEach(async function () {
        this.req.params = { project_id: this.projectId }
        this.docs = [
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'one'],
            rev: 2,
          },
          null,
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
        await this.HttpController.getAllDocs(this.req, this.res, this.next)
      })

      it('should return the non null docs as JSON', function () {
        this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              lines: this.docs[0].lines,
              rev: this.docs[0].rev,
            },
            {
              _id: this.docs[2]._id.toString(),
              lines: this.docs[2].lines,
              rev: this.docs[2].rev,
            },
          ])
          .should.equal(true)
      })

      it('should log out an error', function () {
        this.logger.error
          .calledWith(
            {
              err: sinon.match.has('message', 'null doc'),
              projectId: this.projectId,
            },
            'encountered null doc'
          )
          .should.equal(true)
      })
    })
  })

  describe('getAllRanges', function () {
    describe('normally', function () {
      beforeEach(async function () {
        this.req.params = { project_id: this.projectId }
        this.docs = [
          {
            _id: new ObjectId(),
            ranges: { mock_ranges: 'one' },
          },
          {
            _id: new ObjectId(),
            ranges: { mock_ranges: 'two' },
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon.stub().resolves(this.docs)
        await this.HttpController.getAllRanges(this.req, this.res, this.next)
      })

      it('should get all the (non-deleted) doc ranges', function () {
        this.DocManager.getAllNonDeletedDocs
          .calledWith(this.projectId, { ranges: true })
          .should.equal(true)
      })

      it('should return the doc as JSON', function () {
        this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              ranges: this.docs[0].ranges,
            },
            {
              _id: this.docs[1]._id.toString(),
              ranges: this.docs[1].ranges,
            },
          ])
          .should.equal(true)
      })
    })
  })

  describe('updateDoc', function () {
    beforeEach(function () {
      this.req.params = {
        project_id: this.projectId,
        doc_id: this.docId,
      }
    })

    describe('when the doc lines exist and were updated', function () {
      beforeEach(async function () {
        this.req.body = {
          lines: (this.lines = ['hello', 'world']),
          version: (this.version = 42),
          ranges: (this.ranges = { changes: 'mock' }),
        }
        this.rev = 5
        this.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: true, rev: this.rev })
        await this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should update the document', function () {
        this.DocManager.updateDoc
          .calledWith(
            this.projectId,
            this.docId,
            this.lines,
            this.version,
            this.ranges
          )
          .should.equal(true)
      })

      it('should return a modified status', function () {
        this.res.json
          .calledWith({ modified: true, rev: this.rev })
          .should.equal(true)
      })
    })

    describe('when the doc lines exist and were not updated', function () {
      beforeEach(async function () {
        this.req.body = {
          lines: (this.lines = ['hello', 'world']),
          version: (this.version = 42),
          ranges: {},
        }
        this.rev = 5
        this.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: this.rev })
        await this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should return a modified status', function () {
        this.res.json
          .calledWith({ modified: false, rev: this.rev })
          .should.equal(true)
      })
    })

    describe('when the doc lines are not provided', function () {
      beforeEach(async function () {
        this.req.body = { version: 42, ranges: {} }
        this.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: 0 })
        await this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        this.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 400 (bad request) response', function () {
        this.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc version are not provided', function () {
      beforeEach(async function () {
        this.req.body = { version: 42, lines: ['hello world'] }
        this.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: 0 })
        await this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        this.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 400 (bad request) response', function () {
        this.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc ranges is not provided', function () {
      beforeEach(async function () {
        this.req.body = { lines: ['foo'], version: 42 }
        this.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: 0 })
        await this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        this.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 400 (bad request) response', function () {
        this.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc body is too large', function () {
      beforeEach(async function () {
        this.req.body = {
          lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
          version: (this.version = 42),
          ranges: (this.ranges = { changes: 'mock' }),
        }
        this.DocManager.updateDoc = sinon
          .stub()
          .resolves({ modified: false, rev: 0 })
        await this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        this.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 413 (too large) response', function () {
        sinon.assert.calledWith(this.res.status, 413)
      })

      it('should report that the document body is too large', function () {
        sinon.assert.calledWith(this.res.send, 'document body too large')
      })
    })
  })

  describe('patchDoc', function () {
    beforeEach(async function () {
      this.req.params = {
        project_id: this.projectId,
        doc_id: this.docId,
      }
      this.req.body = { name: 'foo.tex' }
      this.DocManager.patchDoc = sinon.stub().resolves()
      await this.HttpController.patchDoc(this.req, this.res, this.next)
    })

    it('should delete the document', function () {
      expect(this.DocManager.patchDoc).to.have.been.calledWith(
        this.projectId,
        this.docId
      )
    })

    it('should return a 204 (No Content)', function () {
      expect(this.res.sendStatus).to.have.been.calledWith(204)
    })

    describe('with an invalid payload', function () {
      beforeEach(async function () {
        this.req.body = { cannot: 'happen' }

        this.DocManager.patchDoc = sinon.stub().resolves()
        await this.HttpController.patchDoc(this.req, this.res, this.next)
      })

      it('should log a message', function () {
        expect(this.logger.fatal).to.have.been.calledWith(
          { field: 'cannot' },
          'joi validation for pathDoc is broken'
        )
      })

      it('should not pass the invalid field along', function () {
        expect(this.DocManager.patchDoc).to.have.been.calledWith(
          this.projectId,
          this.docId,
          {}
        )
      })
    })
  })

  describe('archiveAllDocs', function () {
    beforeEach(async function () {
      this.req.params = { project_id: this.projectId }
      this.DocArchiveManager.archiveAllDocs = sinon.stub().resolves()
      await this.HttpController.archiveAllDocs(this.req, this.res, this.next)
    })

    it('should archive the project', function () {
      this.DocArchiveManager.archiveAllDocs
        .calledWith(this.projectId)
        .should.equal(true)
    })

    it('should return a 204 (No Content)', function () {
      this.res.sendStatus.calledWith(204).should.equal(true)
    })
  })

  describe('unArchiveAllDocs', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.projectId }
    })

    describe('on success', function () {
      beforeEach(async function () {
        await this.HttpController.unArchiveAllDocs(
          this.req,
          this.res,
          this.next
        )
      })

      it('returns a 200', function () {
        expect(this.res.sendStatus).to.have.been.calledWith(200)
      })
    })

    describe("when the archived rev doesn't match", function () {
      beforeEach(async function () {
        this.DocArchiveManager.unArchiveAllDocs.rejects(
          new Errors.DocRevValueError('bad rev')
        )
        await this.HttpController.unArchiveAllDocs(
          this.req,
          this.res,
          this.next
        )
      })

      it('returns a 409', function () {
        expect(this.res.sendStatus).to.have.been.calledWith(409)
      })
    })
  })

  describe('destroyProject', function () {
    beforeEach(async function () {
      this.req.params = { project_id: this.projectId }
      this.DocArchiveManager.destroyProject = sinon.stub().resolves()
      await this.HttpController.destroyProject(this.req, this.res, this.next)
    })

    it('should destroy the docs', function () {
      sinon.assert.calledWith(
        this.DocArchiveManager.destroyProject,
        this.projectId
      )
    })

    it('should return 204', function () {
      sinon.assert.calledWith(this.res.sendStatus, 204)
    })
  })
})
411
services/docstore/test/unit/js/MongoManager.test.js
Normal file
@@ -0,0 +1,411 @@
import sinon from 'sinon'
import { ObjectId } from 'mongodb-legacy'
import path from 'node:path'
import { assert, beforeEach, describe, expect, it, vi } from 'vitest'
import Errors from '../../../app/js/Errors.js'

const modulePath = path.join(
  import.meta.dirname,
  '../../../app/js/MongoManager'
)

describe('MongoManager', () => {
  beforeEach(async ctx => {
    ctx.db = {
      docs: {
        updateOne: sinon.stub().resolves({ matchedCount: 1 }),
        insertOne: sinon.stub().resolves(),
      },
    }

    vi.doMock('../../../app/js/mongodb', () => ({
      default: {
        db: ctx.db,
        ObjectId,
      },
    }))

    vi.doMock('@overleaf/settings', () => ({
      default: {
        max_deleted_docs: 42,
        docstore: { archivingLockDurationMs: 5000 },
      },
    }))

    vi.doMock('../../../app/js/Errors', () => ({
      default: Errors,
    }))

    ctx.MongoManager = (await import(modulePath)).default
    ctx.projectId = new ObjectId().toString()
    ctx.docId = new ObjectId().toString()
    ctx.rev = 42
    ctx.stubbedErr = new Error('hello world')
    ctx.lines = ['Three French hens', 'Two turtle doves']
  })

  describe('findDoc', () => {
    beforeEach(async ctx => {
      ctx.doc = { name: 'mock-doc' }
      ctx.db.docs.findOne = sinon.stub().resolves(ctx.doc)
      ctx.filter = { lines: true }
      ctx.result = await ctx.MongoManager.findDoc(
        ctx.projectId,
        ctx.docId,
        ctx.filter
      )
    })

    it('should find the doc', ctx => {
      ctx.db.docs.findOne
        .calledWith(
          {
            _id: new ObjectId(ctx.docId),
            project_id: new ObjectId(ctx.projectId),
          },
          {
            projection: ctx.filter,
          }
        )
        .should.equal(true)
    })

    it('should return the doc', ctx => {
      expect(ctx.result).to.deep.equal(ctx.doc)
    })
  })

  describe('patchDoc', () => {
    beforeEach(async ctx => {
      ctx.meta = { name: 'foo.tex' }
      await ctx.MongoManager.patchDoc(ctx.projectId, ctx.docId, ctx.meta)
    })

    it('should pass the parameter along', ctx => {
      ctx.db.docs.updateOne.should.have.been.calledWith(
        {
          _id: new ObjectId(ctx.docId),
          project_id: new ObjectId(ctx.projectId),
        },
        {
          $set: ctx.meta,
        }
      )
    })
  })

  describe('getProjectsDocs', () => {
    beforeEach(ctx => {
      ctx.filter = { lines: true }
      ctx.doc1 = { name: 'mock-doc1' }
      ctx.doc2 = { name: 'mock-doc2' }
      ctx.doc3 = { name: 'mock-doc3' }
      ctx.doc4 = { name: 'mock-doc4' }
      ctx.db.docs.find = sinon.stub().returns({
        toArray: sinon.stub().resolves([ctx.doc1, ctx.doc3, ctx.doc4]),
      })
    })

    describe('with included_deleted = false', () => {
      beforeEach(async ctx => {
        ctx.result = await ctx.MongoManager.getProjectsDocs(
          ctx.projectId,
          { include_deleted: false },
          ctx.filter
        )
      })

      it('should find the non-deleted docs via the project_id', ctx => {
        ctx.db.docs.find
          .calledWith(
            {
              project_id: new ObjectId(ctx.projectId),
              deleted: { $ne: true },
            },
            {
              projection: ctx.filter,
            }
          )
          .should.equal(true)
      })

      it('should return the docs', ctx => {
        expect(ctx.result).to.deep.equal([ctx.doc1, ctx.doc3, ctx.doc4])
      })
    })

    describe('with included_deleted = true', () => {
      beforeEach(async ctx => {
        ctx.result = await ctx.MongoManager.getProjectsDocs(
          ctx.projectId,
          { include_deleted: true },
          ctx.filter
        )
      })

      it('should find all via the project_id', ctx => {
        ctx.db.docs.find
          .calledWith(
            {
              project_id: new ObjectId(ctx.projectId),
            },
            {
              projection: ctx.filter,
            }
          )
          .should.equal(true)
      })

      it('should return the docs', ctx => {
        expect(ctx.result).to.deep.equal([ctx.doc1, ctx.doc3, ctx.doc4])
      })
    })
  })

  describe('getProjectsDeletedDocs', () => {
    beforeEach(async ctx => {
      ctx.filter = { name: true }
      ctx.doc1 = { _id: '1', name: 'mock-doc1.tex' }
      ctx.doc2 = { _id: '2', name: 'mock-doc2.tex' }
      ctx.doc3 = { _id: '3', name: 'mock-doc3.tex' }
      ctx.db.docs.find = sinon.stub().returns({
        toArray: sinon.stub().resolves([ctx.doc1, ctx.doc2, ctx.doc3]),
      })
      ctx.result = await ctx.MongoManager.getProjectsDeletedDocs(
        ctx.projectId,
        ctx.filter
      )
    })

    it('should find the deleted docs via the project_id', ctx => {
      ctx.db.docs.find
        .calledWith({
          project_id: new ObjectId(ctx.projectId),
          deleted: true,
        })
        .should.equal(true)
    })

    it('should filter, sort by deletedAt and limit', ctx => {
      ctx.db.docs.find
        .calledWith(sinon.match.any, {
          projection: ctx.filter,
          sort: { deletedAt: -1 },
          limit: 42,
        })
        .should.equal(true)
    })

    it('should return the docs', ctx => {
      expect(ctx.result).to.deep.equal([ctx.doc1, ctx.doc2, ctx.doc3])
    })
  })

  describe('upsertIntoDocCollection', () => {
    beforeEach(ctx => {
      ctx.oldRev = 77
    })

    it('should upsert the document', async ctx => {
      await ctx.MongoManager.upsertIntoDocCollection(
        ctx.projectId,
        ctx.docId,
        ctx.oldRev,
        { lines: ctx.lines }
      )

      const args = ctx.db.docs.updateOne.args[0]
      assert.deepEqual(args[0], {
        _id: new ObjectId(ctx.docId),
        project_id: new ObjectId(ctx.projectId),
        rev: ctx.oldRev,
      })
      assert.equal(args[1].$set.lines, ctx.lines)
      assert.equal(args[1].$inc.rev, 1)
    })

    it('should handle update error', async ctx => {
      ctx.db.docs.updateOne.rejects(ctx.stubbedErr)
      await expect(
        ctx.MongoManager.upsertIntoDocCollection(
          ctx.projectId,
          ctx.docId,
          ctx.rev,
          {
            lines: ctx.lines,
          }
        )
      ).to.be.rejectedWith(ctx.stubbedErr)
    })

    it('should insert without a previous rev', async ctx => {
      await ctx.MongoManager.upsertIntoDocCollection(
        ctx.projectId,
        ctx.docId,
        null,
        { lines: ctx.lines, ranges: ctx.ranges }
      )

      expect(ctx.db.docs.insertOne).to.have.been.calledWith({
        _id: new ObjectId(ctx.docId),
        project_id: new ObjectId(ctx.projectId),
        rev: 1,
        lines: ctx.lines,
        ranges: ctx.ranges,
      })
    })

    it('should handle generic insert error', async ctx => {
      ctx.db.docs.insertOne.rejects(ctx.stubbedErr)
      await expect(
        ctx.MongoManager.upsertIntoDocCollection(
          ctx.projectId,
          ctx.docId,
          null,
          { lines: ctx.lines, ranges: ctx.ranges }
        )
      ).to.be.rejectedWith(ctx.stubbedErr)
    })

    it('should handle duplicate insert error', async ctx => {
      ctx.db.docs.insertOne.rejects({ code: 11000 })
      await expect(
        ctx.MongoManager.upsertIntoDocCollection(
          ctx.projectId,
          ctx.docId,
          null,
          { lines: ctx.lines, ranges: ctx.ranges }
        )
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })
  })

  describe('destroyProject', () => {
    beforeEach(async ctx => {
      ctx.projectId = new ObjectId()
      ctx.db.docs.deleteMany = sinon.stub().resolves()
      await ctx.MongoManager.destroyProject(ctx.projectId)
    })

    it('should destroy all docs', ctx => {
      sinon.assert.calledWith(ctx.db.docs.deleteMany, {
        project_id: ctx.projectId,
      })
    })
  })

  describe('checkRevUnchanged', () => {
    beforeEach(ctx => {
      ctx.doc = { _id: new ObjectId(), name: 'mock-doc', rev: 1 }
    })

    it('should not error when the rev has not changed', async ctx => {
      ctx.db.docs.findOne = sinon.stub().resolves({ rev: 1 })
      await ctx.MongoManager.checkRevUnchanged(ctx.doc)
    })

    it('should return an error when the rev has changed', async ctx => {
      ctx.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
      await expect(
        ctx.MongoManager.checkRevUnchanged(ctx.doc)
      ).to.be.rejectedWith(Errors.DocModifiedError)
    })

    it('should return a value error if incoming rev is NaN', async ctx => {
      ctx.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
      ctx.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN }
      await expect(
        ctx.MongoManager.checkRevUnchanged(ctx.doc)
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })

    it('should return a value error if checked doc rev is NaN', async ctx => {
      ctx.db.docs.findOne = sinon.stub().resolves({ rev: NaN })
      await expect(
        ctx.MongoManager.checkRevUnchanged(ctx.doc)
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })
  })

  describe('restoreArchivedDoc', () => {
    beforeEach(ctx => {
      ctx.archivedDoc = {
        lines: ['a', 'b', 'c'],
        ranges: { some: 'ranges' },
        rev: 2,
      }
    })

    describe('complete doc', () => {
      beforeEach(async ctx => {
        await ctx.MongoManager.restoreArchivedDoc(
          ctx.projectId,
          ctx.docId,
          ctx.archivedDoc
        )
      })

      it('updates Mongo', ctx => {
        expect(ctx.db.docs.updateOne).to.have.been.calledWith(
          {
            _id: new ObjectId(ctx.docId),
            project_id: new ObjectId(ctx.projectId),
            rev: ctx.archivedDoc.rev,
          },
          {
            $set: {
              lines: ctx.archivedDoc.lines,
              ranges: ctx.archivedDoc.ranges,
            },
            $unset: {
              inS3: true,
            },
          }
        )
      })
    })

    describe('without ranges', () => {
      beforeEach(async ctx => {
        delete ctx.archivedDoc.ranges
        await ctx.MongoManager.restoreArchivedDoc(
          ctx.projectId,
          ctx.docId,
          ctx.archivedDoc
        )
      })

      it('sets ranges to an empty object', ctx => {
        expect(ctx.db.docs.updateOne).to.have.been.calledWith(
          {
            _id: new ObjectId(ctx.docId),
            project_id: new ObjectId(ctx.projectId),
            rev: ctx.archivedDoc.rev,
          },
          {
            $set: {
              lines: ctx.archivedDoc.lines,
              ranges: {},
            },
            $unset: {
              inS3: true,
            },
          }
        )
      })
    })

    describe("when the update doesn't succeed", () => {
      it('throws a DocRevValueError', async ctx => {
        ctx.db.docs.updateOne.resolves({ matchedCount: 0 })
        await expect(
          ctx.MongoManager.restoreArchivedDoc(
            ctx.projectId,
            ctx.docId,
            ctx.archivedDoc
          )
        ).to.be.rejectedWith(Errors.DocRevValueError)
      })
    })
  })
})
@@ -1,403 +0,0 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const modulePath = require('node:path').join(
  __dirname,
  '../../../app/js/MongoManager'
)
const { ObjectId } = require('mongodb-legacy')
const { assert, expect } = require('chai')
const Errors = require('../../../app/js/Errors')

describe('MongoManager', function () {
  beforeEach(function () {
    this.db = {
      docs: {
        updateOne: sinon.stub().resolves({ matchedCount: 1 }),
        insertOne: sinon.stub().resolves(),
      },
    }
    this.MongoManager = SandboxedModule.require(modulePath, {
      requires: {
        './mongodb': {
          db: this.db,
          ObjectId,
        },
        '@overleaf/settings': {
          max_deleted_docs: 42,
          docstore: { archivingLockDurationMs: 5000 },
        },
        './Errors': Errors,
      },
    })
    this.projectId = new ObjectId().toString()
    this.docId = new ObjectId().toString()
    this.rev = 42
    this.stubbedErr = new Error('hello world')
    this.lines = ['Three French hens', 'Two turtle doves']
  })

describe('findDoc', function () {
|
||||
beforeEach(async function () {
|
||||
this.doc = { name: 'mock-doc' }
|
||||
this.db.docs.findOne = sinon.stub().resolves(this.doc)
|
||||
this.filter = { lines: true }
|
||||
this.result = await this.MongoManager.findDoc(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
this.filter
|
||||
)
|
||||
})
|
||||
|
||||
it('should find the doc', function () {
|
||||
this.db.docs.findOne
|
||||
.calledWith(
|
||||
{
|
||||
_id: new ObjectId(this.docId),
|
||||
project_id: new ObjectId(this.projectId),
|
||||
},
|
||||
{
|
||||
projection: this.filter,
|
||||
}
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the doc', function () {
|
||||
expect(this.doc).to.deep.equal(this.doc)
|
||||
})
|
||||
})
|
||||
|
||||
describe('patchDoc', function () {
|
||||
beforeEach(async function () {
|
||||
this.meta = { name: 'foo.tex' }
|
||||
await this.MongoManager.patchDoc(this.projectId, this.docId, this.meta)
|
||||
})
|
||||
|
||||
it('should pass the parameter along', function () {
|
||||
this.db.docs.updateOne.should.have.been.calledWith(
|
||||
{
|
||||
_id: new ObjectId(this.docId),
|
||||
project_id: new ObjectId(this.projectId),
|
||||
},
|
||||
{
|
||||
$set: this.meta,
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getProjectsDocs', function () {
|
||||
beforeEach(function () {
|
||||
this.filter = { lines: true }
|
||||
this.doc1 = { name: 'mock-doc1' }
|
||||
this.doc2 = { name: 'mock-doc2' }
|
||||
this.doc3 = { name: 'mock-doc3' }
|
||||
this.doc4 = { name: 'mock-doc4' }
|
||||
this.db.docs.find = sinon.stub().returns({
|
||||
toArray: sinon.stub().resolves([this.doc, this.doc3, this.doc4]),
|
||||
})
|
||||
})
|
||||
|
||||
describe('with included_deleted = false', function () {
|
||||
beforeEach(async function () {
|
||||
this.result = await this.MongoManager.getProjectsDocs(
|
||||
this.projectId,
|
||||
{ include_deleted: false },
|
||||
this.filter
|
||||
)
|
||||
})
|
||||
|
||||
it('should find the non-deleted docs via the project_id', function () {
|
||||
this.db.docs.find
|
||||
.calledWith(
|
||||
{
|
||||
project_id: new ObjectId(this.projectId),
|
||||
deleted: { $ne: true },
|
||||
},
|
||||
{
|
||||
projection: this.filter,
|
||||
}
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should call return the docs', function () {
|
||||
expect(this.result).to.deep.equal([this.doc, this.doc3, this.doc4])
|
||||
})
|
||||
})
|
||||
|
||||
describe('with included_deleted = true', function () {
|
||||
beforeEach(async function () {
|
||||
this.result = await this.MongoManager.getProjectsDocs(
|
||||
this.projectId,
|
||||
{ include_deleted: true },
|
||||
this.filter
|
||||
)
|
||||
})
|
||||
|
||||
it('should find all via the project_id', function () {
|
||||
this.db.docs.find
|
||||
.calledWith(
|
||||
{
|
||||
project_id: new ObjectId(this.projectId),
|
||||
},
|
||||
{
|
||||
projection: this.filter,
|
||||
}
|
||||
)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the docs', function () {
|
||||
expect(this.result).to.deep.equal([this.doc, this.doc3, this.doc4])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('getProjectsDeletedDocs', function () {
|
||||
beforeEach(async function () {
|
||||
this.filter = { name: true }
|
||||
this.doc1 = { _id: '1', name: 'mock-doc1.tex' }
|
||||
this.doc2 = { _id: '2', name: 'mock-doc2.tex' }
|
||||
this.doc3 = { _id: '3', name: 'mock-doc3.tex' }
|
||||
this.db.docs.find = sinon.stub().returns({
|
||||
toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]),
|
||||
})
|
||||
this.result = await this.MongoManager.getProjectsDeletedDocs(
|
||||
this.projectId,
|
||||
this.filter
|
||||
)
|
||||
})
|
||||
|
||||
it('should find the deleted docs via the project_id', function () {
|
||||
this.db.docs.find
|
||||
.calledWith({
|
||||
project_id: new ObjectId(this.projectId),
|
||||
deleted: true,
|
||||
})
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should filter, sort by deletedAt and limit', function () {
|
||||
this.db.docs.find
|
||||
.calledWith(sinon.match.any, {
|
||||
projection: this.filter,
|
||||
sort: { deletedAt: -1 },
|
||||
limit: 42,
|
||||
})
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the docs', function () {
|
||||
expect(this.result).to.deep.equal([this.doc1, this.doc2, this.doc3])
|
||||
})
|
||||
})
|
||||
|
||||
describe('upsertIntoDocCollection', function () {
|
||||
beforeEach(function () {
|
||||
this.oldRev = 77
|
||||
})
|
||||
|
||||
it('should upsert the document', async function () {
|
||||
await this.MongoManager.upsertIntoDocCollection(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
this.oldRev,
|
||||
{ lines: this.lines }
|
||||
)
|
||||
|
||||
const args = this.db.docs.updateOne.args[0]
|
||||
assert.deepEqual(args[0], {
|
||||
_id: new ObjectId(this.docId),
|
||||
project_id: new ObjectId(this.projectId),
|
||||
rev: this.oldRev,
|
||||
})
|
||||
assert.equal(args[1].$set.lines, this.lines)
|
||||
assert.equal(args[1].$inc.rev, 1)
|
||||
})
|
||||
|
||||
it('should handle update error', async function () {
|
||||
this.db.docs.updateOne.rejects(this.stubbedErr)
|
||||
await expect(
|
||||
this.MongoManager.upsertIntoDocCollection(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
this.rev,
|
||||
{
|
||||
lines: this.lines,
|
||||
}
|
||||
)
|
||||
).to.be.rejectedWith(this.stubbedErr)
|
||||
})
|
||||
|
||||
it('should insert without a previous rev', async function () {
|
||||
await this.MongoManager.upsertIntoDocCollection(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
null,
|
||||
{ lines: this.lines, ranges: this.ranges }
|
||||
)
|
||||
|
||||
expect(this.db.docs.insertOne).to.have.been.calledWith({
|
||||
_id: new ObjectId(this.docId),
|
||||
project_id: new ObjectId(this.projectId),
|
||||
rev: 1,
|
||||
lines: this.lines,
|
||||
ranges: this.ranges,
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle generic insert error', async function () {
|
||||
this.db.docs.insertOne.rejects(this.stubbedErr)
|
||||
await expect(
|
||||
this.MongoManager.upsertIntoDocCollection(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
null,
|
||||
{ lines: this.lines, ranges: this.ranges }
|
||||
)
|
||||
).to.be.rejectedWith(this.stubbedErr)
|
||||
})
|
||||
|
||||
it('should handle duplicate insert error', async function () {
|
||||
this.db.docs.insertOne.rejects({ code: 11000 })
|
||||
await expect(
|
||||
this.MongoManager.upsertIntoDocCollection(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
null,
|
||||
{ lines: this.lines, ranges: this.ranges }
|
||||
)
|
||||
).to.be.rejectedWith(Errors.DocRevValueError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('destroyProject', function () {
|
||||
beforeEach(async function () {
|
||||
this.projectId = new ObjectId()
|
||||
this.db.docs.deleteMany = sinon.stub().resolves()
|
||||
await this.MongoManager.destroyProject(this.projectId)
|
||||
})
|
||||
|
||||
it('should destroy all docs', function () {
|
||||
sinon.assert.calledWith(this.db.docs.deleteMany, {
|
||||
project_id: this.projectId,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('checkRevUnchanged', function () {
|
||||
this.beforeEach(function () {
|
||||
this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: 1 }
|
||||
})
|
||||
|
||||
it('should not error when the rev has not changed', async function () {
|
||||
this.db.docs.findOne = sinon.stub().resolves({ rev: 1 })
|
||||
await this.MongoManager.checkRevUnchanged(this.doc)
|
||||
})
|
||||
|
||||
it('should return an error when the rev has changed', async function () {
|
||||
this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
|
||||
await expect(
|
||||
this.MongoManager.checkRevUnchanged(this.doc)
|
||||
).to.be.rejectedWith(Errors.DocModifiedError)
|
||||
})
|
||||
|
||||
it('should return a value error if incoming rev is NaN', async function () {
|
||||
this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
|
||||
this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN }
|
||||
await expect(
|
||||
this.MongoManager.checkRevUnchanged(this.doc)
|
||||
).to.be.rejectedWith(Errors.DocRevValueError)
|
||||
})
|
||||
|
||||
it('should return a value error if checked doc rev is NaN', async function () {
|
||||
this.db.docs.findOne = sinon.stub().resolves({ rev: NaN })
|
||||
await expect(
|
||||
this.MongoManager.checkRevUnchanged(this.doc)
|
||||
).to.be.rejectedWith(Errors.DocRevValueError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('restoreArchivedDoc', function () {
|
||||
beforeEach(function () {
|
||||
this.archivedDoc = {
|
||||
lines: ['a', 'b', 'c'],
|
||||
ranges: { some: 'ranges' },
|
||||
rev: 2,
|
||||
}
|
||||
})
|
||||
|
||||
describe('complete doc', function () {
|
||||
beforeEach(async function () {
|
||||
await this.MongoManager.restoreArchivedDoc(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
this.archivedDoc
|
||||
)
|
||||
})
|
||||
|
||||
it('updates Mongo', function () {
|
||||
expect(this.db.docs.updateOne).to.have.been.calledWith(
|
||||
{
|
||||
_id: new ObjectId(this.docId),
|
||||
project_id: new ObjectId(this.projectId),
|
||||
rev: this.archivedDoc.rev,
|
||||
},
|
||||
{
|
||||
$set: {
|
||||
lines: this.archivedDoc.lines,
|
||||
ranges: this.archivedDoc.ranges,
|
||||
},
|
||||
$unset: {
|
||||
inS3: true,
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('without ranges', function () {
|
||||
beforeEach(async function () {
|
||||
delete this.archivedDoc.ranges
|
||||
await this.MongoManager.restoreArchivedDoc(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
this.archivedDoc
|
||||
)
|
||||
})
|
||||
|
||||
it('sets ranges to an empty object', function () {
|
||||
expect(this.db.docs.updateOne).to.have.been.calledWith(
|
||||
{
|
||||
_id: new ObjectId(this.docId),
|
||||
project_id: new ObjectId(this.projectId),
|
||||
rev: this.archivedDoc.rev,
|
||||
},
|
||||
{
|
||||
$set: {
|
||||
lines: this.archivedDoc.lines,
|
||||
ranges: {},
|
||||
},
|
||||
$unset: {
|
||||
inS3: true,
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("when the update doesn't succeed", function () {
|
||||
it('throws a DocRevValueError', async function () {
|
||||
this.db.docs.updateOne.resolves({ matchedCount: 0 })
|
||||
await expect(
|
||||
this.MongoManager.restoreArchivedDoc(
|
||||
this.projectId,
|
||||
this.docId,
|
||||
this.archivedDoc
|
||||
)
|
||||
).to.be.rejectedWith(Errors.DocRevValueError)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
57
services/docstore/test/unit/js/PersistorManager.test.js
Normal file
@@ -0,0 +1,57 @@
import { describe, expect, it, vi } from 'vitest'

const modulePath = '../../../app/js/PersistorManager.js'

describe('PersistorManager', () => {
  class FakePersistor {
    async sendStream() {
      return 'sent'
    }
  }

  describe('configured', () => {
    it('should return fake persistor', async () => {
      const Settings = {
        docstore: {
          backend: 'gcs',
          bucket: 'wombat',
        },
      }
      vi.doMock('@overleaf/settings', () => ({
        default: Settings,
      }))
      vi.doMock('@overleaf/object-persistor', () => ({
        default: () => new FakePersistor(),
      }))
      vi.doMock('@overleaf/metrics', () => ({ default: {} }))
      const PersistorManager = (await import(modulePath)).default

      expect(PersistorManager).to.be.instanceof(FakePersistor)
      await expect(PersistorManager.sendStream()).to.eventually.equal('sent')
    })
  })

  describe('not configured', () => {
    it('should return abstract persistor', async () => {
      const Settings = {
        docstore: {
          backend: undefined,
          bucket: 'wombat',
        },
      }
      vi.doMock('@overleaf/settings', () => ({
        default: Settings,
      }))
      vi.doMock('@overleaf/object-persistor', () => ({
        default: () => new FakePersistor(),
      }))
      vi.doMock('@overleaf/metrics', () => ({ default: {} }))
      const PersistorManager = (await import(modulePath)).default

      expect(PersistorManager.constructor.name).to.equal('AbstractPersistor')
      await expect(PersistorManager.sendStream()).to.eventually.be.rejectedWith(
        /method not implemented in persistor/
      )
    })
  })
})
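One detail worth calling out in the new test above: `vi.doMock` only affects modules imported after it runs, which is why the module under test is loaded with a dynamic `import()` after the mocks are registered rather than a top-level import. A minimal sketch of the pattern, with hypothetical module paths:

import { expect, it, vi } from 'vitest'

it('registers mocks before importing the module under test', async () => {
  // './settings' and './subject' are placeholder paths for illustration.
  vi.doMock('./settings', () => ({ default: { backend: 'gcs' } }))

  // Deferred import: the module under test now resolves the mocked dependency.
  const subject = (await import('./subject')).default
  expect(subject).to.exist
})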
@@ -1,55 +0,0 @@
const { expect } = require('chai')
const modulePath = '../../../app/js/PersistorManager.js'
const SandboxedModule = require('sandboxed-module')

describe('PersistorManager', function () {
  class FakePersistor {
    async sendStream() {
      return 'sent'
    }
  }

  describe('configured', function () {
    it('should return fake persistor', function () {
      const Settings = {
        docstore: {
          backend: 'gcs',
          bucket: 'wombat',
        },
      }
      const PersistorManager = SandboxedModule.require(modulePath, {
        requires: {
          '@overleaf/settings': Settings,
          '@overleaf/object-persistor': () => new FakePersistor(),
          '@overleaf/metrics': {},
        },
      })

      expect(PersistorManager).to.be.instanceof(FakePersistor)
      return expect(PersistorManager.sendStream()).to.eventually.equal('sent')
    })
  })

  describe('not configured', function () {
    it('should return abstract persistor', async function () {
      const Settings = {
        docstore: {
          backend: undefined,
          bucket: 'wombat',
        },
      }
      const PersistorManager = SandboxedModule.require(modulePath, {
        requires: {
          '@overleaf/settings': Settings,
          '@overleaf/object-persistor': () => new FakePersistor(),
          '@overleaf/metrics': {},
        },
      })

      expect(PersistorManager.constructor.name).to.equal('AbstractPersistor')
      await expect(PersistorManager.sendStream()).to.eventually.be.rejectedWith(
        /method not implemented in persistor/
      )
    })
  })
})
239
services/docstore/test/unit/js/RangeManager.test.js
Normal file
@@ -0,0 +1,239 @@
import path from 'node:path'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ObjectId } from 'mongodb-legacy'

const modulePath = path.join(
  import.meta.dirname,
  '../../../app/js/RangeManager'
)

describe('RangeManager', () => {
  beforeEach(async ctx => {
    vi.doMock('../../../app/js/mongodb', () => ({
      default: {
        ObjectId,
      },
    }))

    ctx.RangeManager = (await import(modulePath)).default
  })

  describe('jsonRangesToMongo', () => {
    it('should convert ObjectIds and dates to proper objects and fix comment id', ctx => {
      const changeId = new ObjectId().toString()
      const commentId = new ObjectId().toString()
      const userId = new ObjectId().toString()
      const threadId = new ObjectId().toString()
      const ts = new Date().toJSON()
      ctx.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: changeId,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: userId,
              ts,
            },
          },
        ],
        comments: [
          {
            id: commentId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: new ObjectId(changeId),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: new ObjectId(userId),
              ts: new Date(ts),
            },
          },
        ],
        comments: [
          {
            id: new ObjectId(threadId),
            op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
          },
        ],
      })
    })

    it('should leave malformed ObjectIds as they are', ctx => {
      const changeId = 'foo'
      const commentId = 'bar'
      const userId = 'baz'
      ctx.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: changeId,
            metadata: {
              user_id: userId,
            },
          },
        ],
        comments: [
          {
            id: commentId,
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: changeId,
            metadata: {
              user_id: userId,
            },
          },
        ],
        comments: [
          {
            id: commentId,
          },
        ],
      })
    })

    it('should be consistent when transformed through json -> mongo -> json', ctx => {
      const changeId = new ObjectId().toString()
      const userId = new ObjectId().toString()
      const threadId = new ObjectId().toString()
      const ts = new Date().toJSON()
      const ranges1 = {
        changes: [
          {
            id: changeId,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: userId,
              ts,
            },
          },
        ],
        comments: [
          {
            id: threadId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }
      const ranges1Copy = JSON.parse(JSON.stringify(ranges1)) // jsonRangesToMongo modifies in place
      const ranges2 = JSON.parse(
        JSON.stringify(ctx.RangeManager.jsonRangesToMongo(ranges1Copy))
      )
      ranges1.should.deep.equal(ranges2)
    })
  })

  describe('shouldUpdateRanges', () => {
    beforeEach(ctx => {
      const threadId = new ObjectId()
      ctx.ranges = {
        changes: [
          {
            id: new ObjectId(),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: new ObjectId(),
              ts: new Date(),
            },
          },
        ],
        comments: [
          {
            id: threadId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }
      ctx.ranges_copy = ctx.RangeManager.jsonRangesToMongo(
        JSON.parse(JSON.stringify(ctx.ranges))
      )
    })

    describe('with a blank new range', () => {
      it('should throw an error', ctx => {
        expect(() => {
          ctx.RangeManager.shouldUpdateRanges(ctx.ranges, null)
        }).to.throw(Error)
      })
    })

    describe('with a blank old range', () => {
      it('should treat it like {}', ctx => {
        ctx.RangeManager.shouldUpdateRanges(null, {}).should.equal(false)
        ctx.RangeManager.shouldUpdateRanges(null, ctx.ranges).should.equal(true)
      })
    })

    describe('with no changes', () => {
      it('should return false', ctx => {
        ctx.RangeManager.shouldUpdateRanges(
          ctx.ranges,
          ctx.ranges_copy
        ).should.equal(false)
      })
    })

    describe('with changes', () => {
      it('should return true when the change id changes', ctx => {
        ctx.ranges_copy.changes[0].id = new ObjectId()
        ctx.RangeManager.shouldUpdateRanges(
          ctx.ranges,
          ctx.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change user id changes', ctx => {
        ctx.ranges_copy.changes[0].metadata.user_id = new ObjectId()
        ctx.RangeManager.shouldUpdateRanges(
          ctx.ranges,
          ctx.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change ts changes', ctx => {
        ctx.ranges_copy.changes[0].metadata.ts = new Date(Date.now() + 1000)
        ctx.RangeManager.shouldUpdateRanges(
          ctx.ranges,
          ctx.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change op changes', ctx => {
        ctx.ranges_copy.changes[0].op.i = 'bar'
        ctx.RangeManager.shouldUpdateRanges(
          ctx.ranges,
          ctx.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment id changes', ctx => {
        ctx.ranges_copy.comments[0].id = new ObjectId()
        ctx.RangeManager.shouldUpdateRanges(
          ctx.ranges,
          ctx.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment offset changes', ctx => {
        ctx.ranges_copy.comments[0].op.p = 17
        ctx.RangeManager.shouldUpdateRanges(
          ctx.ranges,
          ctx.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment content changes', ctx => {
        ctx.ranges_copy.comments[0].op.c = 'bar'
        ctx.RangeManager.shouldUpdateRanges(
          ctx.ranges,
          ctx.ranges_copy
        ).should.equal(true)
      })
    })
  })
})
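As the inline comment in the round-trip test notes, `jsonRangesToMongo` mutates its argument in place. A short sketch of the resulting calling pattern, assuming `RangeManager` has been loaded as in the suite's `beforeEach`:

const ranges = { changes: [], comments: [] }
// Deep-copy first when the plain-JSON shape is still needed afterwards.
const clone = JSON.parse(JSON.stringify(ranges))
const mongoRanges = RangeManager.jsonRangesToMongo(clone)
// `ranges` keeps plain strings; `clone`/`mongoRanges` now carry ObjectId and Date instances.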
@@ -1,253 +0,0 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('node:path').join(
  __dirname,
  '../../../app/js/RangeManager'
)
const { ObjectId } = require('mongodb-legacy')

describe('RangeManager', function () {
  beforeEach(function () {
    return (this.RangeManager = SandboxedModule.require(modulePath, {
      requires: {
        './mongodb': {
          ObjectId,
        },
      },
    }))
  })

  describe('jsonRangesToMongo', function () {
    it('should convert ObjectIds and dates to proper objects and fix comment id', function () {
      const changeId = new ObjectId().toString()
      const commentId = new ObjectId().toString()
      const userId = new ObjectId().toString()
      const threadId = new ObjectId().toString()
      const ts = new Date().toJSON()
      return this.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: changeId,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: userId,
              ts,
            },
          },
        ],
        comments: [
          {
            id: commentId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: new ObjectId(changeId),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: new ObjectId(userId),
              ts: new Date(ts),
            },
          },
        ],
        comments: [
          {
            id: new ObjectId(threadId),
            op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
          },
        ],
      })
    })

    it('should leave malformed ObjectIds as they are', function () {
      const changeId = 'foo'
      const commentId = 'bar'
      const userId = 'baz'
      return this.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: changeId,
            metadata: {
              user_id: userId,
            },
          },
        ],
        comments: [
          {
            id: commentId,
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: changeId,
            metadata: {
              user_id: userId,
            },
          },
        ],
        comments: [
          {
            id: commentId,
          },
        ],
      })
    })

    return it('should be consistent when transformed through json -> mongo -> json', function () {
      const changeId = new ObjectId().toString()
      const userId = new ObjectId().toString()
      const threadId = new ObjectId().toString()
      const ts = new Date().toJSON()
      const ranges1 = {
        changes: [
          {
            id: changeId,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: userId,
              ts,
            },
          },
        ],
        comments: [
          {
            id: threadId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }
      const ranges1Copy = JSON.parse(JSON.stringify(ranges1)) // jsonRangesToMongo modifies in place
      const ranges2 = JSON.parse(
        JSON.stringify(this.RangeManager.jsonRangesToMongo(ranges1Copy))
      )
      return ranges1.should.deep.equal(ranges2)
    })
  })

  return describe('shouldUpdateRanges', function () {
    beforeEach(function () {
      const threadId = new ObjectId()
      this.ranges = {
        changes: [
          {
            id: new ObjectId(),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: new ObjectId(),
              ts: new Date(),
            },
          },
        ],
        comments: [
          {
            id: threadId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }
      return (this.ranges_copy = this.RangeManager.jsonRangesToMongo(
        JSON.parse(JSON.stringify(this.ranges))
      ))
    })

    describe('with a blank new range', function () {
      return it('should throw an error', function () {
        return expect(() => {
          return this.RangeManager.shouldUpdateRanges(this.ranges, null)
        }).to.throw(Error)
      })
    })

    describe('with a blank old range', function () {
      return it('should treat it like {}', function () {
        this.RangeManager.shouldUpdateRanges(null, {}).should.equal(false)
        return this.RangeManager.shouldUpdateRanges(
          null,
          this.ranges
        ).should.equal(true)
      })
    })

    describe('with no changes', function () {
      return it('should return false', function () {
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(false)
      })
    })

    return describe('with changes', function () {
      it('should return true when the change id changes', function () {
        this.ranges_copy.changes[0].id = new ObjectId()
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change user id changes', function () {
        this.ranges_copy.changes[0].metadata.user_id = new ObjectId()
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change ts changes', function () {
        this.ranges_copy.changes[0].metadata.ts = new Date(Date.now() + 1000)
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change op changes', function () {
        this.ranges_copy.changes[0].op.i = 'bar'
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment id changes', function () {
        this.ranges_copy.comments[0].id = new ObjectId()
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment offset changes', function () {
        this.ranges_copy.comments[0].op.p = 17
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      return it('should return true when the comment content changes', function () {
        this.ranges_copy.comments[0].op.c = 'bar'
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })
    })
  })
})
40
services/docstore/test/unit/setup.js
Normal file
@@ -0,0 +1,40 @@
import { afterEach, beforeEach, chai, vi } from 'vitest'
import sinon from 'sinon'
import sinonChai from 'sinon-chai'
import chaiAsPromised from 'chai-as-promised'
import mongodb from 'mongodb-legacy'

// ensure every ObjectId has the id string as a property for correct comparisons
mongodb.ObjectId.cacheHexString = true

process.env.BACKEND = 'gcs'

// Chai configuration
chai.should()
chai.use(sinonChai)
chai.use(chaiAsPromised)

// Global stubs
const sandbox = sinon.createSandbox()
const stubs = {
  logger: {
    debug: sandbox.stub(),
    log: sandbox.stub(),
    info: sandbox.stub(),
    warn: sandbox.stub(),
    err: sandbox.stub(),
    error: sandbox.stub(),
    fatal: sandbox.stub(),
  },
}

beforeEach(ctx => {
  ctx.logger = stubs.logger
  vi.doMock('@overleaf/logger', () => ({ default: ctx.logger }))
})

afterEach(() => {
  vi.restoreAllMocks()
  vi.resetModules()
  sandbox.reset()
})
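setup.js registers a fresh `@overleaf/logger` mock before every test and exposes the sinon-backed stubs as `ctx.logger`, so suites can assert on log calls without any local wiring. A hypothetical test using the shared stub:

import { describe, expect, it } from 'vitest'

describe('logging', () => {
  it('records calls on the shared logger stub', async ctx => {
    // A module under test would normally perform this import itself.
    const logger = (await import('@overleaf/logger')).default
    logger.warn('archiving failed')
    // sinon-chai is registered in setup.js, enabling this assertion style.
    expect(ctx.logger.warn).to.have.been.calledWith('archiving failed')
  })
})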
@@ -8,6 +8,7 @@
    "config/**/*",
    "scripts/**/*",
    "test/**/*",
    "types"
    "types",
    "vitest.config.unit.cjs"
  ]
}
25
services/docstore/vitest.config.unit.cjs
Normal file
@@ -0,0 +1,25 @@
const { defineConfig } = require('vitest/config')

let reporterOptions = {}
if (process.env.CI) {
  reporterOptions = {
    reporters: [
      'default',
      [
        'junit',
        {
          classnameTemplate: `Unit tests.{filename}`,
        },
      ],
    ],
    outputFile: 'reports/junit-vitest-unit.xml',
  }
}
module.exports = defineConfig({
  test: {
    include: ['test/unit/js/*.test.{js,ts}'],
    setupFiles: ['./test/unit/setup.js'],
    isolate: true,
    ...reporterOptions,
  },
})
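Because this config does not use the default vitest.config filename, it has to be selected explicitly. A hypothetical package.json fragment (the script name is an assumption, not part of this diff):

{
  "scripts": {
    "test:unit": "vitest run --config vitest.config.unit.cjs"
  }
}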
@@ -1,18 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/overleaf-editor-core/**
libraries/promise-utils/**
libraries/ranges-tracker/**
libraries/redis-wrapper/**
libraries/settings/**
package-lock.json
package.json
patches/**
services/document-updater/**
tools/migrations/**
161
services/document-updater/Jenkinsfile
vendored
@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/document-updater') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/document-updater/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/document-updater') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/document-updater') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/document-updater') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'document-updater-eslint', name: 'document-updater eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/document-updater/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/document-updater') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/document-updater') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/document-updater') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
          }
          steps {
            dir('services/document-updater') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/document-updater') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'document-updater test results', testResults: 'services/document-updater/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
          }
        }
      }
    }
    cleanup {
      dir('services/document-updater') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,14 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/o-error/**
libraries/object-persistor/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/filestore/**
188
services/filestore/Jenkinsfile
vendored
@@ -1,188 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/filestore') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/filestore/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/filestore') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/filestore') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/filestore') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'filestore-eslint', name: 'filestore eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/filestore/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/filestore') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/filestore') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/filestore') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance SHARD_01_') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance_shard_01_x"
            MOCHA_GREP = "SHARD_01_"
          }
          steps {
            dir('services/filestore') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
        stage('Test Acceptance SHARD_02_') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance_shard_02_x"
            MOCHA_GREP = "SHARD_02_"
          }
          steps {
            dir('services/filestore') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
        stage('Test Acceptance SHARD_03_') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance_shard_03_x"
            MOCHA_GREP = "SHARD_03_"
          }
          steps {
            dir('services/filestore') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/filestore') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'filestore test results', testResults: 'services/filestore/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      dir('services/filestore') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1 +0,0 @@
services/git-bridge/**
111
services/git-bridge/Jenkinsfile
vendored
@@ -1,111 +0,0 @@
pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/git-bridge') {
              sh 'make refresh_cache -j2'
              retry(count: 3) {
                sh 'make docker_build_base'
              }
            }
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Build production and push') {
          steps {
            dir('services/git-bridge') {
              retry(count: 3) {
                sh 'make docker_build'
              }
              sh 'make push_branch'
            }
          }
        }
        stage('Format Java') {
          steps {
            dir('services/git-bridge') {
              sh 'make docker_format'
            }
          }
        }
        stage('Test') {
          steps {
            dir('services/git-bridge') {
              retry(count: 3) {
                sh 'make docker_test'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/git-bridge') {
          sh 'make push'
        }
      }
    }
  }
  post {
    // Collect junit test results for both success and failure case.
    always {
      junit checksName: 'git-bridge test results', testResults: 'services/git-bridge/target/surefire-reports/*.xml'
    }
    cleanup {
      dir('services/git-bridge') {
        sh 'make clean_ci'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,19 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/object-persistor/**
libraries/overleaf-editor-core/**
libraries/promise-utils/**
libraries/redis-wrapper/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/history-v1/**
tools/migrations/**
@@ -29,6 +29,7 @@ COPY libraries/promise-utils/package.json /overleaf/libraries/promise-utils/pack
COPY libraries/redis-wrapper/package.json /overleaf/libraries/redis-wrapper/package.json
COPY libraries/settings/package.json /overleaf/libraries/settings/package.json
COPY libraries/stream-utils/package.json /overleaf/libraries/stream-utils/package.json
COPY libraries/validation-tools/package.json /overleaf/libraries/validation-tools/package.json
COPY services/history-v1/package.json /overleaf/services/history-v1/package.json
COPY tools/migrations/package.json /overleaf/tools/migrations/package.json
COPY patches/ /overleaf/patches/
@@ -46,6 +47,7 @@ COPY libraries/promise-utils/ /overleaf/libraries/promise-utils/
COPY libraries/redis-wrapper/ /overleaf/libraries/redis-wrapper/
COPY libraries/settings/ /overleaf/libraries/settings/
COPY libraries/stream-utils/ /overleaf/libraries/stream-utils/
COPY libraries/validation-tools/ /overleaf/libraries/validation-tools/
COPY services/history-v1/ /overleaf/services/history-v1/
COPY tools/migrations/ /overleaf/tools/migrations/
161
services/history-v1/Jenkinsfile
vendored
@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/history-v1') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/history-v1/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/history-v1') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/history-v1') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/history-v1') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'history-v1-eslint', name: 'history-v1 eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/history-v1/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/history-v1') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/history-v1') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/history-v1') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
          }
          steps {
            dir('services/history-v1') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/history-v1') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'history-v1 test results', testResults: 'services/history-v1/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
          }
        }
      }
    }
    cleanup {
      dir('services/history-v1') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -26,6 +26,7 @@ IMAGE_CACHE ?= $(IMAGE_REPO):cache-$(shell cat \
  $(MONOREPO)/libraries/redis-wrapper/package.json \
  $(MONOREPO)/libraries/settings/package.json \
  $(MONOREPO)/libraries/stream-utils/package.json \
  $(MONOREPO)/libraries/validation-tools/package.json \
  $(MONOREPO)/services/history-v1/package.json \
  $(MONOREPO)/tools/migrations/package.json \
  $(MONOREPO)/patches/* \
144
services/history-v1/api/middleware/security.js
Normal file
@@ -0,0 +1,144 @@
'use strict'

const basicAuth = require('basic-auth')
const config = require('config')
const HTTPStatus = require('http-status')
const jwt = require('jsonwebtoken')
const tsscmp = require('tsscmp')
const { validateReq } = require('@overleaf/validation-tools')
const schemas = require('../schema')

function hasValidBasicAuthCredentials(req) {
  const credentials = basicAuth(req)
  if (!credentials) return false

  // No security in the name, so just use straight comparison.
  if (credentials.name !== 'staging') return false

  const password = config.get('basicHttpAuth.password')
  if (password && tsscmp(credentials.pass, password)) return true

  // Support an old password so we can change the password without downtime.
  if (config.has('basicHttpAuth.oldPassword')) {
    const oldPassword = config.get('basicHttpAuth.oldPassword')
    if (oldPassword && tsscmp(credentials.pass, oldPassword)) return true
  }

  return false
}

function setupSSL(app) {
  const httpsOnly = config.get('httpsOnly') === 'true'
  if (!httpsOnly) {
    return
  }
  app.enable('trust proxy')
  app.use(function (req, res, next) {
    if (req.protocol === 'https') {
      next()
      return
    }
    if (req.method === 'GET' || req.method === 'HEAD') {
      res.redirect('https://' + req.headers.host + req.url)
    } else {
      res
        .status(HTTPStatus.FORBIDDEN)
        .send('Please use HTTPS when submitting data to this server.')
    }
  })
}

exports.setupSSL = setupSSL

function handleJWTAuth(req, res, next) {
  if (hasValidBasicAuthCredentials(req)) {
    return next()
  }

  let token
  if (req.query.token) {
    token = req.query.token
  } else if (
    req.headers.authorization &&
    req.headers.authorization.split(' ')[0] === 'Bearer'
  ) {
    token = req.headers.authorization.split(' ')[1]
  }

  if (!token) {
    const err = new Error('jwt missing')
    err.statusCode = HTTPStatus.UNAUTHORIZED
    err.headers = { 'WWW-Authenticate': 'Bearer' }
    return next(err)
  }

  let decoded
  try {
    decoded = decodeJWT(token)
  } catch (error) {
    if (
      error instanceof jwt.JsonWebTokenError ||
      error instanceof jwt.TokenExpiredError
    ) {
      const err = new Error(error.message)
      err.statusCode = HTTPStatus.UNAUTHORIZED
      err.headers = { 'WWW-Authenticate': 'Bearer error="invalid_token"' }
      return next(err)
    }
    throw error
  }

  const { params } = validateReq(req, schemas.projectId)
  if (decoded.project_id.toString() !== params.project_id.toString()) {
    const err = new Error('Wrong project_id')
    err.statusCode = HTTPStatus.FORBIDDEN
    return next(err)
  }

  req.jwt = decoded
  next()
}

/**
 * Verify and decode the given JSON Web Token
 */
function decodeJWT(token) {
  const key = config.get('jwtAuth.key')
  const algorithm = config.get('jwtAuth.algorithm')
  try {
    return jwt.verify(token, key, { algorithms: [algorithm] })
  } catch (err) {
    // Support an old key so we can change the key without downtime.
    if (config.has('jwtAuth.oldKey')) {
      const oldKey = config.get('jwtAuth.oldKey')
      return jwt.verify(token, oldKey, { algorithms: [algorithm] })
    } else {
      throw err
    }
  }
}

function handleBasicAuth(req, res, next) {
  if (hasValidBasicAuthCredentials(req)) {
    return next()
  }
  const error = new Error()
  error.statusCode = HTTPStatus.UNAUTHORIZED
  error.headers = { 'WWW-Authenticate': 'Basic realm="Application"' }
  return next(error)
}

function getAuthHandlers() {
  if (!config.has('jwtAuth.key') || !config.has('basicHttpAuth.password')) {
    throw new Error('missing authentication env vars')
  }

  const handlers = {}
  handlers.jwt = handleJWTAuth
  handlers.basic = handleBasicAuth
  handlers.token = handleJWTAuth
  return handlers
}

exports.hasValidBasicAuthCredentials = hasValidBasicAuthCredentials
exports.getAuthHandlers = getAuthHandlers
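A sketch of how these handlers might be wired into an Express app; the route path and wiring below are assumptions for illustration, not part of this diff:

const express = require('express')
const security = require('./api/middleware/security') // path assumed

const app = express()
security.setupSSL(app) // redirects GET/HEAD to HTTPS only when config httpsOnly is 'true'

const handlers = security.getAuthHandlers() // throws if auth config is missing
// handlers.jwt accepts a Bearer token (header or ?token=) and falls back to
// the 'staging' basic-auth credentials checked by hasValidBasicAuthCredentials.
app.get('/api/projects/:project_id/blobs/:hash', handlers.jwt, (req, res) => {
  res.json({ projectId: req.jwt.project_id })
})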
229
services/history-v1/api/schema.js
Normal file
@@ -0,0 +1,229 @@
'use strict'

const { z } = require('@overleaf/validation-tools')
const Blob = require('overleaf-editor-core').Blob

const hexHashPattern = new RegExp(Blob.HEX_HASH_RX_STRING)

const fileSchema = z.object({
  hash: z.string().optional(),
  byteLength: z.number().int().nullable().optional(),
  stringLength: z.number().int().nullable().optional(),
})

const snapshotSchema = z.object({
  files: z.record(z.string(), fileSchema),
})

const v2DocVersionsSchema = z.object({
  pathname: z.string().optional(),
  v: z.number().int().optional(),
})

const operationSchema = z.object({
  pathname: z.string().optional(),
  newPathname: z.string().optional(),
  blob: z
    .object({
      hash: z.string(),
    })
    .optional(),
  textOperation: z.array(z.any()).optional(),
  file: fileSchema.optional(),
})

const changeSchema = z.object({
  timestamp: z.string(),
  operations: z.array(operationSchema),
  authors: z.array(z.number().int().nullable()).optional(),
  v2Authors: z.array(z.string().nullable()).optional(),
  projectVersion: z.string().optional(),
  v2DocVersions: z.record(v2DocVersionsSchema).optional(),
})

const schemas = {
  projectId: z.object({
    params: z
      .object({
        project_id: z.string().optional(),
      })
      .optional(),
  }),
  initializeProject: z.object({
    body: z
      .object({
        projectId: z.string().optional(),
      })
      .optional(),
  }),

  getProjectBlobsStats: z.object({
    body: z.object({
      projectIds: z.array(z.string()),
    }),
  }),

  getBlobStats: z.object({
    params: z.object({
      project_id: z.string(),
    }),
    body: z.object({
      blobHashes: z.array(z.string()),
    }),
  }),

  deleteProject: z.object({
    params: z.object({
      project_id: z.string(),
    }),
    body: z.any().optional(),
  }),

  getProjectBlob: z.object({
    params: z.object({
      project_id: z.string(),
      hash: z.string().regex(hexHashPattern),
    }),
    headers: z.object({
      range: z.string().optional(),
    }),
  }),

  headProjectBlob: z.object({
    params: z.object({
      project_id: z.string(),
      hash: z.string().regex(hexHashPattern),
    }),
  }),

  createProjectBlob: z.object({
    params: z.object({
      project_id: z.string(),
      hash: z.string().regex(hexHashPattern),
    }),
    body: z.any().optional(),
  }),

  copyProjectBlob: z.object({
    params: z.object({
      project_id: z.string(),
      hash: z.string().regex(hexHashPattern),
    }),
    query: z.object({
      copyFrom: z.string(),
    }),
    body: z.any().optional(),
  }),

  getLatestContent: z.object({
    params: z.object({
      project_id: z.string(),
    }),
  }),

  getLatestHashedContent: z.object({
    params: z.object({
      project_id: z.string(),
    }),
  }),

  getLatestHistory: z.object({
    params: z.object({
      project_id: z.string(),
    }),
  }),

  getLatestHistoryRaw: z.object({
    params: z.object({
      project_id: z.string(),
    }),
    query: z.object({
      readOnly: z.boolean().optional(),
    }),
  }),

  getLatestPersistedHistory: z.object({
    params: z.object({
      project_id: z.string(),
    }),
  }),

  getHistory: z.object({
    params: z.object({
      project_id: z.string(),
      version: z.coerce.number(),
    }),
  }),

  getContentAtVersion: z.object({
    params: z.object({
      project_id: z.string(),
      version: z.coerce.number(),
    }),
  }),

  getHistoryBefore: z.object({
    params: z.object({
      project_id: z.string(),
      timestamp: z.iso.datetime(),
    }),
  }),

  getZip: z.object({
    params: z.object({
      project_id: z.string(),
      version: z.coerce.number(),
    }),
  }),

  createZip: z.object({
    params: z.object({
      project_id: z.string(),
      version: z.coerce.number(),
    }),
    body: z.any().optional(),
  }),

  getChanges: z.object({
    params: z.object({
      project_id: z.string(),
    }),
    query: z.object({
      since: z.coerce.number().optional(),
    }),
  }),

  importSnapshot: z.object({
    params: z.object({
      project_id: z.string(),
    }),
    body: snapshotSchema,
  }),

  importChanges: z.object({
    params: z.object({
      project_id: z.string(),
    }),
    query: z.object({
      end_version: z.coerce.number(),
      return_snapshot: z.enum(['hashed', 'none']).optional(),
    }),
    body: z.array(changeSchema),
  }),

  flushChanges: z.object({
    params: z.object({
      project_id: z.string(),
    }),
    body: z.any().optional(),
  }),

  expireProject: z.object({
    params: z.object({
      project_id: z.string(),
    }),
    body: z.any().optional(),
  }),
}

module.exports = schemas
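schema.js only defines the request shapes; the validateReq() call seen in the auth middleware is expected to apply one of these schemas to an incoming request. A minimal sketch of such a helper, assuming plain zod safeParse() semantics (the actual @overleaf/validation-tools export may differ in name and error shape; the helper and the inline example schema here are illustrative):

const { z } = require('zod')

function validateReq(req, schema) {
  // Parse only the request parts the schema describes; zod objects
  // strip unknown keys by default, so extra parts are ignored.
  const result = schema.safeParse({
    params: req.params,
    query: req.query,
    body: req.body,
    headers: req.headers,
  })
  if (!result.success) {
    const err = new Error(result.error.message)
    err.statusCode = 400 // Bad Request
    throw err
  }
  return result.data
}

// Usage against a shape like schemas.projectId:
const projectIdSchema = z.object({
  params: z.object({ project_id: z.string().optional() }).optional(),
})
const { params } = validateReq(
  { params: { project_id: 'abc123' }, query: {}, body: {}, headers: {} },
  projectIdSchema
)
console.log(params.project_id) // 'abc123'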
@@ -17,6 +17,7 @@
     "@overleaf/redis-wrapper": "*",
     "@overleaf/settings": "*",
     "@overleaf/stream-utils": "^0.1.0",
+    "@overleaf/validation-tools": "*",
     "archiver": "^5.3.0",
     "basic-auth": "^2.0.1",
     "bluebird": "^3.7.2",
@@ -1,16 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/promise-utils/**
libraries/settings/**
libraries/validation-tools/**
package-lock.json
package.json
patches/**
services/notifications/**
tools/migrations/**
161 services/notifications/Jenkinsfile vendored
@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/notifications') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/notifications/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/notifications') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/notifications') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/notifications') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'notifications-eslint', name: 'notifications eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/notifications/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/notifications') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/notifications') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/notifications') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
          }
          steps {
            dir('services/notifications') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/notifications') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'notifications test results', testResults: 'services/notifications/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      dir('services/notifications') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
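The generated Jenkinsfiles deleted in this diff share the same "Set Build Variables" logic: on a PR build Jenkins checks out a synthetic merge commit, so the pipeline resolves the first parent to find the actual branch commit under test. A sketch of that resolution, ported to Node purely for illustration (the pipeline itself shells out to the same git commands; the resolveRelevantCommit() helper is not part of the diff):

const { execSync } = require('child_process')

function resolveRelevantCommit(gitCommit, isPrBuild) {
  const sh = cmd => execSync(cmd, { encoding: 'utf8' }).trim()
  if (!isPrBuild) return gitCommit // branch build: use the commit as-is
  // If the checked-out commit already exists on a remote branch, use it.
  if (sh(`git branch --remotes --contains ${gitCommit}`)) return gitCommit
  // Otherwise HEAD is likely a merge commit; HEAD^@ lists its parents.
  const parents = sh('git rev-parse HEAD^@').split('\n')
  // The first parent of the merge commit is the branch commit under test.
  return parents.length >= 2 ? parents[0] : gitCommit
}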
@@ -1,18 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/overleaf-editor-core/**
libraries/promise-utils/**
libraries/redis-wrapper/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/project-history/**
tools/migrations/**
161 services/project-history/Jenkinsfile vendored
@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/project-history') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/project-history/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/project-history') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/project-history') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/project-history') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'project-history-eslint', name: 'project-history eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/project-history/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/project-history') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/project-history') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/project-history') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
          }
          steps {
            dir('services/project-history') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/project-history') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'project-history test results', testResults: 'services/project-history/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
          }
        }
      }
    }
    cleanup {
      dir('services/project-history') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -1,14 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/o-error/**
libraries/redis-wrapper/**
libraries/settings/**
libraries/validation-tools/**
package-lock.json
package.json
patches/**
services/real-time/**
161 services/real-time/Jenkinsfile vendored
@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.

pipeline {
  agent {
    node {
      label 'jenkins-agent-web'
      customWorkspace '/workspace'
    }
  }
  options {
    timestamps()
    timeout(time: 15, unit: 'MINUTES')
  }
  environment {
    BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
    JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
    DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
  }
  stages {
    stage('Set Build Variables') {
      steps {
        script {
          def relevantCommitHash
          if (env.CHANGE_BRANCH) {
            def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
            if (commitExistsOnRemote) {
              echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
              relevantCommitHash = "${GIT_COMMIT}"
            } else {
              def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
              if (parentCommits.size() >= 2) {
                echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
                relevantCommitHash = parentCommits[0]
                echo "Using first parent (branch commit): ${relevantCommitHash}"
              } else {
                echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
                relevantCommitHash = "${GIT_COMMIT}"
              }
            }
          } else {
            echo "Branch build detected. Using commit: ${GIT_COMMIT}"
            relevantCommitHash = "${GIT_COMMIT}"
          }
          env.COMMIT_SHA = relevantCommitHash
          env.SHORT_SHA = relevantCommitHash.take(7)
          env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
        }
      }
    }
    stage('Stage 1') {
      parallel {
        stage('Build') {
          steps {
            dir('services/real-time') {
              retry(count: 3) {
                sh 'make build'
              }
            }
          }
        }
        stage('Create reports folder') {
          steps {
            sh 'mkdir services/real-time/reports'
          }
        }
      }
    }
    stage('Stage 2') {
      parallel {
        stage('Push Branch Image') {
          steps {
            dir('services/real-time') {
              sh 'make push_branch'
            }
          }
        }
        stage('Shellcheck') {
          steps {
            dir('services/real-time') {
              sh 'make shellcheck'
            }
          }
        }
        stage('Lint') {
          steps {
            dir('services/real-time') {
              sh 'make lint_ci'
            }
          }
          post {
            always {
              recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'real-time-eslint', name: 'real-time eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/real-time/reports/eslint.json')]
            }
          }
        }
        stage('Format') {
          steps {
            dir('services/real-time') {
              sh 'make format_ci'
            }
          }
        }
        stage('Typecheck') {
          steps {
            dir('services/real-time') {
              sh 'make typecheck_ci'
            }
          }
        }
        stage('Test Unit') {
          steps {
            dir('services/real-time') {
              retry(count: 3) {
                sh 'make test_unit'
              }
            }
          }
        }
        stage('Test Acceptance') {
          environment {
            COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
          }
          steps {
            dir('services/real-time') {
              retry(count: 3) {
                sh 'make test_acceptance'
              }
            }
          }
        }
      }
    }
    stage('Push Production') {
      steps {
        dir('services/real-time') {
          sh 'make push'
        }
      }
    }
  }
  post {
    always {
      junit checksName: 'real-time test results', testResults: 'services/real-time/reports/junit-*.xml'
    }
    failure {
      script {
        if (env.BRANCH_NAME == 'main') {
          node('built-in') {
            sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
          }
        }
      }
    }
    cleanup {
      dir('services/real-time') {
        sh 'make clean'
      }
      sh 'make clean_jenkins -j10'
    }
  }
}
@@ -71,7 +71,7 @@ module.exports = {
     files: ['**/test/**/*.*'],
     excludedFiles: [
       '**/test/unit/src/**/*.test.mjs',
-      'test/unit/vitest_bootstrap.mjs',
+      'test/unit/bootstrap.mjs',
     ], // exclude vitest files
     plugins: ['mocha', 'chai-expect', 'chai-friendly'],
     env: {
@@ -105,10 +105,7 @@ module.exports = {
       },
     },
     {
-      files: [
-        '**/test/unit/src/**/*.test.mjs',
-        'test/unit/vitest_bootstrap.mjs',
-      ],
+      files: ['**/test/unit/src/**/*.test.mjs', 'test/unit/bootstrap.mjs'],
       env: {
         jest: true, // best match for vitest API etc.
       },
@@ -137,8 +134,8 @@ module.exports = {
       'app.mjs',
       'scripts/**/*.mjs',
       'migrations/**/*.mjs',
-      'test/acceptance/src/**/*.mjs',
-      'test/unit/src/**/*.mjs',
+      '**/test/acceptance/src/**/*.mjs',
+      '**/test/unit/src/**/*.mjs',
     ],
     excludedFiles: [
       // migration template file
Some files were not shown because too many files have changed in this diff.