Merge pull request #30018 from overleaf/msm-fix-esm-import

[web] Fix ESM import

GitOrigin-RevId: 887f4927248241cb3d237e0bdad36b05928657ef
Author: Miguel Serrano
Date: 2025-12-02 10:07:50 +01:00
Committed by: Copybot
Parent: fa1aa0116a
Commit: a3ec5b2797

58 changed files with 0 additions and 4672 deletions


@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/access-token-encryptor/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/access-token-encryptor/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/access-token-encryptor monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/access-token-encryptor/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'access-token-encryptor-eslint', name: 'access-token-encryptor eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/access-token-encryptor/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/access-token-encryptor monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/access-token-encryptor monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/access-token-encryptor monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'access-token-encryptor test results', testResults: 'libraries/access-token-encryptor/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/access-token-encryptor/reports'
sh 'make clean_jenkins -j10'
}
}
}
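
For context on the Lint post step in the pipeline above: ESLint runs inside a container where the monorepo is mounted at /overleaf, while the Jenkins warnings plugin resolves paths against the agent workspace at /workspace, so the sed call rewrites the filePath entries of the report before recordIssues reads it. A minimal illustration with a hypothetical report entry (not taken from a real build):

    // Illustration only: effect of the sed rewrite on one hypothetical eslint.json entry.
    // before: {"filePath":"/overleaf/libraries/access-token-encryptor/index.js","messages":[]}
    // after:  {"filePath":"/workspace/libraries/access-token-encryptor/index.js","messages":[]}

Similarly, the Set Build Variables stage re-derives BUILD_NUMBER from the branch commit; with a hypothetical short SHA 887f492 and Jenkins build 42 it would read 887f492_42.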


@@ -1,8 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/o-error/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/fetch-utils/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/fetch-utils monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/fetch-utils/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'fetch-utils-eslint', name: 'fetch-utils eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/fetch-utils/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/fetch-utils monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/fetch-utils monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/fetch-utils monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'fetch-utils test results', testResults: 'libraries/fetch-utils/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/fetch-utils/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,9 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/o-error/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/logger/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/logger monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/logger/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'logger-eslint', name: 'logger eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/logger/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/logger monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/logger monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/logger monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'logger test results', testResults: 'libraries/logger/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/logger/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/metrics/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/metrics/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/metrics monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/metrics/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'metrics-eslint', name: 'metrics eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/metrics/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/metrics monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/metrics monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/metrics monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'metrics test results', testResults: 'libraries/metrics/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/metrics/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/mongo-utils/**
package-lock.json
package.json
patches/**


@@ -1,108 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/mongo-utils/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/mongo-utils monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/mongo-utils/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'mongo-utils-eslint', name: 'mongo-utils eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/mongo-utils/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/mongo-utils monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/mongo-utils monorepo npm run types:check'
}
}
}
}
}
post {
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/mongo-utils/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/o-error/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/o-error/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/o-error monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/o-error/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'o-error-eslint', name: 'o-error eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/o-error/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/o-error monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/o-error monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/o-error monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'o-error test results', testResults: 'libraries/o-error/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/o-error/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,12 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/o-error/**
libraries/object-persistor/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/object-persistor/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/object-persistor monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/object-persistor/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'object-persistor-eslint', name: 'object-persistor eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/object-persistor/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/object-persistor monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/object-persistor monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/object-persistor monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'object-persistor test results', testResults: 'libraries/object-persistor/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/object-persistor/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,8 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/o-error/**
libraries/overleaf-editor-core/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/overleaf-editor-core/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/overleaf-editor-core monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/overleaf-editor-core/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'overleaf-editor-core-eslint', name: 'overleaf-editor-core eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/overleaf-editor-core/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/overleaf-editor-core monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/overleaf-editor-core monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/overleaf-editor-core monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'overleaf-editor-core test results', testResults: 'libraries/overleaf-editor-core/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/overleaf-editor-core/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/promise-utils/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/promise-utils/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/promise-utils monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/promise-utils/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'promise-utils-eslint', name: 'promise-utils eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/promise-utils/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/promise-utils monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/promise-utils monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/promise-utils monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'promise-utils test results', testResults: 'libraries/promise-utils/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/promise-utils/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/ranges-tracker/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/ranges-tracker/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/ranges-tracker monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/ranges-tracker/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'ranges-tracker-eslint', name: 'ranges-tracker eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/ranges-tracker/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/ranges-tracker monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/ranges-tracker monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/ranges-tracker monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'ranges-tracker test results', testResults: 'libraries/ranges-tracker/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/ranges-tracker/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,10 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/o-error/**
libraries/redis-wrapper/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/redis-wrapper/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/redis-wrapper monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/redis-wrapper/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'redis-wrapper-eslint', name: 'redis-wrapper eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/redis-wrapper/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/redis-wrapper monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/redis-wrapper monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/redis-wrapper monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'redis-wrapper test results', testResults: 'libraries/redis-wrapper/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/redis-wrapper/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/settings/**
package-lock.json
package.json
patches/**


@@ -1,108 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/settings/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/settings monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/settings/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'settings-eslint', name: 'settings eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/settings/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/settings monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/settings monorepo npm run types:check'
}
}
}
}
}
post {
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/settings/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,7 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/stream-utils/**
package-lock.json
package.json
patches/**


@@ -1,118 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/stream-utils/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/stream-utils monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/stream-utils/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'stream-utils-eslint', name: 'stream-utils eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/stream-utils/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/stream-utils monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/stream-utils monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/stream-utils monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'stream-utils test results', testResults: 'libraries/stream-utils/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf libraries/stream-utils/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,8 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/o-error/**
libraries/validation-tools/**
package-lock.json
package.json
patches/**


@@ -1,109 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir libraries/validation-tools/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/libraries/validation-tools monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' libraries/validation-tools/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'validation-tools-eslint', name: 'validation-tools eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'libraries/validation-tools/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/libraries/validation-tools monorepo npm run format'
}
}
stage('Typecheck') {
steps {
sh 'bin/run -w /overleaf/libraries/validation-tools monorepo npm run types:check'
}
}
stage('Test') {
steps {
retry(count: 3) {
sh 'bin/run -w /overleaf/libraries/validation-tools monorepo npm run test:ci'
}
}
}
}
}
}
post {
always {
junit checksName: 'validation-tools test results', testResults: 'libraries/validation-tools/reports/junit-*.xml'
}
cleanup {
sh 'rm -rf libraries/validation-tools/reports'
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,34 +0,0 @@
copybara/**
libraries/**
patches/**
server-ce/**
server-pro/**
# echo chat clsi contacts docstore document-updater filestore history-v1 notifications project-history real-time references templates web | xargs -n1 echo | xargs -I% echo 'services/%/**'
# BEGIN GENERATED
services/chat/**
services/clsi/**
services/contacts/**
services/docstore/**
services/document-updater/**
services/filestore/**
services/history-v1/**
services/notifications/**
services/project-history/**
services/real-time/**
services/references/**
services/templates/**
services/web/**
# END GENERATED
tools/migrations/**
.dockerignore
.eslint*
.prettier*
package.json
package-lock.json
tsconfig.backend.json


@@ -1,385 +0,0 @@
// Initialize variables to signal that a given stage finished.
// We use them to build a graph of interconnected steps/dependencies.
// - Incoming edges use "waitUntil" and reference the given variables of dependencies.
// - Outgoing edges set the given variable to true.
def job_copybara_done = false
def job_npm_install_done = false
def job_prefetch_custom_done = false
def job_prefetch_default_done = false
def job_server_ce_build_done = false
def job_server_pro_build_done = false
pipeline {
agent {
node {
// Select a VM with the given label.
label 'jenkins-agent-web'
// Use the monorepo checkout in /workspace.
customWorkspace '/workspace'
}
}
options {
// Print timestamp next to each log line.
timestamps()
// Abort build after hitting first failure.
parallelsAlwaysFailFast()
timeout(time: 20, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${GIT_BRANCH.replace('origin/', '')}"
COMMIT_SHA = "${GIT_COMMIT}"
SHORT_SHA = "${GIT_COMMIT.take(7)}"
OVERLEAF_BASE_BRANCH = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base-internal:${BRANCH_NAME}"
OVERLEAF_BASE_LATEST = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base-internal:main"
OVERLEAF_BASE_TAG = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base-internal:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}"
OVERLEAF_BRANCH = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-internal:${BRANCH_NAME}"
OVERLEAF_LATEST = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-internal:main"
OVERLEAF_TAG = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-internal:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}"
IMAGE_TAG_CE = "${OVERLEAF_TAG}"
IMAGE_TAG_PRO = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}"
OVERLEAF_PRO_TAG_BRANCH = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro-internal:${BRANCH_NAME}"
OVERLEAF_PRO_TAG_LATEST = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro-internal:main"
}
stages {
stage('Create reports folder') {
steps {
sh 'mkdir server-ce/test/reports'
}
}
stage('Parallel') {
parallel {
stage('Install deps') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
script {
job_npm_install_done = true
}
}
}
stage('shellcheck') {
steps {
dir('server-ce') {
sh 'make shellcheck'
}
}
}
stage('Format') {
steps {
script {
waitUntil {
return job_npm_install_done
}
}
sh 'bin/run -w /overleaf/server-ce/test monorepo npm run format'
}
}
stage('Lint') {
steps {
script {
waitUntil {
return job_npm_install_done
}
}
sh 'bin/run -w /overleaf/server-ce/test monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' server-ce/test/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'server-pro-e2e-tests-eslint', name: 'Server-Pro-E2E-Tests eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'server-ce/test/reports/eslint.json')]
}
}
}
stage('Copybara') {
steps {
sh 'copybara/bin/sync'
script {
job_copybara_done = true
}
}
}
stage('Build CE image') {
steps {
script {
waitUntil {
return job_copybara_done
}
}
dir('copybara/public/repo/server-ce') {
sh 'make refresh-cache -j2'
retry(count: 3) {
sh 'make build-base'
}
retry(count: 3) {
sh 'make build-community'
}
}
script {
job_server_ce_build_done = true
}
}
}
stage('Push CE to internal') {
steps {
script {
waitUntil {
return job_server_ce_build_done
}
}
dir('copybara/public/repo/server-ce') {
sh 'make push'
}
}
}
stage('Build Pro image') {
environment {
OVERLEAF_CE_TAG = "${OVERLEAF_TAG}"
OVERLEAF_PRO_TAG = "${IMAGE_TAG_PRO}"
}
steps {
script {
waitUntil {
return job_server_ce_build_done
}
}
dir('server-pro') {
retry(count: 3) {
sh 'make build-ci'
}
}
script {
job_server_pro_build_done = true
}
}
}
stage('Push Pro to internal') {
steps {
script {
waitUntil {
return job_server_pro_build_done
}
}
dir('server-pro') {
sh 'make push_branch'
}
}
}
stage('Prefetch default') {
steps {
dir('server-ce/test') {
sh 'make prefetch_default -j4'
}
script {
job_prefetch_default_done = true
}
}
}
stage('Prefetch custom') {
steps {
dir('server-ce/test') {
sh 'make prefetch_custom -j4'
}
script {
job_prefetch_custom_done = true
}
}
}
stage('CE default') {
environment {
CYPRESS_SHARD = "CE_DEFAULT"
COMPOSE_PROJECT_NAME = "test-ce-default"
}
steps {
script {
waitUntil {
return job_npm_install_done && job_server_ce_build_done && job_prefetch_default_done
}
}
dir('server-ce/test') {
retry(count: 3) {
sh 'make test-e2e'
}
}
}
}
stage('CE custom 1') {
environment {
CYPRESS_SHARD = "CE_CUSTOM_1"
COMPOSE_PROJECT_NAME = "test-ce-custom-1"
}
steps {
script {
waitUntil {
return job_npm_install_done && job_server_ce_build_done && job_prefetch_default_done && job_prefetch_custom_done
}
}
dir('server-ce/test') {
retry(count: 3) {
sh 'make test-e2e'
}
}
}
}
stage('PRO default 1') {
environment {
CYPRESS_SHARD = "PRO_DEFAULT_1"
COMPOSE_PROJECT_NAME = "test-pro-default-1"
}
steps {
script {
waitUntil {
return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done
}
}
dir('server-ce/test') {
retry(count: 3) {
sh 'make test-e2e'
}
}
}
}
stage('PRO default 2') {
environment {
CYPRESS_SHARD = "PRO_DEFAULT_2"
COMPOSE_PROJECT_NAME = "test-pro-default-2"
}
steps {
script {
waitUntil {
return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done
}
}
dir('server-ce/test') {
retry(count: 3) {
sh 'make test-e2e'
}
}
}
}
stage('PRO custom 1') {
environment {
CYPRESS_SHARD = "PRO_CUSTOM_1"
COMPOSE_PROJECT_NAME = "test-pro-custom-1"
}
steps {
script {
waitUntil {
return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
}
}
dir('server-ce/test') {
retry(count: 3) {
sh 'make test-e2e'
}
}
}
}
stage('PRO custom 2') {
environment {
CYPRESS_SHARD = "PRO_CUSTOM_2"
COMPOSE_PROJECT_NAME = "test-pro-custom-2"
}
steps {
script {
waitUntil {
return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
}
}
dir('server-ce/test') {
retry(count: 3) {
sh 'make test-e2e'
}
}
}
}
stage('PRO custom 3') {
environment {
CYPRESS_SHARD = "PRO_CUSTOM_3"
COMPOSE_PROJECT_NAME = "test-pro-custom-3"
}
steps {
script {
waitUntil {
return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
}
}
dir('server-ce/test') {
retry(count: 3) {
sh 'make test-e2e'
}
}
}
}
stage('PRO custom 4') {
environment {
CYPRESS_SHARD = "PRO_CUSTOM_4"
COMPOSE_PROJECT_NAME = "test-pro-custom-4"
}
steps {
script {
waitUntil {
return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
}
}
dir('server-ce/test') {
retry(count: 3) {
sh 'make test-e2e'
}
}
}
}
stage('PRO custom 5') {
environment {
CYPRESS_SHARD = "PRO_CUSTOM_5"
COMPOSE_PROJECT_NAME = "test-pro-custom-5"
}
steps {
script {
waitUntil {
return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
}
}
dir('server-ce/test') {
retry(count: 3) {
sh 'make test-e2e'
}
}
}
}
}
}
}
post {
// Collect junit test results for both the success and failure cases.
always {
junit checksName: 'Server-Pro-E2E-Tests results', testResults: 'server-ce/test/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="B2B"'
}
}
}
}
// Ensure tear down of test containers, remove CE docker images, then run general Jenkins VM cleanup.
cleanup {
dir('server-ce/test') {
sh 'make clean -j10'
}
dir('server-ce') {
sh 'make clean'
}
dir('server-pro') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}
// vim: set ft=groovy :
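
The flag-and-waitUntil wiring described in the header comment of the pipeline above can be reduced to a short sketch; this is illustrative only, with hypothetical stage names and commands, and is not part of the deleted file:

    // Minimal sketch of the dependency pattern: parallel stages ordered via shared flags.
    def install_done = false
    pipeline {
      agent any
      stages {
        stage('Parallel') {
          parallel {
            stage('Install') {
              steps {
                sh 'make install'
                script { install_done = true }                // outgoing edge: signal completion
              }
            }
            stage('Lint') {
              steps {
                script { waitUntil { return install_done } }  // incoming edge: block until Install is done
                sh 'make lint'
              }
            }
          }
        }
      }
    }

Because both stages run under the same parallel block, the waitUntil loop simply polls the flag until the dependency has finished, which is how the larger pipeline expresses edges such as 'CE default' waiting on the install, CE image build and prefetch stages.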


@@ -1,15 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/promise-utils/**
libraries/settings/**
package-lock.json
package.json
patches/**
services/chat/**
tools/migrations/**


@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/chat') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/chat/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/chat') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/chat') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/chat') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'chat-eslint', name: 'chat eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/chat/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/chat') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/chat') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/chat') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
}
steps {
dir('services/chat') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/chat') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'chat test results', testResults: 'services/chat/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
}
}
}
}
cleanup {
dir('services/chat') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}


@@ -1,14 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/o-error/**
libraries/promise-utils/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/clsi/**

View File

@@ -1,167 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/clsi') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Pull TL2017') {
steps {
// Remove after new worker VM image is live.
sh 'docker pull us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2017.1'
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/clsi/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/clsi') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/clsi') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/clsi') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'clsi-eslint', name: 'clsi eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/clsi/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/clsi') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/clsi') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/clsi') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
}
steps {
dir('services/clsi') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/clsi') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'clsi test results', testResults: 'services/clsi/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
dir('services/clsi') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,15 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/promise-utils/**
libraries/settings/**
package-lock.json
package.json
patches/**
services/contacts/**
tools/migrations/**

View File

@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/contacts') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/contacts/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/contacts') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/contacts') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/contacts') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'contacts-eslint', name: 'contacts eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/contacts/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/contacts') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/contacts') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/contacts') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
}
steps {
dir('services/contacts') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/contacts') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'contacts test results', testResults: 'services/contacts/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="B2C"'
}
}
}
}
cleanup {
dir('services/contacts') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,17 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/object-persistor/**
libraries/promise-utils/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/docstore/**
tools/migrations/**

View File

@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/docstore') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/docstore/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/docstore') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/docstore') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/docstore') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'docstore-eslint', name: 'docstore eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/docstore/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/docstore') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/docstore') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/docstore') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
}
steps {
dir('services/docstore') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/docstore') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'docstore test results', testResults: 'services/docstore/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
dir('services/docstore') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,18 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/overleaf-editor-core/**
libraries/promise-utils/**
libraries/ranges-tracker/**
libraries/redis-wrapper/**
libraries/settings/**
package-lock.json
package.json
patches/**
services/document-updater/**
tools/migrations/**

View File

@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/document-updater') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/document-updater/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/document-updater') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/document-updater') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/document-updater') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'document-updater-eslint', name: 'document-updater eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/document-updater/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/document-updater') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/document-updater') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/document-updater') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
}
steps {
dir('services/document-updater') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/document-updater') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'document-updater test results', testResults: 'services/document-updater/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
}
}
}
}
cleanup {
dir('services/document-updater') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,14 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/o-error/**
libraries/object-persistor/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/filestore/**

View File

@@ -1,188 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/filestore') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/filestore/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/filestore') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/filestore') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/filestore') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'filestore-eslint', name: 'filestore eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/filestore/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/filestore') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/filestore') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/filestore') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance SHARD_01_') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance_shard_01_x"
MOCHA_GREP = "SHARD_01_"
}
steps {
dir('services/filestore') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
stage('Test Acceptance SHARD_02_') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance_shard_02_x"
MOCHA_GREP = "SHARD_02_"
}
steps {
dir('services/filestore') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
stage('Test Acceptance SHARD_03_') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance_shard_03_x"
MOCHA_GREP = "SHARD_03_"
}
steps {
dir('services/filestore') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/filestore') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'filestore test results', testResults: 'services/filestore/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
dir('services/filestore') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1 +0,0 @@
services/git-bridge/**

View File

@@ -1,111 +0,0 @@
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/git-bridge') {
sh 'make refresh_cache -j2'
retry(count: 3) {
sh 'make docker_build_base'
}
}
}
}
}
}
stage('Stage 2') {
parallel {
stage('Build production and push') {
steps {
dir('services/git-bridge') {
retry(count: 3) {
sh 'make docker_build'
}
sh 'make push_branch'
}
}
}
stage('Format Java') {
steps {
dir('services/git-bridge') {
sh 'make docker_format'
}
}
}
stage('Test') {
steps {
dir('services/git-bridge') {
retry(count: 3) {
sh 'make docker_test'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/git-bridge') {
sh 'make push'
}
}
}
}
post {
// Collect junit test results for both success and failure cases.
always {
junit checksName: 'git-bridge test results', testResults: 'services/git-bridge/target/surefire-reports/*.xml'
}
cleanup {
dir('services/git-bridge') {
sh 'make clean_ci'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,19 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/object-persistor/**
libraries/overleaf-editor-core/**
libraries/promise-utils/**
libraries/redis-wrapper/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/history-v1/**
tools/migrations/**

View File

@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/history-v1') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/history-v1/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/history-v1') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/history-v1') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/history-v1') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'history-v1-eslint', name: 'history-v1 eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/history-v1/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/history-v1') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/history-v1') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/history-v1') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
}
steps {
dir('services/history-v1') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/history-v1') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'history-v1 test results', testResults: 'services/history-v1/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
}
}
}
}
cleanup {
dir('services/history-v1') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,16 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/promise-utils/**
libraries/settings/**
libraries/validation-tools/**
package-lock.json
package.json
patches/**
services/notifications/**
tools/migrations/**

View File

@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/notifications') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/notifications/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/notifications') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/notifications') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/notifications') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'notifications-eslint', name: 'notifications eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/notifications/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/notifications') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/notifications') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/notifications') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
}
steps {
dir('services/notifications') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/notifications') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'notifications test results', testResults: 'services/notifications/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
dir('services/notifications') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,18 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/overleaf-editor-core/**
libraries/promise-utils/**
libraries/redis-wrapper/**
libraries/settings/**
libraries/stream-utils/**
package-lock.json
package.json
patches/**
services/project-history/**
tools/migrations/**

View File

@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/project-history') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/project-history/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/project-history') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/project-history') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/project-history') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'project-history-eslint', name: 'project-history eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/project-history/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/project-history') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/project-history') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/project-history') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
}
steps {
dir('services/project-history') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/project-history') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'project-history test results', testResults: 'services/project-history/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🥑 Core"'
}
}
}
}
cleanup {
dir('services/project-history') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,14 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/o-error/**
libraries/redis-wrapper/**
libraries/settings/**
libraries/validation-tools/**
package-lock.json
package.json
patches/**
services/real-time/**

View File

@@ -1,161 +0,0 @@
// Autogenerated by build scripts. Do not edit.
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/real-time') {
retry(count: 3) {
sh 'make build'
}
}
}
}
stage('Create reports folder') {
steps {
sh 'mkdir services/real-time/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Branch Image') {
steps {
dir('services/real-time') {
sh 'make push_branch'
}
}
}
stage('Shellcheck') {
steps {
dir('services/real-time') {
sh 'make shellcheck'
}
}
}
stage('Lint') {
steps {
dir('services/real-time') {
sh 'make lint_ci'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'real-time-eslint', name: 'real-time eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/real-time/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
dir('services/real-time') {
sh 'make format_ci'
}
}
}
stage('Typecheck') {
steps {
dir('services/real-time') {
sh 'make typecheck_ci'
}
}
}
stage('Test Unit') {
steps {
dir('services/real-time') {
retry(count: 3) {
sh 'make test_unit'
}
}
}
}
stage('Test Acceptance') {
environment {
COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE = "test_acceptance"
}
steps {
dir('services/real-time') {
retry(count: 3) {
sh 'make test_acceptance'
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/real-time') {
sh 'make push'
}
}
}
}
post {
always {
junit checksName: 'real-time test results', testResults: 'services/real-time/reports/junit-*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
dir('services/real-time') {
sh 'make clean'
}
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,23 +0,0 @@
# Autogenerated by build scripts. Do not edit.
.eslint*
.prettier*
libraries/access-token-encryptor/**
libraries/eslint-plugin/**
libraries/fetch-utils/**
libraries/logger/**
libraries/metrics/**
libraries/mongo-utils/**
libraries/o-error/**
libraries/object-persistor/**
libraries/overleaf-editor-core/**
libraries/promise-utils/**
libraries/ranges-tracker/**
libraries/redis-wrapper/**
libraries/settings/**
libraries/stream-utils/**
libraries/validation-tools/**
package-lock.json
package.json
patches/**
services/web/**
tools/migrations/**

View File

@@ -1,50 +0,0 @@
services/web/Makefile
services/web/.storybook/**
services/web/config/settings.webpack.js
services/web/frontend/**
services/web/locales/en.json
services/web/public/**
# echo services/web/modules/*/frontend | xargs -n1 | grep -v writefull | xargs -I% echo '%/**' | sort
# BEGIN GENERATED
services/web/modules/admin-panel/frontend/**
services/web/modules/ai/frontend/**
services/web/modules/algolia-search/frontend/**
services/web/modules/cms/frontend/**
services/web/modules/dropbox/frontend/**
services/web/modules/full-project-search/frontend/**
services/web/modules/git-bridge/frontend/**
services/web/modules/github-sync/frontend/**
services/web/modules/group-audit-log/frontend/**
services/web/modules/group-settings/frontend/**
services/web/modules/institutions/frontend/**
services/web/modules/labs/frontend/**
services/web/modules/launchpad/frontend/**
services/web/modules/learn/frontend/**
services/web/modules/metrics/frontend/**
services/web/modules/oauth2-server/frontend/**
services/web/modules/offline-mode/frontend/**
services/web/modules/onboarding/frontend/**
services/web/modules/open-in-overleaf/frontend/**
services/web/modules/portals/frontend/**
services/web/modules/publish-modal/frontend/**
services/web/modules/references-search/frontend/**
services/web/modules/split-test/frontend/**
services/web/modules/subscriptions/frontend/**
services/web/modules/support/frontend/**
services/web/modules/survey/frontend/**
services/web/modules/symbol-palette/frontend/**
services/web/modules/templates/frontend/**
services/web/modules/tpr-webmodule/frontend/**
services/web/modules/two-factor-authentication/frontend/**
services/web/modules/user-activate/frontend/**
services/web/modules/v1-projects/frontend/**
services/web/modules/v2-templates/frontend/**
# END GENERATED
# Do not include the Writefull integration.
services/web/modules/writefull/frontend/components/**
services/web/modules/writefull/frontend/shared/**
package.json
package-lock.json

View File

@@ -1,65 +0,0 @@
pipeline {
agent {
node {
// Select a VM with the given label.
label 'jenkins-agent-web'
// Use the monorepo checkout in /workspace.
customWorkspace '/workspace'
}
}
options {
// Print timestamp next to each log line.
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
BUCKET = "gs://overleaf-dev-storybook"
}
stages {
stage('Install') {
steps {
retry(count: 3) {
sh 'make monorepo_setup'
}
}
}
stage('Build') {
steps {
sh 'bin/run -e BRANCH_NAME -w /overleaf/services/web monorepo make build_storybook'
}
}
stage('Copy stories') {
steps {
sh 'gsutil -m copy -r services/web/data/storybook/* "${BUCKET}/"'
}
}
stage('Generate index') {
steps {
dir('services/web') {
sh 'make storybook_index'
}
}
}
stage('Copy index') {
steps {
sh 'gsutil copy services/web/data/storybook/index.html "${BUCKET}/index.html"'
}
}
}
post {
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
cleanup {
sh 'rm -rf services/web/data/storybook/'
sh 'make clean_jenkins -j10'
}
}
}

View File

@@ -1,396 +0,0 @@
pipeline {
agent {
node {
// Select a VM with the given label.
label 'jenkins-agent-web'
// Use the monorepo checkout in /workspace.
customWorkspace '/workspace'
}
}
options {
// Print timestamp next to each log line.
timestamps()
timeout(time: 15, unit: 'MINUTES')
}
environment {
IMAGE_NAME = 'web'
PROJECT_NAME = 'web'
IMAGE_REPO = 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/web'
AR_REPO_LOCATION = 'us-east1'
AR_URL = 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker'
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}" // preserve original BUILD_NUMBER
CDN_STAG = "gs://ol-stag-web-assets-1"
CDN_PROD = "gs://mgcp-1117973-ol-prod-web-assets-1"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
def commitExistsOnRemote = sh(script: "git branch --remotes --contains ${GIT_COMMIT}", returnStdout: true).trim()
if (commitExistsOnRemote) {
echo "PR build detected, but commit exists on remote. Using ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
} else {
def parentCommits = sh(script: 'git rev-parse HEAD^@', returnStdout: true).trim().split('\n')
if (parentCommits.size() >= 2) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT} (parents: ${parentCommits.join(', ')})"
relevantCommitHash = parentCommits[0]
echo "Using first parent (branch commit): ${relevantCommitHash}"
} else {
echo "WARN: PR build detected, but ${GIT_COMMIT} is neither a merge commit, nor does it exist on the remote."
relevantCommitHash = "${GIT_COMMIT}"
}
}
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Prefetch Tests Images') {
steps {
dir('services/web') {
sh 'docker compose -f docker-compose.ci.yml pull --quiet test_frontend_ct mongo redis_test ldap saml'
}
}
}
stage('Build') {
steps {
dir('services/web') {
sh 'bin/copy_external_pages'
retry(count: 3) {
sh 'make build_deps'
}
retry(count: 3) {
sh 'make build_dev'
}
sh 'make build_test_frontend_ct'
}
}
}
}
}
stage('Stage 2') {
parallel {
stage('Push Deps') {
steps {
dir('services/web') {
sh 'make push_branch'
}
}
}
stage('Format') {
steps {
dir('services/web') {
sh 'make format_in_docker'
}
}
}
stage('Lint') {
steps {
dir('services/web') {
sh 'make lint_in_docker'
}
}
post {
always {
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'web-eslint', name: 'Web eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'services/web/data/reports/eslint.json')]
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'web-stylelint', name: 'Web stylelint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [styleLint(pattern: 'services/web/data/reports/stylelint.json')]
}
}
}
stage('Shellcheck') {
steps {
dir('services/web') {
sh 'make shellcheck'
}
}
}
stage('Acceptance SaaS') {
steps {
dir('services/web') {
retry(count: 3) {
sh 'make test_acceptance_app_saas'
}
}
}
}
stage('Acceptance Server CE') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_acceptance_app_server_ce"
}
}
}
}
stage('Acceptance Server Pro') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_acceptance_app_server_pro"
}
}
}
}
stage('test_acceptance_modules_merged_saas_1') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_acceptance_modules_merged_saas_1"
}
}
}
}
stage('test_acceptance_modules_merged_saas_2') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_acceptance_modules_merged_saas_2"
}
}
}
}
stage('test_acceptance_modules_merged_saas_3') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_acceptance_modules_merged_saas_3"
}
}
}
}
stage('test_acceptance_modules_merged_saas_4') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_acceptance_modules_merged_saas_4"
}
}
}
}
stage('test_acceptance_modules_merged_server_ce') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_acceptance_modules_merged_server_ce"
}
}
}
}
stage('test_acceptance_modules_merged_server_pro') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_acceptance_modules_merged_server_pro"
}
}
}
}
stage('test_frontend') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_frontend"
}
}
}
}
stage('test_writefull') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_writefull"
}
}
}
}
stage('test_frontend_ct_core_other') {
environment {
CYPRESS_INTERNAL_BROWSER_CONNECT_TIMEOUT = '120000'
}
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_frontend_ct_core_other"
}
}
}
}
stage('test_frontend_ct_core_features') {
environment {
CYPRESS_INTERNAL_BROWSER_CONNECT_TIMEOUT = '120000'
}
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_frontend_ct_core_features"
}
}
}
}
stage('test_frontend_ct_modules') {
environment {
CYPRESS_INTERNAL_BROWSER_CONNECT_TIMEOUT = '120000'
}
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_frontend_ct_modules"
}
}
}
}
stage('test_frontend_ct_editor_visual') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_frontend_ct_editor_visual"
}
}
}
}
stage('test_frontend_ct_editor_other') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_frontend_ct_editor_other"
}
}
}
}
stage('Test Unit ESM - Parallel') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_unit_esm_parallel"
}
}
}
}
stage('Test Unit ESM - Sequential') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_unit_esm_sequential"
}
}
}
}
stage('Test Unit Mocha') {
steps {
dir('services/web') {
retry(count: 3) {
sh "make test_unit_mocha"
}
}
}
}
stage('Build webpack + production + cdn upload + sentry upload') {
stages {
stage('Wait a bit to give tests all the CPU capacity') {
steps {
sh 'sleep 60'
}
}
stage('Build Webpack') {
steps {
dir('services/web') {
sh 'make build_webpack'
}
}
}
stage('Build Pug') {
steps {
dir('services/web') {
sh 'make build_pug'
}
}
}
stage('CDN Upload Image') {
steps {
dir('services/web') {
sh 'make tar'
retry(count: 3) {
sh 'bin/cdn_upload'
}
}
}
}
stage('Build Production') {
steps {
dir('services/web') {
sh 'make build'
}
}
}
stage('Sentry Upload') {
steps {
dir('services/web') {
sh 'gcloud secrets versions access latest --secret=web-sentryclirc > .sentryclirc'
retry(count: 3) {
sh 'make sentry_upload'
}
}
}
post {
cleanup {
dir('services/web') {
sh 'rm -f .sentryclirc'
}
}
}
}
stage('Push Production image early') {
steps {
dir('services/web') {
sh 'make push_scratch'
}
}
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/web') {
sh 'make publish'
}
}
}
}
post {
always {
junit checksName: 'Web test results', testResults: 'services/web/data/reports/junit-*.xml,services/web/data/reports/junit-*/**/*.xml'
}
failure {
script {
if (env.BRANCH_NAME == 'main') {
node('built-in') {
sh '/usr/local/bin/open-gh-failure-issue --project="🚉 Platform"'
}
}
}
}
// Ensure tear down of test containers, then run general Jenkins VM cleanup.
cleanup {
dir('services/web') {
sh 'make clean -j10'
}
sh 'make clean_jenkins -j10'
}
}
}
// vim: set ft=groovy :

View File

@@ -1,6 +0,0 @@
tools/migrations/**
package.json
package-lock.json
Makefile

View File

@@ -1,64 +0,0 @@
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
parallelsAlwaysFailFast()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
BUILD_NUMBER = "${SHORT_SHA}_${BUILD_NUMBER}"
COMMIT_SHA = "${GIT_COMMIT}"
SHORT_SHA = "${GIT_COMMIT.take(7)}"
}
stages {
stage('Stage 1') {
parallel {
stage('Install monorepo') {
steps {
sh 'make monorepo_setup'
}
}
stage('Create reports folder') {
steps {
sh 'mkdir tools/migrations/reports'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Lint') {
steps {
sh 'bin/run -w /overleaf/tools/migrations monorepo npm run lint -- --format json --output-file reports/eslint.json'
}
post {
always {
sh """
sed -i 's_"filePath":"/overleaf_"filePath":"/workspace_g' tools/migrations/reports/eslint.json
"""
recordIssues checksAnnotationScope: 'ALL', enabledForFailure: true, failOnError: true, id: 'migrations-eslint', name: 'migrations eslint', qualityGates: [[integerThreshold: 1, threshold: 1.0, type: 'TOTAL']], sourceCodeRetention: 'LAST_BUILD', tools: [esLint(pattern: 'tools/migrations/reports/eslint.json')]
}
}
}
stage('Format') {
steps {
sh 'bin/run -w /overleaf/tools/migrations monorepo npm run format'
}
}
}
}
}
post {
cleanup {
sh 'rm -rf tools/migrations/reports'
sh 'make clean_jenkins'
}
}
}