18 Commits

Author SHA1 Message Date
Borja
07234fd7d2 Enable ai shortcut for new users (#28844)
GitOrigin-RevId: b6215b4ccf97f1924265439edf5b93b1a7118e37
2025-10-07 08:07:18 +00:00
Jakob Ackermann
e03ca5a3a8 [server-ce] tests: migrate host-admin to ESM, zod and npm-workspaces (#28838)
* [server-ce] tests: migrate host-admin to ESM, zod and npm-workspaces

* [server-ce] test: use import.meta.dirname

Co-authored-by: Eric Mc Sween <eric.mcsween@overleaf.com>

* [server-ce] test: fix zod schema for docker compose endpoint

---------

Co-authored-by: Eric Mc Sween <eric.mcsween@overleaf.com>
GitOrigin-RevId: d490948693f341210c0ab5c2947db7c9a17775ef
2025-10-07 08:07:06 +00:00
Jakob Ackermann
c621d0f318 [git-bridge] migrate pipeline to Jenkins (#28763)
* [git-bridge] migrate pipeline to Jenkins

* [terraform] remove cloud build trigger for git-bridge

GitOrigin-RevId: fdf842a561c00a609f2de0cab894a761900322c6
2025-10-07 08:06:48 +00:00
Alf Eaton
7e74841a97 Ensure that invalid search regexp is correctly highlighted (#28724)
GitOrigin-RevId: f122e2ea649d8cbfa984ddad0b7424aa96bd015a
2025-10-07 08:06:40 +00:00
Alf Eaton
93a4498c93 Merge pull request #28733 from overleaf/ae-tooltip-delay
Honour `delay: 0` in tooltip config

GitOrigin-RevId: d69f3cb055b79c3335d4220f2d91a44e93bc47b3
2025-10-07 08:06:36 +00:00
Simon Gardner
1bef1bb1d4 Prevent Group subscription admins from deleting their own group subscription when invited to join
GitOrigin-RevId: a467a690cf4ee8b1a1081496205559a7f183a1f9
2025-10-07 08:06:00 +00:00
Simon Gardner
14356f2675 Improve UX for entering multi-digit quantities in Buy More Licenses feature
GitOrigin-RevId: c51e2146dbb53144e6951a16f7162ba2d10c5c4e
2025-10-07 08:05:55 +00:00
Domagoj Kriskovic
bc6ae7816c [web] When main document is not set open last selected file (#28737)
* [web] if root doc is not found, open last selected file

* call openInitialDoc even without rootdocid

GitOrigin-RevId: fe629482c1827c6205d5cf2de497e593110bd36e
2025-10-07 08:05:47 +00:00
Domagoj Kriskovic
6d84dfddcc Skip checking status code for fetchJson
GitOrigin-RevId: 1470fdd5aabde3448c5ac5ed8e9203fc509b13e8
2025-10-07 08:05:43 +00:00
Domagoj Kriskovic
62ec3f9644 Promisify ReadingASnapshotTests
GitOrigin-RevId: 281b3a84466175a38b5ebbf9282b4c996e04d221
2025-10-07 08:05:39 +00:00
Domagoj Kriskovic
aed47ef01b Promisify LatestSnapshotTests
GitOrigin-RevId: 40b0811e25ac9ee75c295c89cec7ad07570007c4
2025-10-07 08:05:34 +00:00
Domagoj Kriskovic
64d1f160a1 promisify LabelsTests and SummarisedUpdatesTests
GitOrigin-RevId: 32552c8c63ebbd8e73e8c179fcce67b3b955251d
2025-10-07 08:05:30 +00:00
Domagoj Kriskovic
9150fc332c Promisify SummarisedUpdatesTests
GitOrigin-RevId: 65782b0cf6e552425d6fad716f74f8f3f22cf7e9
2025-10-07 08:05:26 +00:00
Domagoj Kriskovic
c1bfd2192d promisify GetChangesInChunkSince
GitOrigin-RevId: 82af36d7cb0301e3bf9527b219c67ceb6fc9b34f
2025-10-07 08:05:21 +00:00
Domagoj Kriskovic
54190e1040 promisidy FlushManagerTests
GitOrigin-RevId: fa26499be5e40d6741ea1d012188a0a70f519b51
2025-10-07 08:05:17 +00:00
Domagoj Kriskovic
1c2aa6251e promisify FileTreeDiffTests
GitOrigin-RevId: f7c9c119f28bdbf98c526d767ee06286c33ecfa2
2025-10-07 08:05:12 +00:00
roo hutton
c8d4edfa85 Merge pull request #28824 from overleaf/rh-promisify-ott-handler
Convert OneTimeTokenHandler to async/await

GitOrigin-RevId: 4fb3187ed2003add695b8c6a5e95c9a380dab9f7
2025-10-06 08:05:36 +00:00
roo hutton
37918244fc Merge pull request #28578 from overleaf/rh-account-settings-links
Improve consistency of account settings link text

GitOrigin-RevId: d30c37fe32c425a571af6d580ebfe37403ebd142
2025-10-06 08:05:24 +00:00
38 changed files with 1542 additions and 5661 deletions

71
package-lock.json generated
View File

@@ -9,6 +9,7 @@
"workspaces": [
"jobs/mirror-documentation",
"libraries/*",
"server-ce/test",
"services/analytics",
"services/chat",
"services/clsi",
@@ -14838,6 +14839,10 @@
"resolved": "tools/saas-e2e",
"link": true
},
"node_modules/@overleaf/server-ce-test": {
"resolved": "server-ce/test",
"link": true
},
"node_modules/@overleaf/settings": {
"resolved": "libraries/settings",
"link": true
@@ -21364,17 +21369,17 @@
}
},
"node_modules/@writefull/core": {
"version": "1.27.24",
"resolved": "https://registry.npmjs.org/@writefull/core/-/core-1.27.24.tgz",
"integrity": "sha512-0f0zc4rb0+44dFBRDuognknrv/z/jfgyU15hX+s334q3H4lOH/H0N6g8HwxcYbCzvzIBQLyrWmsJA8Nr/iAs3g==",
"version": "1.27.26",
"resolved": "https://registry.npmjs.org/@writefull/core/-/core-1.27.26.tgz",
"integrity": "sha512-EI8te62cSuxTLT6tv9tOuk0ddkVIcciD/a/HdTaCsEAF+vpAJHEtD4fkLHeZt+U4P5cJhQPNt6lG/Ei0O2AR9g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@bugsnag/js": "^7.23.0",
"@bugsnag/plugin-react": "^7.24.0",
"@growthbook/growthbook": "^1.4.1",
"@writefull/ui": "^1.27.24",
"@writefull/utils": "^1.27.24",
"@writefull/ui": "^1.27.26",
"@writefull/utils": "^1.27.26",
"axios": "^1.8.3",
"idb": "^8.0.2",
"inversify": "^6.0.2",
@@ -21386,14 +21391,14 @@
}
},
"node_modules/@writefull/ui": {
"version": "1.27.24",
"resolved": "https://registry.npmjs.org/@writefull/ui/-/ui-1.27.24.tgz",
"integrity": "sha512-Rxq5eSJIVGLkEruLPA6kXXfNdo1X7p1A2CKsFf2IIw6Kw7jfckqMu+jC9y/ItccbbzhY5pt7mQXZtLjL+U72wQ==",
"version": "1.27.26",
"resolved": "https://registry.npmjs.org/@writefull/ui/-/ui-1.27.26.tgz",
"integrity": "sha512-I9hcCKz6VE8bpmo3/MDAZPNX01TkBj63FcBpKcPQ/bkvNAwQvjJ1zaB1K65GBPIZS1FvFN6fXEO8+LPj/0Z+Kg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@floating-ui/react": "^0.27.5",
"@writefull/utils": "^1.27.24"
"@writefull/utils": "^1.27.26"
},
"peerDependencies": {
"react": ">= 18",
@@ -21401,9 +21406,9 @@
}
},
"node_modules/@writefull/utils": {
"version": "1.27.24",
"resolved": "https://registry.npmjs.org/@writefull/utils/-/utils-1.27.24.tgz",
"integrity": "sha512-b+d4hhT6Z92w3+m2itJJmm3iCNt1v0SHKXZ8jxre+ZmjmIkcno2z/naOs55Q2weo9AgGZAEKhRiir0gSTLLi3A==",
"version": "1.27.26",
"resolved": "https://registry.npmjs.org/@writefull/utils/-/utils-1.27.26.tgz",
"integrity": "sha512-cb1nGLP0RBKSvwzGfEGj8xZN9jy15JPoPbNiijHlILiR2+KQ0ICu2uWSos2K2OaKO8mK/2P0nFU4rJOZ/9jc8w==",
"dev": true,
"license": "MIT"
},
@@ -50695,6 +50700,42 @@
}
}
},
"server-ce/test": {
"name": "@overleaf/server-ce-test",
"devDependencies": {
"@isomorphic-git/lightning-fs": "^4.6.0",
"@overleaf/validation-tools": "*",
"@testing-library/cypress": "^10.0.3",
"@types/adm-zip": "^0.5.7",
"@types/pdf-parse": "^1.1.5",
"@types/uuid": "^9.0.8",
"adm-zip": "^0.5.12",
"body-parser": "^1.20.3",
"cypress": "13.13.2",
"cypress-multi-reporters": "^2.0.5",
"express": "^4.21.2",
"isomorphic-git": "^1.33.1",
"js-yaml": "^4.1.0",
"mocha-junit-reporter": "^2.2.1",
"pdf-parse": "^1.1.1",
"uuid": "^9.0.1",
"zod-validation-error": "^4.0.1"
}
},
"server-ce/test/node_modules/uuid": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz",
"integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==",
"dev": true,
"funding": [
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
],
"license": "MIT",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"services/analytics": {
"name": "@overleaf/analytics",
"dependencies": {
@@ -52703,9 +52744,9 @@
"@uppy/utils": "^5.7.0",
"@uppy/xhr-upload": "^3.6.0",
"@vitest/eslint-plugin": "1.1.44",
"@writefull/core": "^1.27.24",
"@writefull/ui": "^1.27.24",
"@writefull/utils": "^1.27.24",
"@writefull/core": "^1.27.26",
"@writefull/ui": "^1.27.26",
"@writefull/utils": "^1.27.26",
"5to6-codemod": "^1.8.0",
"abort-controller": "^3.0.0",
"acorn": "^7.1.1",

View File

@@ -52,6 +52,7 @@
"workspaces": [
"jobs/mirror-documentation",
"libraries/*",
"server-ce/test",
"services/analytics",
"services/chat",
"services/clsi",

View File

@@ -64,8 +64,7 @@ pipeline {
parallel {
stage('Install deps') {
steps {
sh 'make install -j10'
sh 'make -C server-ce/test npm_install_in_docker'
sh 'make monorepo_setup'
script {
job_npm_install_done = true
}
@@ -85,9 +84,7 @@ pipeline {
return job_npm_install_done
}
}
dir('server-ce/test') {
sh 'make format_in_docker'
}
sh 'bin/run -w /overleaf/server-ce/test monorepo npm run format'
}
}
stage('Copybara') {

View File

@@ -5,6 +5,7 @@ all: test-e2e
# We need to have both file-system layouts agree on the path for the docker compose project.
# Notable the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance).
export PWD = $(shell pwd)
export MONOREPO = $(shell cd ../../ && pwd)
export TEX_LIVE_DOCKER_IMAGE ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1
export ALL_TEX_LIVE_DOCKER_IMAGES ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2023.1,us-east1-docker.pkg.dev/overleaf-ops/ol-docker/texlive-full:2022.1
@@ -96,14 +97,4 @@ build_mailtrap:
git clone https://github.com/dbck/docker-mailtrap.git || true && cd docker-mailtrap && git checkout v1.5.0
docker build -t mailtrap docker-mailtrap/build
npm_install_in_docker: export COMPOSE_PROJECT_NAME=
npm_install_in_docker:
$(MAKE) -C ../../ .metadata/docker-image/monorepo
cd ../../ && bin/run --no-deps --workdir /overleaf/server-ce/test monorepo npm --no-dry-run install
format_in_docker: export COMPOSE_PROJECT_NAME=
format_in_docker:
$(MAKE) -C ../../ .metadata/docker-image/monorepo
cd ../../ && bin/run --no-deps --workdir /overleaf/server-ce/test monorepo npm run format
.PHONY: test-e2e test-e2e-open

View File

@@ -1,6 +1,6 @@
const { defineConfig } = require('cypress')
const { readPdf, readFileInZip } = require('./helpers/read-file')
const fs = require('node:fs')
import { defineConfig } from 'cypress'
import { readFileInZip, readPdf } from './helpers/read-file'
import fs from 'node:fs'
if (process.env.CYPRESS_SHARD && !process.env.SPEC_PATTERN) {
// Running Cypress on all the specs is wasteful (~1min) when only few of them
@@ -36,20 +36,19 @@ const specPattern = process.env.SPEC_PATTERN || './**/*.spec.ts'
let reporterOptions = {}
if (process.env.CI) {
reporterOptions = {
reporter: '/overleaf/server-ce/test/node_modules/cypress-multi-reporters',
reporter: `${process.env.MONOREPO}/node_modules/cypress-multi-reporters`,
reporterOptions: {
configFile: 'cypress/cypress-multi-reporters.json',
},
}
}
module.exports = defineConfig({
export default defineConfig({
defaultCommandTimeout: 10_000,
fixturesFolder: 'cypress/fixtures',
video: process.env.CYPRESS_VIDEO === 'true',
screenshotsFolder: 'cypress/results',
videosFolder: 'cypress/results',
videoUploadOnPasses: false,
viewportHeight: 768,
viewportWidth: 1024,
e2e: {

View File

@@ -70,11 +70,15 @@ services:
stop_grace_period: 0s
entrypoint: npm
command: run cypress:run
working_dir: /overleaf/server-ce/test
# See comment in Makefile regarding matching file paths
working_dir: $PWD
volumes:
- ./:/overleaf/server-ce/test
- $PWD:$PWD
- $MONOREPO/libraries:$MONOREPO/libraries:ro
- $MONOREPO/node_modules:$MONOREPO/node_modules:ro
user: "${DOCKER_USER:-1000:1000}"
environment:
MONOREPO:
CYPRESS_SHARD:
CYPRESS_BASE_URL: http://sharelatex
CYPRESS_FULL_FILESTORE_MIGRATION:
@@ -101,11 +105,14 @@ services:
working_dir: $PWD
volumes:
- $PWD:$PWD
- $MONOREPO/libraries:$MONOREPO/libraries:ro
- $MONOREPO/node_modules:$MONOREPO/node_modules:ro
- /var/run/docker.sock:/var/run/docker.sock
stop_grace_period: 0s
environment:
CI:
PWD:
MONOREPO:
CYPRESS_SHARD:
COMPOSE_PROJECT_NAME:
TEX_LIVE_DOCKER_IMAGE:

View File

@@ -1,10 +1,9 @@
import fs from 'fs'
import path from 'path'
import pdf from 'pdf-parse'
// @ts-ignore broken package entrypoint
import pdf from 'pdf-parse/lib/pdf-parse.js'
import AdmZip from 'adm-zip'
import { promisify } from 'util'
const sleep = promisify(setTimeout)
import { setTimeout } from 'timers/promises'
const MAX_ATTEMPTS = 15
const POLL_INTERVAL = 500
@@ -31,7 +30,7 @@ export async function readFileInZip({
throw new Error(`${fileToRead} not found in ${pathToZip}`)
}
}
await sleep(POLL_INTERVAL)
await setTimeout(POLL_INTERVAL)
attempt++
}
throw new Error(`${pathToZip} not found`)
@@ -45,7 +44,7 @@ export async function readPdf(file: string) {
const { text } = await pdf(dataBuffer)
return text
}
await sleep(POLL_INTERVAL)
await setTimeout(POLL_INTERVAL)
attempt++
}
throw new Error(`${file} not found`)

View File

@@ -1,17 +1,14 @@
const fs = require('fs')
const Path = require('path')
const { execFile } = require('child_process')
const express = require('express')
const bodyParser = require('body-parser')
const {
celebrate: validate,
Joi,
errors: handleValidationErrors,
} = require('celebrate')
const YAML = require('js-yaml')
import fs from 'node:fs'
import Path from 'node:path'
import { execFile } from 'node:child_process'
import bodyParser from 'body-parser'
import express from 'express'
import YAML from 'js-yaml'
import { isZodErrorLike } from 'zod-validation-error'
import { ParamsError, validateReq, z } from '@overleaf/validation-tools'
const DATA_DIR = Path.join(
__dirname,
import.meta.dirname,
'data',
// Give each shard their own data dir.
process.env.CYPRESS_SHARD || 'default'
@@ -108,84 +105,80 @@ app.use((req, res, next) => {
next()
})
app.post(
'/run/script',
validate(
{
body: {
cwd: Joi.string().required(),
script: Joi.string().required(),
args: Joi.array().items(Joi.string()),
user: Joi.string().required(),
hasOverleafEnv: Joi.boolean().required(),
},
},
{ allowUnknown: false }
),
(req, res) => {
const { cwd, script, args, user, hasOverleafEnv } = req.body
app.post('/run/script', (req, res) => {
const {
body: { cwd, script, args, user, hasOverleafEnv },
} = validateReq(
req,
z.object({
body: z.object({
cwd: z.string(),
script: z.string(),
args: z.array(z.string()),
user: z.string(),
hasOverleafEnv: z.boolean(),
}),
})
)
const env = hasOverleafEnv
? 'source /etc/overleaf/env.sh || source /etc/sharelatex/env.sh'
: 'true'
const env = hasOverleafEnv
? 'source /etc/overleaf/env.sh || source /etc/sharelatex/env.sh'
: 'true'
runDockerCompose(
'exec',
[
'--workdir',
`/overleaf/${cwd}`,
'sharelatex',
'bash',
'-c',
`source /etc/container_environment.sh && ${env} && /sbin/setuser ${user} node ${script} ${args.map(a => JSON.stringify(a)).join(' ')}`,
],
(error, stdout, stderr) => {
res.json({
error,
stdout,
stderr,
})
}
)
}
)
runDockerCompose(
'exec',
[
'--workdir',
`/overleaf/${cwd}`,
'sharelatex',
'bash',
'-c',
`source /etc/container_environment.sh && ${env} && /sbin/setuser ${user} node ${script} ${args.map(a => JSON.stringify(a)).join(' ')}`,
],
(error, stdout, stderr) => {
res.json({
error,
stdout,
stderr,
})
}
)
})
app.post(
'/run/gruntTask',
validate(
{
body: {
task: Joi.string().required(),
args: Joi.array().items(Joi.string()),
},
},
{ allowUnknown: false }
),
(req, res) => {
const { task, args } = req.body
app.post('/run/gruntTask', (req, res) => {
const {
body: { task, args },
} = validateReq(
req,
z.object({
body: z.object({
task: z.string(),
args: z.array(z.string()),
}),
})
)
runDockerCompose(
'exec',
[
'--workdir',
'/var/www/sharelatex',
'sharelatex',
'bash',
'-c',
`source /etc/container_environment.sh && /sbin/setuser www-data grunt ${JSON.stringify(task)} ${args.map(a => JSON.stringify(a)).join(' ')}`,
],
(error, stdout, stderr) => {
res.json({
error,
stdout,
stderr,
})
}
)
}
)
runDockerCompose(
'exec',
[
'--workdir',
'/var/www/sharelatex',
'sharelatex',
'bash',
'-c',
`source /etc/container_environment.sh && /sbin/setuser www-data grunt ${JSON.stringify(task)} ${args.map(a => JSON.stringify(a)).join(' ')}`,
],
(error, stdout, stderr) => {
res.json({
error,
stdout,
stderr,
})
}
)
})
const allowedVars = Joi.object(
const allowedVars = z.object(
Object.fromEntries(
[
'OVERLEAF_APP_NAME',
@@ -227,7 +220,7 @@ const allowedVars = Joi.object(
'SHARELATEX_SITE_URL',
'SHARELATEX_MONGO_URL',
'SHARELATEX_REDIS_HOST',
].map(name => [name, Joi.string()])
].map(name => [name, z.string().optional()])
)
)
@@ -296,36 +289,37 @@ function setVarsDockerCompose({
writeDockerComposeOverride(cfg)
}
app.post(
'/docker/compose/:cmd',
validate(
{
body: {
args: Joi.array().allow(
'--detach',
'--wait',
'--volumes',
'--timeout=60',
'sharelatex',
'git-bridge',
'mongo',
'redis'
app.post('/docker/compose/:cmd', (req, res) => {
const {
params: { cmd },
body: { args },
} = validateReq(
req,
z.object({
params: z.object({
cmd: z.literal(['up', 'stop', 'down', 'ps', 'logs']),
}),
body: z.object({
args: z.array(
z.literal([
'--detach',
'--wait',
'--volumes',
'--timeout=60',
'sharelatex',
'git-bridge',
'mongo',
'redis',
])
),
},
params: {
cmd: Joi.allow('up', 'stop', 'down', 'ps', 'logs'),
},
},
{ allowUnknown: false }
),
(req, res) => {
const { cmd } = req.params
const { args } = req.body
runDockerCompose(cmd, args, (error, stdout, stderr) => {
res.json({ error, stdout, stderr })
}),
})
}
)
)
runDockerCompose(cmd, args, (error, stdout, stderr) => {
res.json({ error, stdout, stderr })
})
})
function maybeResetData(resetData, callback) {
if (!resetData) return callback()
@@ -347,88 +341,78 @@ function maybeResetData(resetData, callback) {
)
}
app.post(
'/reconfigure',
validate(
{
body: {
pro: Joi.boolean().required(),
mongoVersion: Joi.string().allow('').optional(),
version: Joi.string().required(),
app.post('/reconfigure', (req, res) => {
const {
body: { pro, version, vars, withDataDir, resetData, mongoVersion },
} = validateReq(
req,
z.object({
body: z.object({
pro: z.boolean(),
version: z.string(),
vars: allowedVars,
withDataDir: Joi.boolean().optional(),
resetData: Joi.boolean().optional(),
},
},
{ allowUnknown: false }
),
(req, res) => {
const { pro, version, vars, withDataDir, resetData, mongoVersion } =
req.body
maybeResetData(resetData, (error, stdout, stderr) => {
if (error) return res.json({ error, stdout, stderr })
const previousConfigServer = previousConfig
const newConfig = JSON.stringify(req.body)
if (previousConfig === newConfig) {
return res.json({ previousConfigServer })
}
try {
setVarsDockerCompose({ pro, version, vars, withDataDir, mongoVersion })
} catch (error) {
return res.json({ error })
}
if (error) return res.json({ error, stdout, stderr })
runDockerCompose(
'up',
['--detach', '--wait', 'sharelatex'],
(error, stdout, stderr) => {
previousConfig = newConfig
res.json({ error, stdout, stderr, previousConfigServer })
}
)
withDataDir: z.boolean(),
resetData: z.boolean(),
mongoVersion: z.string(),
}),
})
}
)
)
maybeResetData(resetData, (error, stdout, stderr) => {
if (error) return res.json({ error, stdout, stderr })
app.post(
'/mongo/setFeatureCompatibilityVersion',
validate(
{
body: {
mongoVersion: Joi.string().required(),
},
},
{ allowUnknown: false }
),
(req, res) => {
const { mongoVersion } = req.body
const mongosh = mongoVersion > '5' ? 'mongosh' : 'mongo'
const params = {
setFeatureCompatibilityVersion: mongoVersion,
const previousConfigServer = previousConfig
const newConfig = JSON.stringify(req.body)
if (previousConfig === newConfig) {
return res.json({ previousConfigServer })
}
if (mongoVersion >= '7.0') {
// MongoServerError: Once you have upgraded to 7.0, you will not be able to downgrade FCV and binary version without support assistance. Please re-run this command with 'confirm: true' to acknowledge this and continue with the FCV upgrade.
// NOTE: 6.0 does not know about this flag. So conditionally add it.
// MongoServerError: BSON field 'setFeatureCompatibilityVersion.confirm' is an unknown field.
params.confirm = true
try {
setVarsDockerCompose({ pro, version, vars, withDataDir, mongoVersion })
} catch (error) {
return res.json({ error })
}
if (error) return res.json({ error, stdout, stderr })
runDockerCompose(
'exec',
[
'mongo',
mongosh,
'--eval',
`db.adminCommand(${JSON.stringify(params)})`,
],
'up',
['--detach', '--wait', 'sharelatex'],
(error, stdout, stderr) => {
res.json({ error, stdout, stderr })
previousConfig = newConfig
res.json({ error, stdout, stderr, previousConfigServer })
}
)
})
})
app.post('/mongo/setFeatureCompatibilityVersion', (req, res) => {
const {
body: { mongoVersion },
} = validateReq(
req,
z.object({
body: z.object({
mongoVersion: z.string(),
}),
})
)
const mongosh = mongoVersion > '5' ? 'mongosh' : 'mongo'
const params = {
setFeatureCompatibilityVersion: mongoVersion,
}
)
if (mongoVersion >= '7.0') {
// MongoServerError: Once you have upgraded to 7.0, you will not be able to downgrade FCV and binary version without support assistance. Please re-run this command with 'confirm: true' to acknowledge this and continue with the FCV upgrade.
// NOTE: 6.0 does not know about this flag. So conditionally add it.
// MongoServerError: BSON field 'setFeatureCompatibilityVersion.confirm' is an unknown field.
params.confirm = true
}
runDockerCompose(
'exec',
['mongo', mongosh, '--eval', `db.adminCommand(${JSON.stringify(params)})`],
(error, stdout, stderr) => {
res.json({ error, stdout, stderr })
}
)
})
app.get('/redis/keys', (req, res) => {
runDockerCompose(
@@ -450,7 +434,14 @@ app.delete('/data/user_files', (req, res) => {
)
})
app.use(handleValidationErrors())
app.use((error, req, res, next) => {
if (error instanceof ParamsError) {
res.status(404).json({ error })
} else if (isZodErrorLike(error)) {
res.status(400).json({ error })
}
next(error)
})
purgeDataDir()
writeDockerComposeOverride(defaultDockerComposeOverride())

File diff suppressed because it is too large Load Diff

View File

@@ -1,22 +1,23 @@
{
"name": "@overleaf/server-ce/test",
"name": "@overleaf/server-ce-test",
"description": "e2e tests for Overleaf Community Edition",
"private": true,
"type": "module",
"scripts": {
"cypress:open": "cypress open --e2e --browser chrome",
"cypress:run": "cypress run --e2e --browser chrome",
"format": "prettier --list-different $PWD/'**/*.{js,mjs,ts,tsx,json}'",
"format:fix": "prettier --write $PWD/'**/*.{js,mjs,ts,tsx,json}'"
},
"dependencies": {
"devDependencies": {
"@isomorphic-git/lightning-fs": "^4.6.0",
"@overleaf/validation-tools": "*",
"@testing-library/cypress": "^10.0.3",
"@types/adm-zip": "^0.5.7",
"@types/pdf-parse": "^1.1.5",
"@types/uuid": "^9.0.8",
"adm-zip": "^0.5.12",
"body-parser": "^1.20.3",
"celebrate": "^15.0.3",
"cypress": "13.13.2",
"cypress-multi-reporters": "^2.0.5",
"express": "^4.21.2",
@@ -24,7 +25,7 @@
"js-yaml": "^4.1.0",
"mocha-junit-reporter": "^2.2.1",
"pdf-parse": "^1.1.1",
"typescript": "^5.0.4",
"uuid": "^9.0.1"
"uuid": "^9.0.1",
"zod-validation-error": "^4.0.1"
}
}

View File

@@ -0,0 +1 @@
services/git-bridge/**

88
services/git-bridge/Jenkinsfile vendored Normal file
View File

@@ -0,0 +1,88 @@
pipeline {
agent {
node {
label 'jenkins-agent-web'
customWorkspace '/workspace'
}
}
options {
timestamps()
parallelsAlwaysFailFast()
timeout(time: 15, unit: 'MINUTES')
}
environment {
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}"
BUILD_NUMBER = "${SHORT_SHA}_${BUILD_NUMBER}"
COMMIT_SHA = "${GIT_COMMIT}"
SHORT_SHA = "${GIT_COMMIT.take(7)}"
}
stages {
stage('Stage 1') {
parallel {
stage('Build') {
steps {
dir('services/git-bridge') {
sh 'make docker_build_base'
}
}
}
stage('Install monorepo') {
steps {
sh 'make monorepo_setup'
}
}
}
}
stage('Stage 2') {
parallel {
stage('Build production and push') {
steps {
dir('services/git-bridge') {
sh 'make docker_build'
sh 'make push_branch'
}
}
}
stage('Format Java') {
steps {
dir('services/git-bridge') {
sh 'make docker_format'
}
}
}
stage('Format Jenkinsfile') {
steps {
sh 'bin/run monorepo npm run format:jenkins'
}
}
stage('Test') {
steps {
dir('services/git-bridge') {
sh 'make docker_test'
}
}
}
}
}
stage('Push Production') {
steps {
dir('services/git-bridge') {
sh 'make push'
}
}
}
}
post {
// Collect junit test results for both success and failure case.
always {
junit checksName: 'git-bridge test results', testResults: 'services/git-bridge/target/surefire-reports/*.xml'
}
cleanup {
dir('services/git-bridge') {
sh 'make clean_ci'
}
sh 'make clean_jenkins'
}
}
}

View File

@@ -3,6 +3,17 @@
MVN_OPTS := --no-transfer-progress
MVN_TARGET := target/writelatex-git-bridge-1.0-SNAPSHOT-jar-with-dependencies.jar
export BUILD_NUMBER ?= local
export BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
export COMMIT_SHA ?= $(shell git rev-parse HEAD)
PROJECT_NAME = git-bridge
IMAGE_CI ?= ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
IMAGE_REPO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME)
IMAGE_REPO_BRANCH ?= $(IMAGE_REPO):$(BRANCH_NAME)
IMAGE_REPO_MAIN ?= $(IMAGE_REPO):main
IMAGE_REPO_FINAL ?= $(IMAGE_REPO_BRANCH)-$(BUILD_NUMBER)
runtime-conf:
/opt/envsubst < conf/envsubst_template.json > conf/runtime.json
@@ -16,10 +27,17 @@ $(MVN_TARGET): $(shell find src -type f) pom.xml
build: $(MVN_TARGET)
docker_build_base:
docker build --tag $(IMAGE_CI) --target base .
docker_build:
docker build --tag $(IMAGE_REPO_BRANCH) --tag $(IMAGE_REPO_FINAL) .
format:
mvn $(MVN_OPTS) com.spotify.fmt:fmt-maven-plugin:check
docker_format:
docker run --rm -v $(PWD):$(PWD) -w $(PWD) --user node $(IMAGE_CI) make format
format_fix:
mvn $(MVN_OPTS) com.spotify.fmt:fmt-maven-plugin:format
@@ -28,6 +46,8 @@ format_fix:
test:
mvn $(MVN_OPTS) test
docker_test:
docker run --rm -v $(PWD):$(PWD) -w $(PWD) --user node $(IMAGE_CI) make test
clean:
mvn $(MVN_OPTS) clean
@@ -36,5 +56,14 @@ clean:
package: clean
mvn $(MVN_OPTS) package -DskipTests
push:
docker push $(IMAGE_REPO_FINAL)
push_branch:
docker push $(IMAGE_REPO_BRANCH)
clean_ci:
-docker rmi -f $(IMAGE_CI) $(IMAGE_REPO_BRANCH) $(IMAGE_REPO_FINAL)
-git clean -xdf .
.PHONY: run package build clean test runtime-conf

View File

@@ -1,72 +1,41 @@
/* eslint-disable
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import sinon from 'sinon'
import { expect } from 'chai'
import Settings from '@overleaf/settings'
import request from 'request'
import assert from 'node:assert'
import Path from 'node:path'
import crypto from 'node:crypto'
import mongodb from 'mongodb-legacy'
import nock from 'nock'
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
import * as HistoryId from './helpers/HistoryId.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockFileStore = () => nock('http://127.0.0.1:3009')
const MockWeb = () => nock('http://127.0.0.1:3000')
const sha = data => crypto.createHash('sha1').update(data).digest('hex')
describe('FileTree Diffs', function () {
beforeEach(function (done) {
return ProjectHistoryApp.ensureRunning(error => {
if (error != null) {
throw error
}
beforeEach(async function () {
await ProjectHistoryApp.promises.ensureRunning()
this.historyId = new ObjectId().toString()
this.projectId = new ObjectId().toString()
this.historyId = new ObjectId().toString()
this.projectId = new ObjectId().toString()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
})
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: this.historyId } },
})
return ProjectHistoryClient.initializeProject(
this.historyId,
(error, olProject) => {
if (error != null) {
throw error
}
return done()
}
)
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
})
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: this.historyId } },
})
await ProjectHistoryClient.promises.initializeProject(this.historyId)
})
afterEach(function () {
return nock.cleanAll()
})
it('should return a diff of the updates to a doc from a single chunk', function (done) {
it('should return a diff of the updates to a doc from a single chunk', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/7/history`)
.reply(200, {
@@ -139,45 +108,39 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
3,
7,
(error, diff) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [
{
pathname: 'foo.tex',
operation: 'edited',
},
{
pathname: 'deleted.tex',
operation: 'removed',
deletedAtV: 5,
editable: true,
},
{
newPathname: 'newName.tex',
pathname: 'renamed.tex',
operation: 'renamed',
editable: true,
},
{
pathname: 'added.tex',
operation: 'added',
editable: true,
},
],
})
return done()
}
7
)
expect(diff).to.deep.equal({
diff: [
{
pathname: 'foo.tex',
operation: 'edited',
},
{
pathname: 'deleted.tex',
operation: 'removed',
deletedAtV: 5,
editable: true,
},
{
newPathname: 'newName.tex',
pathname: 'renamed.tex',
operation: 'renamed',
editable: true,
},
{
pathname: 'added.tex',
operation: 'added',
editable: true,
},
],
})
})
it('should return a diff of the updates to a doc across multiple chunks', function (done) {
it('should return a diff of the updates to a doc across multiple chunks', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.reply(200, {
@@ -299,53 +262,47 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
2,
7,
(error, diff) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [
{
pathname: 'foo.tex',
operation: 'edited',
},
{
pathname: 'bar.tex',
operation: 'edited',
},
{
pathname: 'baz.tex',
editable: true,
},
{
pathname: 'deleted.tex',
operation: 'removed',
deletedAtV: 4,
editable: true,
},
{
newPathname: 'newName.tex',
pathname: 'renamed.tex',
operation: 'renamed',
editable: true,
},
{
pathname: 'added.tex',
operation: 'added',
editable: true,
},
],
})
return done()
}
7
)
expect(diff).to.deep.equal({
diff: [
{
pathname: 'foo.tex',
operation: 'edited',
},
{
pathname: 'bar.tex',
operation: 'edited',
},
{
pathname: 'baz.tex',
editable: true,
},
{
pathname: 'deleted.tex',
operation: 'removed',
deletedAtV: 4,
editable: true,
},
{
newPathname: 'newName.tex',
pathname: 'renamed.tex',
operation: 'renamed',
editable: true,
},
{
pathname: 'added.tex',
operation: 'added',
editable: true,
},
],
})
})
it('should return a diff that includes multiple renames', function (done) {
it('should return a diff that includes multiple renames', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.reply(200, {
@@ -387,30 +344,24 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
3,
5,
(error, diff) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [
{
newPathname: 'three.tex',
pathname: 'one.tex',
operation: 'renamed',
editable: true,
},
],
})
return done()
}
5
)
expect(diff).to.deep.equal({
diff: [
{
newPathname: 'three.tex',
pathname: 'one.tex',
operation: 'renamed',
editable: true,
},
],
})
})
it('should handle deleting then re-adding a file', function (done) {
it('should handle deleting then re-adding a file', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.reply(200, {
@@ -454,29 +405,23 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
3,
5,
(error, diff) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [
{
pathname: 'one.tex',
operation: 'added',
editable: null,
},
],
})
return done()
}
5
)
expect(diff).to.deep.equal({
diff: [
{
pathname: 'one.tex',
operation: 'added',
editable: null,
},
],
})
})
it('should handle deleting the renaming a file to the same place', function (done) {
it('should handle deleting the renaming a file to the same place', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.reply(200, {
@@ -522,30 +467,24 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
3,
5,
(error, diff) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [
{
pathname: 'two.tex',
newPathname: 'one.tex',
operation: 'renamed',
editable: true,
},
],
})
return done()
}
5
)
expect(diff).to.deep.equal({
diff: [
{
pathname: 'two.tex',
newPathname: 'one.tex',
operation: 'renamed',
editable: true,
},
],
})
})
it('should handle adding then renaming a file', function (done) {
it('should handle adding then renaming a file', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.reply(200, {
@@ -585,29 +524,23 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
3,
5,
(error, diff) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [
{
pathname: 'two.tex',
operation: 'added',
editable: true,
},
],
})
return done()
}
5
)
expect(diff).to.deep.equal({
diff: [
{
pathname: 'two.tex',
operation: 'added',
editable: true,
},
],
})
})
it('should return 422 with a chunk with an invalid rename', function (done) {
it('should return 422 with a chunk with an invalid rename', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/6/history`)
.reply(200, {
@@ -643,21 +576,15 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { statusCode } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
5,
6,
(error, diff, statusCode) => {
if (error != null) {
throw error
}
expect(statusCode).to.equal(422)
return done()
}
6
)
expect(statusCode).to.equal(422)
})
it('should return 200 with a chunk with an invalid add', function (done) {
it('should return 200 with a chunk with an invalid add', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/6/history`)
.reply(200, {
@@ -691,30 +618,24 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff, statusCode } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
5,
6,
(error, diff, statusCode) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [
{
pathname: 'foo.tex',
operation: 'added',
editable: null,
},
],
})
expect(statusCode).to.equal(200)
return done()
}
6
)
expect(diff).to.deep.equal({
diff: [
{
pathname: 'foo.tex',
operation: 'added',
editable: null,
},
],
})
expect(statusCode).to.equal(200)
})
it('should handle edits of missing/invalid files ', function (done) {
it('should handle edits of missing/invalid files ', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.reply(200, {
@@ -751,28 +672,22 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
3,
5,
(error, diff) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [
{
operation: 'edited',
pathname: 'new.tex',
},
],
})
return done()
}
5
)
expect(diff).to.deep.equal({
diff: [
{
operation: 'edited',
pathname: 'new.tex',
},
],
})
})
it('should handle deletions of missing/invalid files ', function (done) {
it('should handle deletions of missing/invalid files ', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.reply(200, {
@@ -809,23 +724,17 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
3,
5,
(error, diff) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [],
})
return done()
}
5
)
expect(diff).to.deep.equal({
diff: [],
})
})
return it('should handle renames of missing/invalid files ', function (done) {
return it('should handle renames of missing/invalid files ', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.reply(200, {
@@ -862,19 +771,13 @@ describe('FileTree Diffs', function () {
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
})
return ProjectHistoryClient.getFileTreeDiff(
const { diff } = await ProjectHistoryClient.getFileTreeDiff(
this.projectId,
3,
5,
(error, diff) => {
if (error != null) {
throw error
}
expect(diff).to.deep.equal({
diff: [],
})
return done()
}
5
)
expect(diff).to.deep.equal({
diff: [],
})
})
})

View File

@@ -1,7 +1,6 @@
import async from 'async'
import nock from 'nock'
import { expect } from 'chai'
import request from 'request'
import { fetchNothing, fetchJsonWithResponse } from '@overleaf/fetch-utils'
import assert from 'node:assert'
import mongodb from 'mongodb-legacy'
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
@@ -15,42 +14,38 @@ const MockWeb = () => nock('http://127.0.0.1:3000')
describe('Flushing old queues', function () {
const historyId = new ObjectId().toString()
beforeEach(function (done) {
beforeEach(async function () {
this.timestamp = new Date()
ProjectHistoryApp.ensureRunning(error => {
if (error) {
throw error
}
this.projectId = new ObjectId().toString()
this.docId = new ObjectId().toString()
this.fileId = new ObjectId().toString()
await ProjectHistoryApp.promises.ensureRunning()
this.projectId = new ObjectId().toString()
this.docId = new ObjectId().toString()
this.fileId = new ObjectId().toString()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: historyId,
})
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: {
history: {
id: historyId,
},
},
})
MockHistoryStore()
.get(`/api/projects/${historyId}/latest/history`)
.reply(200, {
chunk: {
startVersion: 0,
history: {
changes: [],
},
},
})
ProjectHistoryClient.initializeProject(historyId, done)
MockHistoryStore().post('/api/projects').reply(200, {
projectId: historyId,
})
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: {
history: {
id: historyId,
},
},
})
MockHistoryStore()
.get(`/api/projects/${historyId}/latest/history`)
.reply(200, {
chunk: {
startVersion: 0,
history: {
changes: [],
},
},
})
await ProjectHistoryClient.promises.initializeProject(historyId)
})
afterEach(function () {
@@ -59,7 +54,7 @@ describe('Flushing old queues', function () {
describe('retrying an unflushed project', function () {
describe('when the update is older than the cutoff', function () {
beforeEach(function (done) {
beforeEach(async function () {
this.flushCall = MockHistoryStore()
.put(
`/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
@@ -73,69 +68,56 @@ describe('Flushing old queues', function () {
doc: this.docId,
meta: { user_id: this.user_id, ts: new Date() },
}
async.series(
[
cb =>
ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb),
cb =>
ProjectHistoryClient.setFirstOpTimestamp(
this.projectId,
Date.now() - 24 * 3600 * 1000,
cb
),
],
done
await ProjectHistoryClient.promises.pushRawUpdate(
this.projectId,
update
)
await ProjectHistoryClient.promises.setFirstOpTimestamp(
this.projectId,
Date.now() - 24 * 3600 * 1000
)
})
it('flushes the project history queue', function (done) {
request.post(
it('flushes the project history queue', async function () {
const response = await fetchNothing(
'http://127.0.0.1:3054/flush/old?maxAge=10800',
{
url: 'http://127.0.0.1:3054/flush/old?maxAge=10800',
},
(error, res, body) => {
if (error) {
return done(error)
}
expect(res.statusCode).to.equal(200)
assert(
this.flushCall.isDone(),
'made calls to history service to store updates'
)
done()
method: 'POST',
}
)
expect(response.status).to.equal(200)
assert(
this.flushCall.isDone(),
'made calls to history service to store updates'
)
})
it('flushes the project history queue in the background when requested', function (done) {
request.post(
it('flushes the project history queue in the background when requested', async function () {
const { json, response } = await fetchJsonWithResponse(
'http://127.0.0.1:3054/flush/old?maxAge=10800&background=1',
{
url: 'http://127.0.0.1:3054/flush/old?maxAge=10800&background=1',
},
(error, res, body) => {
if (error) {
return done(error)
}
expect(res.statusCode).to.equal(200)
expect(body).to.equal('{"message":"running flush in background"}')
assert(
!this.flushCall.isDone(),
'did not make calls to history service to store updates in the foreground'
)
setTimeout(() => {
assert(
this.flushCall.isDone(),
'made calls to history service to store updates in the background'
)
done()
}, 1_000)
method: 'POST',
}
)
expect(response.status).to.equal(200)
expect(json).to.deep.equal({
message: 'running flush in background',
})
assert(
!this.flushCall.isDone(),
'did not make calls to history service to store updates in the foreground'
)
await new Promise(resolve => setTimeout(resolve, 1000))
assert(
this.flushCall.isDone(),
'made calls to history service to store updates in the background'
)
})
})
describe('when the update is newer than the cutoff', function () {
beforeEach(function (done) {
beforeEach(async function () {
this.flushCall = MockHistoryStore()
.put(
`/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
@@ -149,38 +131,28 @@ describe('Flushing old queues', function () {
doc: this.docId,
meta: { user_id: this.user_id, ts: new Date() },
}
async.series(
[
cb =>
ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb),
cb =>
ProjectHistoryClient.setFirstOpTimestamp(
this.projectId,
Date.now() - 60 * 1000,
cb
),
],
done
await ProjectHistoryClient.promises.pushRawUpdate(
this.projectId,
update
)
await ProjectHistoryClient.promises.setFirstOpTimestamp(
this.projectId,
Date.now() - 60 * 1000
)
})
it('does not flush the project history queue', function (done) {
request.post(
it('does not flush the project history queue', async function () {
const response = await fetchNothing(
`http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
{
url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
},
(error, res, body) => {
if (error) {
return done(error)
}
expect(res.statusCode).to.equal(200)
assert(
!this.flushCall.isDone(),
'did not make calls to history service to store updates'
)
done()
method: 'POST',
}
)
expect(response.status).to.equal(200)
assert(
!this.flushCall.isDone(),
'did not make calls to history service to store updates'
)
})
})
@@ -191,7 +163,7 @@ describe('Flushing old queues', function () {
afterEach(function () {
Settings.shortHistoryQueues.length = 0
})
beforeEach(function (done) {
beforeEach(async function () {
this.flushCall = MockHistoryStore()
.put(
`/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
@@ -205,69 +177,56 @@ describe('Flushing old queues', function () {
doc: this.docId,
meta: { user_id: this.user_id, ts: new Date() },
}
async.series(
[
cb =>
ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb),
cb =>
ProjectHistoryClient.setFirstOpTimestamp(
this.projectId,
Date.now() - 60 * 1000,
cb
),
],
done
await ProjectHistoryClient.promises.pushRawUpdate(
this.projectId,
update
)
await ProjectHistoryClient.promises.setFirstOpTimestamp(
this.projectId,
Date.now() - 60 * 1000
)
})
it('flushes the project history queue', function (done) {
request.post(
it('flushes the project history queue', async function () {
const response = await fetchNothing(
`http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
{
url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
},
(error, res, body) => {
if (error) {
return done(error)
}
expect(res.statusCode).to.equal(200)
assert(
this.flushCall.isDone(),
'made calls to history service to store updates'
)
done()
method: 'POST',
}
)
expect(response.status).to.equal(200)
assert(
this.flushCall.isDone(),
'made calls to history service to store updates'
)
})
it('flushes the project history queue in the background when requested', function (done) {
request.post(
it('flushes the project history queue in the background when requested', async function () {
const { json, response } = await fetchJsonWithResponse(
`http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}&background=1`,
{
url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}&background=1`,
},
(error, res, body) => {
if (error) {
return done(error)
}
expect(res.statusCode).to.equal(200)
expect(body).to.equal('{"message":"running flush in background"}')
assert(
!this.flushCall.isDone(),
'did not make calls to history service to store updates in the foreground'
)
setTimeout(() => {
assert(
this.flushCall.isDone(),
'made calls to history service to store updates in the background'
)
done()
}, 1_000)
method: 'POST',
}
)
expect(response.status).to.equal(200)
expect(json).to.deep.equal({
message: 'running flush in background',
})
assert(
!this.flushCall.isDone(),
'did not make calls to history service to store updates in the foreground'
)
await new Promise(resolve => setTimeout(resolve, 1000))
assert(
this.flushCall.isDone(),
'made calls to history service to store updates in the background'
)
})
})
describe('when the update does not have a timestamp', function () {
beforeEach(function (done) {
beforeEach(async function () {
this.flushCall = MockHistoryStore()
.put(
`/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
@@ -282,43 +241,41 @@ describe('Flushing old queues', function () {
meta: { user_id: this.user_id, ts: new Date() },
}
this.startDate = Date.now()
async.series(
[
cb =>
ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb),
cb =>
ProjectHistoryClient.clearFirstOpTimestamp(this.projectId, cb),
],
done
await ProjectHistoryClient.promises.pushRawUpdate(
this.projectId,
update
)
await new Promise((resolve, reject) => {
ProjectHistoryClient.clearFirstOpTimestamp(this.projectId, err => {
if (err) reject(err)
else resolve()
})
})
})
it('flushes the project history queue anyway', function (done) {
request.post(
it('flushes the project history queue anyway', async function () {
const response = await fetchNothing(
`http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
{
url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
},
(error, res, body) => {
if (error) {
return done(error)
}
expect(res.statusCode).to.equal(200)
assert(
this.flushCall.isDone(),
'made calls to history service to store updates'
)
ProjectHistoryClient.getFirstOpTimestamp(
this.projectId,
(err, result) => {
if (err) {
return done(err)
}
expect(result).to.be.null
done()
}
)
method: 'POST',
}
)
expect(response.status).to.equal(200)
assert(
this.flushCall.isDone(),
'made calls to history service to store updates'
)
const result = await new Promise((resolve, reject) => {
ProjectHistoryClient.getFirstOpTimestamp(
this.projectId,
(err, result) => {
if (err) reject(err)
else resolve(result)
}
)
})
expect(result).to.be.null
})
})
})

View File

@@ -16,76 +16,67 @@ const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
describe('GetChangesInChunkSince', function () {
let projectId, historyId
beforeEach(function (done) {
beforeEach(async function () {
projectId = new ObjectId().toString()
historyId = new ObjectId().toString()
ProjectHistoryApp.ensureRunning(error => {
if (error) throw error
await ProjectHistoryApp.promises.ensureRunning()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: historyId,
})
ProjectHistoryClient.initializeProject(historyId, (error, olProject) => {
if (error) throw error
MockWeb()
.get(`/project/${projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: olProject.id } },
})
MockHistoryStore()
.get(`/api/projects/${historyId}/latest/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/7/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/6/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/5/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/4/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/3/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/2/history`)
.replyWithFile(200, fixture('chunks/0-3.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/1/history`)
.replyWithFile(200, fixture('chunks/0-3.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/0/history`)
.replyWithFile(200, fixture('chunks/0-3.json'))
done()
})
MockHistoryStore().post('/api/projects').reply(200, {
projectId: historyId,
})
const olProject =
await ProjectHistoryClient.promises.initializeProject(historyId)
MockWeb()
.get(`/project/${projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: olProject.id } },
})
MockHistoryStore()
.get(`/api/projects/${historyId}/latest/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/7/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/6/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/5/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/4/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/3/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/2/history`)
.replyWithFile(200, fixture('chunks/0-3.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/1/history`)
.replyWithFile(200, fixture('chunks/0-3.json'))
MockHistoryStore()
.get(`/api/projects/${historyId}/versions/0/history`)
.replyWithFile(200, fixture('chunks/0-3.json'))
})
afterEach(function () {
nock.cleanAll()
})
function expectChangesSince(version, n, changes, done) {
ProjectHistoryClient.getChangesInChunkSince(
async function expectChangesSince(version, n, changes) {
const { body } = await ProjectHistoryClient.getChangesInChunkSince(
projectId,
version,
{},
(error, got) => {
if (error) throw error
expect(got.latestStartVersion).to.equal(6)
expect(got.changes).to.have.length(n)
expect(got.changes.map(c => Core.Change.fromRaw(c))).to.deep.equal(
changes.map(c => Core.Change.fromRaw(c))
)
done()
}
{}
)
expect(body.latestStartVersion).to.equal(6)
expect(body.changes).to.have.length(n)
expect(body.changes.map(c => Core.Change.fromRaw(c))).to.deep.equal(
changes.map(c => Core.Change.fromRaw(c))
)
}
@@ -138,21 +129,19 @@ describe('GetChangesInChunkSince', function () {
}
for (const [since, { name, n, changes }] of Object.entries(cases)) {
it(name, function (done) {
expectChangesSince(since, n, changes, done)
it(name, async function () {
await expectChangesSince(since, n, changes)
})
}
it('should return an error when past the end version', function (done) {
ProjectHistoryClient.getChangesInChunkSince(
it('should return an error when past the end version', async function () {
const { statusCode } = await ProjectHistoryClient.getChangesInChunkSince(
projectId,
9,
{ allowErrors: true },
(error, _body, statusCode) => {
if (error) throw error
expect(statusCode).to.equal(400)
done()
{
allowErrors: true,
}
)
expect(statusCode).to.equal(400)
})
})

View File

@@ -11,272 +11,176 @@ const MockWeb = () => nock('http://127.0.0.1:3000')
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
describe('Labels', function () {
beforeEach(function (done) {
ProjectHistoryApp.ensureRunning(error => {
if (error != null) {
throw error
}
beforeEach(async function () {
await ProjectHistoryApp.promises.ensureRunning()
this.historyId = new ObjectId().toString()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
this.historyId = new ObjectId().toString()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
})
const olProject = await ProjectHistoryClient.promises.initializeProject(
this.historyId
)
this.project_id = new ObjectId().toString()
MockWeb()
.get(`/project/${this.project_id}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: olProject.id } },
})
ProjectHistoryClient.initializeProject(
this.historyId,
(error, olProject) => {
if (error != null) {
throw error
}
this.project_id = new ObjectId().toString()
MockWeb()
.get(`/project/${this.project_id}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: olProject.id } },
})
MockHistoryStore()
.get(`/api/projects/${this.historyId}/latest/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${this.historyId}/latest/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/7/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
.persist()
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/8/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
.persist()
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/7/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
.persist()
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/8/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
.persist()
this.comment = 'a saved version comment'
this.comment2 = 'another saved version comment'
this.user_id = new ObjectId().toString()
this.created_at = new Date(1)
done()
}
)
})
this.comment = 'a saved version comment'
this.comment2 = 'another saved version comment'
this.user_id = new ObjectId().toString()
this.created_at = new Date(1)
})
afterEach(function () {
nock.cleanAll()
})
it('can create and get labels', function (done) {
ProjectHistoryClient.createLabel(
it('can create and get labels', async function () {
const label = await ProjectHistoryClient.createLabel(
this.project_id,
this.user_id,
7,
this.comment,
this.created_at,
(error, label) => {
if (error != null) {
throw error
}
ProjectHistoryClient.getLabels(this.project_id, (error, labels) => {
if (error != null) {
throw error
}
expect(labels).to.deep.equal([label])
done()
})
}
this.created_at
)
const labels = await ProjectHistoryClient.getLabels(this.project_id)
expect(labels).to.deep.equal([label])
})
it('can create and get labels with no user id', function (done) {
it('can create and get labels with no user id', async function () {
const userId = undefined
ProjectHistoryClient.createLabel(
const label = await ProjectHistoryClient.createLabel(
this.project_id,
userId,
7,
this.comment,
this.created_at,
(error, label) => {
if (error != null) {
throw error
}
ProjectHistoryClient.getLabels(this.project_id, (error, labels) => {
if (error != null) {
throw error
}
expect(labels).to.deep.equal([label])
done()
})
}
this.created_at
)
const labels = await ProjectHistoryClient.getLabels(this.project_id)
expect(labels).to.deep.equal([label])
})
it('can delete labels', function (done) {
ProjectHistoryClient.createLabel(
it('can delete labels', async function () {
const label = await ProjectHistoryClient.createLabel(
this.project_id,
this.user_id,
7,
this.comment,
this.created_at,
(error, label) => {
if (error != null) {
throw error
}
ProjectHistoryClient.deleteLabel(this.project_id, label.id, error => {
if (error != null) {
throw error
}
ProjectHistoryClient.getLabels(this.project_id, (error, labels) => {
if (error != null) {
throw error
}
expect(labels).to.deep.equal([])
done()
})
})
}
this.created_at
)
await ProjectHistoryClient.deleteLabel(this.project_id, label.id)
const labels = await ProjectHistoryClient.getLabels(this.project_id)
expect(labels).to.deep.equal([])
})
it('can delete labels for the current user', function (done) {
ProjectHistoryClient.createLabel(
it('can delete labels for the current user', async function () {
const label = await ProjectHistoryClient.createLabel(
this.project_id,
this.user_id,
7,
this.comment,
this.created_at,
(error, label) => {
if (error != null) {
throw error
}
ProjectHistoryClient.deleteLabelForUser(
this.project_id,
this.user_id,
label.id,
error => {
if (error != null) {
throw error
}
ProjectHistoryClient.getLabels(this.project_id, (error, labels) => {
if (error != null) {
throw error
}
expect(labels).to.deep.equal([])
done()
})
}
)
}
this.created_at
)
await ProjectHistoryClient.deleteLabelForUser(
this.project_id,
this.user_id,
label.id
)
const labels = await ProjectHistoryClient.getLabels(this.project_id)
expect(labels).to.deep.equal([])
})
it('can transfer ownership of labels', function (done) {
it('can transfer ownership of labels', async function () {
const fromUser = new ObjectId().toString()
const toUser = new ObjectId().toString()
ProjectHistoryClient.createLabel(
const label = await ProjectHistoryClient.createLabel(
this.project_id,
fromUser,
7,
this.comment,
this.created_at,
(error, label) => {
if (error != null) {
throw error
}
ProjectHistoryClient.createLabel(
this.project_id,
fromUser,
7,
this.comment2,
this.created_at,
(error, label2) => {
if (error != null) {
throw error
}
ProjectHistoryClient.transferLabelOwnership(
fromUser,
toUser,
error => {
if (error != null) {
throw error
}
ProjectHistoryClient.getLabels(
this.project_id,
(error, labels) => {
if (error != null) {
throw error
}
expect(labels).to.deep.equal([
{
id: label.id,
comment: label.comment,
version: label.version,
created_at: label.created_at,
user_id: toUser,
},
{
id: label2.id,
comment: label2.comment,
version: label2.version,
created_at: label2.created_at,
user_id: toUser,
},
])
done()
}
)
}
)
}
)
}
this.created_at
)
const label2 = await ProjectHistoryClient.createLabel(
this.project_id,
fromUser,
7,
this.comment2,
this.created_at
)
await ProjectHistoryClient.transferLabelOwnership(fromUser, toUser)
const labels = await ProjectHistoryClient.getLabels(this.project_id)
expect(labels).to.deep.equal([
{
id: label.id,
comment: label.comment,
version: label.version,
created_at: label.created_at,
user_id: toUser,
},
{
id: label2.id,
comment: label2.comment,
version: label2.version,
created_at: label2.created_at,
user_id: toUser,
},
])
})
it('should return labels with summarized updates', function (done) {
ProjectHistoryClient.createLabel(
it('should return labels with summarized updates', async function () {
const label = await ProjectHistoryClient.createLabel(
this.project_id,
this.user_id,
8,
this.comment,
this.created_at,
(error, label) => {
if (error != null) {
throw error
}
ProjectHistoryClient.getSummarizedUpdates(
this.project_id,
{ min_count: 1 },
(error, updates) => {
if (error != null) {
throw error
}
expect(updates).to.deep.equal({
nextBeforeTimestamp: 6,
updates: [
{
fromV: 6,
toV: 8,
meta: {
users: ['5a5637efdac84e81b71014c4', 31],
start_ts: 1512383567277,
end_ts: 1512383572877,
},
pathnames: ['bar.tex', 'main.tex'],
project_ops: [],
labels: [
{
id: label.id.toString(),
comment: this.comment,
version: 8,
user_id: this.user_id,
created_at: this.created_at.toISOString(),
},
],
},
],
})
done()
}
)
}
this.created_at
)
const updates = await ProjectHistoryClient.getSummarizedUpdates(
this.project_id,
{ min_count: 1 }
)
expect(updates).to.deep.equal({
nextBeforeTimestamp: 6,
updates: [
{
fromV: 6,
toV: 8,
meta: {
users: ['5a5637efdac84e81b71014c4', 31],
start_ts: 1512383567277,
end_ts: 1512383572877,
},
pathnames: ['bar.tex', 'main.tex'],
project_ops: [],
labels: [
{
id: label.id.toString(),
comment: this.comment,
version: 8,
user_id: this.user_id,
created_at: this.created_at.toISOString(),
},
],
},
],
})
})
})

View File

@@ -12,41 +12,31 @@ const MockWeb = () => nock('http://127.0.0.1:3000')
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
describe('LatestSnapshot', function () {
beforeEach(function (done) {
ProjectHistoryApp.ensureRunning(error => {
if (error) {
throw error
}
beforeEach(async function () {
await ProjectHistoryApp.promises.ensureRunning()
this.historyId = new ObjectId().toString()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
})
ProjectHistoryClient.initializeProject(
this.historyId,
(error, v1Project) => {
if (error) {
throw error
}
this.projectId = new ObjectId().toString()
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: v1Project.id } },
})
done()
}
)
this.historyId = new ObjectId().toString()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
})
const v1Project = await ProjectHistoryClient.promises.initializeProject(
this.historyId
)
this.projectId = new ObjectId().toString()
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: v1Project.id } },
})
})
afterEach(function () {
nock.cleanAll()
})
it('should return the snapshot with applied changes, metadata and without full content', function (done) {
it('should return the snapshot with applied changes, metadata and without full content', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/latest/history`)
.replyWithFile(200, fixture('chunks/0-3.json'))
@@ -57,30 +47,25 @@ describe('LatestSnapshot', function () {
const changes = fixtureData.chunk.history.changes
const lastTimestamp = changes[changes.length - 1].timestamp
ProjectHistoryClient.getLatestSnapshot(this.projectId, (error, body) => {
if (error) {
throw error
}
expect(body).to.deep.equal({
snapshot: {
files: {
'main.tex': {
hash: 'f28571f561d198b87c24cc6a98b78e87b665e22d',
stringLength: 20649,
operations: [{ textOperation: [1912, 'Hello world', 18726] }],
metadata: { main: true },
},
'foo.tex': {
hash: '4f785a4c192155b240e3042b3a7388b47603f423',
stringLength: 41,
operations: [{ textOperation: [26, '\n\nFour five six'] }],
},
const body = await ProjectHistoryClient.getLatestSnapshot(this.projectId)
expect(body).to.deep.equal({
snapshot: {
files: {
'main.tex': {
hash: 'f28571f561d198b87c24cc6a98b78e87b665e22d',
stringLength: 20649,
operations: [{ textOperation: [1912, 'Hello world', 18726] }],
metadata: { main: true },
},
'foo.tex': {
hash: '4f785a4c192155b240e3042b3a7388b47603f423',
stringLength: 41,
operations: [{ textOperation: [26, '\n\nFour five six'] }],
},
timestamp: lastTimestamp,
},
version: 3,
})
done()
timestamp: lastTimestamp,
},
version: 3,
})
})
})

View File

@@ -11,34 +11,24 @@ const MockWeb = () => nock('http://127.0.0.1:3000')
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
describe('ReadSnapshot', function () {
beforeEach(function (done) {
ProjectHistoryApp.ensureRunning(error => {
if (error) {
throw error
}
beforeEach(async function () {
await ProjectHistoryApp.promises.ensureRunning()
this.historyId = new ObjectId().toString()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
})
ProjectHistoryClient.initializeProject(
this.historyId,
(error, v1Project) => {
if (error) {
throw error
}
this.projectId = new ObjectId().toString()
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: v1Project.id } },
})
done()
}
)
this.historyId = new ObjectId().toString()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
})
const v1Project = await ProjectHistoryClient.promises.initializeProject(
this.historyId
)
this.projectId = new ObjectId().toString()
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: v1Project.id } },
})
})
afterEach(function () {
@@ -46,7 +36,7 @@ describe('ReadSnapshot', function () {
})
describe('of a text file', function () {
it('should return the snapshot of a doc at the given version', function (done) {
it('should return the snapshot of a doc at the given version', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
@@ -59,16 +49,13 @@ describe('ReadSnapshot', function () {
fixture('blobs/c6654ea913979e13e22022653d284444f284a172')
)
ProjectHistoryClient.getSnapshot(
const { body } = await ProjectHistoryClient.getSnapshot(
this.projectId,
'foo.tex',
5,
(error, body) => {
if (error) {
throw error
}
expect(body).to.deep.equal(
`\
5
)
expect(body).to.deep.equal(
`\
Hello world
One two three
@@ -77,13 +64,10 @@ Four five six
Seven eight nine\
`.replace(/^\t/g, '')
)
done()
}
)
})
it('should return the snapshot of a doc at a different version', function (done) {
it('should return the snapshot of a doc at a different version', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/4/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
@@ -96,16 +80,13 @@ Seven eight nine\
fixture('blobs/c6654ea913979e13e22022653d284444f284a172')
)
ProjectHistoryClient.getSnapshot(
const { body } = await ProjectHistoryClient.getSnapshot(
this.projectId,
'foo.tex',
4,
(error, body) => {
if (error) {
throw error
}
expect(body).to.deep.equal(
`\
4
)
expect(body).to.deep.equal(
`\
Hello world
One two three
@@ -114,13 +95,10 @@ Four five six
Seven eight nince\
`.replace(/^\t/g, '')
)
done()
}
)
})
it('should return the snapshot of a doc after a rename version', function (done) {
it('should return the snapshot of a doc after a rename version', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/6/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
@@ -133,16 +111,13 @@ Seven eight nince\
fixture('blobs/c6654ea913979e13e22022653d284444f284a172')
)
ProjectHistoryClient.getSnapshot(
const { body } = await ProjectHistoryClient.getSnapshot(
this.projectId,
'bar.tex',
6,
(error, body) => {
if (error) {
throw error
}
expect(body).to.deep.equal(
`\
6
)
expect(body).to.deep.equal(
`\
Hello world
One two three
@@ -151,9 +126,6 @@ Four five six
Seven eight nine\
`.replace(/^\t/g, '')
)
done()
}
)
})
})
@@ -181,7 +153,7 @@ Seven eight nine\
})
})
it('should return the snapshot of the file at the given version', function (done) {
it('should return the snapshot of the file at the given version', async function () {
MockHistoryStore()
.get(
`/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172`
@@ -191,29 +163,23 @@ Seven eight nine\
fixture('blobs/c6654ea913979e13e22022653d284444f284a172')
)
ProjectHistoryClient.getSnapshot(
const { body } = await ProjectHistoryClient.getSnapshot(
this.projectId,
'binary_file',
4,
(error, body) => {
if (error) {
throw error
}
expect(body).to.deep.equal(
`\
4
)
expect(body).to.deep.equal(
`\
Hello world
One two three
Four five six\
`.replace(/^\t/g, '')
)
done()
}
)
})
it("should return an error when the blob doesn't exist", function (done) {
it("should return an error when the blob doesn't exist", async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/4/history`)
.reply(200, {
@@ -239,22 +205,16 @@ Four five six\
)
.reply(404)
ProjectHistoryClient.getSnapshot(
const { statusCode } = await ProjectHistoryClient.getSnapshot(
this.projectId,
'binary_file',
4,
{ allowErrors: true },
(error, body, statusCode) => {
if (error) {
throw error
}
expect(statusCode).to.equal(500)
done()
}
{ allowErrors: true }
)
expect(statusCode).to.equal(500)
})
it('should return an error when the blob request errors', function (done) {
it('should return an error when the blob request errors', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/4/history`)
.reply(200, {
@@ -280,19 +240,13 @@ Four five six\
)
.replyWithError('oh no!')
ProjectHistoryClient.getSnapshot(
const { statusCode } = await ProjectHistoryClient.getSnapshot(
this.projectId,
'binary_file',
4,
{ allowErrors: true },
(error, body, statusCode) => {
if (error) {
throw error
}
expect(statusCode).to.equal(500)
done()
}
{ allowErrors: true }
)
expect(statusCode).to.equal(500)
})
})
})

View File

@@ -1,20 +1,4 @@
/* eslint-disable
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import sinon from 'sinon'
import { expect } from 'chai'
import Settings from '@overleaf/settings'
import request from 'request'
import assert from 'node:assert'
import mongodb from 'mongodb-legacy'
import nock from 'nock'
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
@@ -22,228 +6,196 @@ import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockFileStore = () => nock('http://127.0.0.1:3009')
const MockWeb = () => nock('http://127.0.0.1:3000')
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
describe('Summarized updates', function () {
beforeEach(function (done) {
beforeEach(async function () {
this.projectId = new ObjectId().toString()
this.historyId = new ObjectId().toString()
return ProjectHistoryApp.ensureRunning(error => {
if (error != null) {
throw error
}
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
await ProjectHistoryApp.promises.ensureRunning()
MockHistoryStore().post('/api/projects').reply(200, {
projectId: this.historyId,
})
const olProject = await ProjectHistoryClient.promises.initializeProject(
this.historyId
)
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: olProject.id } },
})
return ProjectHistoryClient.initializeProject(
this.historyId,
(error, olProject) => {
if (error != null) {
throw error
}
MockWeb()
.get(`/project/${this.projectId}/details`)
.reply(200, {
name: 'Test Project',
overleaf: { history: { id: olProject.id } },
})
MockHistoryStore()
.get(`/api/projects/${this.historyId}/latest/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/6/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/3/history`)
.replyWithFile(200, fixture('chunks/0-3.json'))
return done()
}
)
})
MockHistoryStore()
.get(`/api/projects/${this.historyId}/latest/history`)
.replyWithFile(200, fixture('chunks/7-8.json'))
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/6/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/3/history`)
.replyWithFile(200, fixture('chunks/0-3.json'))
})
afterEach(function () {
return nock.cleanAll()
})
it('should return the latest summarized updates from a single chunk', function (done) {
return ProjectHistoryClient.getSummarizedUpdates(
it('should return the latest summarized updates from a single chunk', async function () {
const updates = await ProjectHistoryClient.getSummarizedUpdates(
this.projectId,
{ min_count: 1 },
(error, updates) => {
if (error != null) {
throw error
}
expect(updates).to.deep.equal({
nextBeforeTimestamp: 6,
updates: [
{
fromV: 6,
toV: 8,
meta: {
users: ['5a5637efdac84e81b71014c4', 31],
start_ts: 1512383567277,
end_ts: 1512383572877,
},
pathnames: ['bar.tex', 'main.tex'],
project_ops: [],
labels: [],
},
],
})
return done()
}
{ min_count: 1 }
)
expect(updates).to.deep.equal({
nextBeforeTimestamp: 6,
updates: [
{
fromV: 6,
toV: 8,
meta: {
users: ['5a5637efdac84e81b71014c4', 31],
start_ts: 1512383567277,
end_ts: 1512383572877,
},
pathnames: ['bar.tex', 'main.tex'],
project_ops: [],
labels: [],
},
],
})
})
it('should return the latest summarized updates, with min_count spanning multiple chunks', function (done) {
return ProjectHistoryClient.getSummarizedUpdates(
it('should return the latest summarized updates, with min_count spanning multiple chunks', async function () {
const updates = await ProjectHistoryClient.getSummarizedUpdates(
this.projectId,
{ min_count: 5 },
(error, updates) => {
if (error != null) {
throw error
}
expect(updates).to.deep.equal({
updates: [
{ min_count: 5 }
)
expect(updates).to.deep.equal({
updates: [
{
fromV: 6,
toV: 8,
meta: {
users: ['5a5637efdac84e81b71014c4', 31],
start_ts: 1512383567277,
end_ts: 1512383572877,
},
pathnames: ['bar.tex', 'main.tex'],
project_ops: [],
labels: [],
},
{
fromV: 5,
toV: 6,
meta: {
users: [31],
start_ts: 1512383366120,
end_ts: 1512383366120,
},
pathnames: [],
project_ops: [
{
fromV: 6,
toV: 8,
meta: {
users: ['5a5637efdac84e81b71014c4', 31],
start_ts: 1512383567277,
end_ts: 1512383572877,
atV: 5,
rename: {
pathname: 'foo.tex',
newPathname: 'bar.tex',
},
pathnames: ['bar.tex', 'main.tex'],
project_ops: [],
labels: [],
},
{
fromV: 5,
toV: 6,
meta: {
users: [31],
start_ts: 1512383366120,
end_ts: 1512383366120,
},
pathnames: [],
project_ops: [
{
atV: 5,
rename: {
pathname: 'foo.tex',
newPathname: 'bar.tex',
},
},
],
labels: [],
},
{
fromV: 2,
toV: 5,
meta: {
users: [31],
start_ts: 1512383313724,
end_ts: 1512383362905,
},
pathnames: ['foo.tex'],
project_ops: [],
labels: [],
},
{
fromV: 1,
toV: 2,
meta: {
users: [31],
start_ts: 1512383246874,
end_ts: 1512383246874,
},
pathnames: [],
project_ops: [
{
atV: 1,
rename: {
pathname: 'bar.tex',
newPathname: 'foo.tex',
},
},
],
labels: [],
},
{
fromV: 0,
toV: 1,
meta: {
users: [31],
start_ts: 1512383015633,
end_ts: 1512383015633,
},
pathnames: ['main.tex'],
project_ops: [],
labels: [],
},
],
})
return done()
}
)
labels: [],
},
{
fromV: 2,
toV: 5,
meta: {
users: [31],
start_ts: 1512383313724,
end_ts: 1512383362905,
},
pathnames: ['foo.tex'],
project_ops: [],
labels: [],
},
{
fromV: 1,
toV: 2,
meta: {
users: [31],
start_ts: 1512383246874,
end_ts: 1512383246874,
},
pathnames: [],
project_ops: [
{
atV: 1,
rename: {
pathname: 'bar.tex',
newPathname: 'foo.tex',
},
},
],
labels: [],
},
{
fromV: 0,
toV: 1,
meta: {
users: [31],
start_ts: 1512383015633,
end_ts: 1512383015633,
},
pathnames: ['main.tex'],
project_ops: [],
labels: [],
},
],
})
})
it('should return the summarized updates from a before version at the start of a chunk', function (done) {
it('should return the summarized updates from a before version at the start of a chunk', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/4/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
return ProjectHistoryClient.getSummarizedUpdates(
const updates = await ProjectHistoryClient.getSummarizedUpdates(
this.projectId,
{ before: 4 },
(error, updates) => {
if (error != null) {
throw error
}
expect(updates.updates[0].toV).to.equal(4)
return done()
}
{ before: 4 }
)
expect(updates.updates[0].toV).to.equal(4)
})
it('should return the summarized updates from a before version in the middle of a chunk', function (done) {
it('should return the summarized updates from a before version in the middle of a chunk', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/5/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
return ProjectHistoryClient.getSummarizedUpdates(
const updates = await ProjectHistoryClient.getSummarizedUpdates(
this.projectId,
{ before: 5 },
(error, updates) => {
if (error != null) {
throw error
}
expect(updates.updates[0].toV).to.equal(5)
return done()
}
{ before: 5 }
)
expect(updates.updates[0].toV).to.equal(5)
})
return it('should return the summarized updates from a before version at the end of a chunk', function (done) {
it('should return the summarized updates from a before version at the end of a chunk', async function () {
MockHistoryStore()
.get(`/api/projects/${this.historyId}/versions/6/history`)
.replyWithFile(200, fixture('chunks/4-6.json'))
return ProjectHistoryClient.getSummarizedUpdates(
const updates = await ProjectHistoryClient.getSummarizedUpdates(
this.projectId,
{ before: 6 },
(error, updates) => {
if (error != null) {
throw error
}
expect(updates.updates[0].toV).to.equal(6)
return done()
}
{ before: 6 }
)
expect(updates.updates[0].toV).to.equal(6)
})
})

View File

@@ -4,7 +4,13 @@ import Settings from '@overleaf/settings'
import RedisWrapper from '@overleaf/redis-wrapper'
import { db } from '../../../../app/js/mongodb.js'
import { promisify } from '@overleaf/promise-utils'
import { fetchJsonWithResponse, fetchNothing } from '@overleaf/fetch-utils'
import {
fetchJson,
fetchJsonWithResponse,
fetchNothing,
fetchStringWithResponse,
RequestFailedError,
} from '@overleaf/fetch-utils'
const rclient = RedisWrapper.createClient(Settings.redis.project_history)
const Keys = Settings.redis.project_history.key_schema
@@ -53,21 +59,13 @@ export function flushProject(projectId, options, callback) {
)
}
export function getSummarizedUpdates(projectId, query, callback) {
request.get(
{
url: `http://127.0.0.1:3054/project/${projectId}/updates`,
qs: query,
json: true,
},
(error, res, body) => {
if (error) {
return callback(error)
}
expect(res.statusCode).to.equal(200)
callback(error, body)
}
)
export async function getSummarizedUpdates(projectId, query) {
const url = new URL(`http://127.0.0.1:3054/project/${projectId}/updates`)
Object.keys(query).forEach(key => {
url.searchParams.set(key, query[key])
})
return await fetchJson(url.toString())
}
export async function getDiff(projectId, pathname, from, to) {
@@ -76,89 +74,65 @@ export async function getDiff(projectId, pathname, from, to) {
url.searchParams.set('from', from)
url.searchParams.set('to', to)
const { response, json } = await fetchJsonWithResponse(url.toString())
expect(response.status).to.equal(200)
return json
return await fetchJson(url.toString())
}
export function getFileTreeDiff(projectId, from, to, callback) {
request.get(
{
url: `http://127.0.0.1:3054/project/${projectId}/filetree/diff`,
qs: {
from,
to,
},
json: true,
},
(error, res, body) => {
if (error) {
return callback(error)
}
callback(error, body, res.statusCode)
}
export async function getFileTreeDiff(projectId, from, to) {
const url = new URL(
`http://127.0.0.1:3054/project/${projectId}/filetree/diff`
)
}
url.searchParams.set('from', from)
url.searchParams.set('to', to)
export function getChangesInChunkSince(projectId, since, options, callback) {
request.get(
{
url: `http://127.0.0.1:3054/project/${projectId}/changes-in-chunk`,
qs: {
since,
},
json: true,
},
(error, res, body) => {
if (error) return callback(error)
if (!options.allowErrors) {
expect(res.statusCode).to.equal(200)
}
callback(null, body, res.statusCode)
try {
const { response, json } = await fetchJsonWithResponse(url.toString())
return { diff: json, statusCode: response.status }
} catch (error) {
if (error instanceof RequestFailedError) {
return { diff: null, statusCode: error.response.status }
}
)
}
export function getLatestSnapshot(projectId, callback) {
request.get(
{
url: `http://127.0.0.1:3054/project/${projectId}/snapshot`,
json: true,
},
(error, res, body) => {
if (error) {
return callback(error)
}
expect(res.statusCode).to.equal(200)
callback(null, body)
}
)
}
export function getSnapshot(projectId, pathname, version, options, callback) {
if (typeof options === 'function') {
callback = options
options = null
throw error
}
if (!options) {
options = { allowErrors: false }
}
request.get(
{
url: `http://127.0.0.1:3054/project/${projectId}/version/${version}/${encodeURIComponent(
pathname
)}`,
},
(error, res, body) => {
if (error) {
return callback(error)
}
if (!options.allowErrors) {
expect(res.statusCode).to.equal(200)
}
callback(error, body, res.statusCode)
}
}
export async function getChangesInChunkSince(projectId, since, options = {}) {
const url = new URL(
`http://127.0.0.1:3054/project/${projectId}/changes-in-chunk`
)
url.searchParams.set('since', since)
try {
const { response, json } = await fetchJsonWithResponse(url.toString())
return { body: json, statusCode: response.status }
} catch (error) {
if (options.allowErrors && error instanceof RequestFailedError) {
return { body: null, statusCode: error.response.status }
}
throw error
}
}
export async function getLatestSnapshot(projectId) {
return await fetchJson(`http://127.0.0.1:3054/project/${projectId}/snapshot`)
}
export async function getSnapshot(projectId, pathname, version, options = {}) {
const url = `http://127.0.0.1:3054/project/${projectId}/version/${version}/${encodeURIComponent(
pathname
)}`
try {
const { response, body } = await fetchStringWithResponse(url)
if (!options.allowErrors) {
expect(response.status).to.equal(200)
}
return { body, statusCode: response.status }
} catch (error) {
if (options.allowErrors && error instanceof RequestFailedError) {
return { body: null, statusCode: error.response.status }
}
throw error
}
}
export function pushRawUpdate(projectId, update, callback) {
@@ -222,73 +196,37 @@ export function resyncHistory(projectId, callback) {
)
}
export function createLabel(
export async function createLabel(
projectId,
userId,
version,
comment,
createdAt,
callback
createdAt
) {
request.post(
{
url: `http://127.0.0.1:3054/project/${projectId}/labels`,
json: { comment, version, created_at: createdAt, user_id: userId },
},
(error, res, body) => {
if (error) {
return callback(error)
}
expect(res.statusCode).to.equal(200)
callback(null, body)
}
)
return await fetchJson(`http://127.0.0.1:3054/project/${projectId}/labels`, {
method: 'POST',
json: { comment, version, created_at: createdAt, user_id: userId },
})
}
export function getLabels(projectId, callback) {
request.get(
{
url: `http://127.0.0.1:3054/project/${projectId}/labels`,
json: true,
},
(error, res, body) => {
if (error) {
return callback(error)
}
expect(res.statusCode).to.equal(200)
callback(null, body)
}
)
export async function getLabels(projectId) {
return await fetchJson(`http://127.0.0.1:3054/project/${projectId}/labels`)
}
export function deleteLabelForUser(projectId, userId, labelId, callback) {
request.delete(
{
url: `http://127.0.0.1:3054/project/${projectId}/user/${userId}/labels/${labelId}`,
},
(error, res, body) => {
if (error) {
return callback(error)
}
expect(res.statusCode).to.equal(204)
callback(null, body)
}
export async function deleteLabelForUser(projectId, userId, labelId) {
const response = await fetchNothing(
`http://127.0.0.1:3054/project/${projectId}/user/${userId}/labels/${labelId}`,
{ method: 'DELETE' }
)
expect(response.status).to.equal(204)
}
export function deleteLabel(projectId, labelId, callback) {
request.delete(
{
url: `http://127.0.0.1:3054/project/${projectId}/labels/${labelId}`,
},
(error, res, body) => {
if (error) {
return callback(error)
}
expect(res.statusCode).to.equal(204)
callback(null, body)
}
export async function deleteLabel(projectId, labelId) {
const response = await fetchNothing(
`http://127.0.0.1:3054/project/${projectId}/labels/${labelId}`,
{ method: 'DELETE' }
)
expect(response.status).to.equal(204)
}
export async function setFailure(failureEntry) {
@@ -300,27 +238,16 @@ export function getFailure(projectId, callback) {
db.projectHistoryFailures.findOne({ project_id: projectId }, callback)
}
export function transferLabelOwnership(fromUser, toUser, callback) {
request.post(
{
url: `http://127.0.0.1:3054/user/${fromUser}/labels/transfer/${toUser}`,
},
(error, res, body) => {
if (error) {
return callback(error)
}
expect(res.statusCode).to.equal(204)
callback(null, body)
}
export async function transferLabelOwnership(fromUser, toUser) {
const response = await fetchNothing(
`http://127.0.0.1:3054/user/${fromUser}/labels/transfer/${toUser}`,
{ method: 'POST' }
)
expect(response.status).to.equal(204)
}
export async function getDump(projectId) {
const { response, json } = await fetchJsonWithResponse(
`http://127.0.0.1:3054/project/${projectId}/dump`
)
expect(response.status).to.equal(200)
return json
return await fetchJson(`http://127.0.0.1:3054/project/${projectId}/dump`)
}
export async function deleteProject(projectId) {

View File

@@ -1,7 +1,6 @@
const crypto = require('crypto')
const { db } = require('../../infrastructure/mongodb')
const Errors = require('../Errors/Errors')
const { promisifyAll } = require('@overleaf/promise-utils')
const { callbackify } = require('util')
const ONE_HOUR_IN_S = 60 * 60
@@ -32,106 +31,92 @@ async function peekValueFromToken(use, token) {
return { data: tokenDoc.data, remainingPeeks }
}
async function getNewToken(use, data, options = {}) {
const expiresIn = options.expiresIn || ONE_HOUR_IN_S
const createdAt = new Date()
const expiresAt = new Date(createdAt.getTime() + expiresIn * 1000)
const token = crypto.randomBytes(32).toString('hex')
await db.tokens.insertOne({
use,
token,
data,
createdAt,
expiresAt,
})
return token
}
async function getValueFromTokenAndExpire(use, token) {
const now = new Date()
const tokenDoc = await db.tokens.findOneAndUpdate(
{
use,
token,
expiresAt: { $gt: now },
usedAt: { $exists: false },
peekCount: { $not: { $gte: OneTimeTokenHandler.MAX_PEEKS } },
},
{
$set: {
usedAt: now,
},
}
)
if (!tokenDoc) {
throw new Errors.NotFoundError('no token found')
}
return tokenDoc.data
}
async function expireToken(use, token) {
const now = new Date()
await db.tokens.updateOne(
{
use,
token,
},
{
$set: {
usedAt: now,
},
}
)
}
async function expireAllTokensForUser(userId, use) {
const now = new Date()
await db.tokens.updateMany(
{
use,
'data.user_id': userId.toString(),
usedAt: { $exists: false },
},
{
$set: {
usedAt: now,
},
}
)
}
const OneTimeTokenHandler = {
MAX_PEEKS: 4,
getNewToken(use, data, options, callback) {
// options is optional
if (!options) {
options = {}
}
if (typeof options === 'function') {
callback = options
options = {}
}
const expiresIn = options.expiresIn || ONE_HOUR_IN_S
const createdAt = new Date()
const expiresAt = new Date(createdAt.getTime() + expiresIn * 1000)
const token = crypto.randomBytes(32).toString('hex')
db.tokens.insertOne(
{
use,
token,
data,
createdAt,
expiresAt,
},
function (error) {
if (error) {
return callback(error)
}
callback(null, token)
}
)
},
getValueFromTokenAndExpire(use, token, callback) {
const now = new Date()
db.tokens.findOneAndUpdate(
{
use,
token,
expiresAt: { $gt: now },
usedAt: { $exists: false },
peekCount: { $not: { $gte: OneTimeTokenHandler.MAX_PEEKS } },
},
{
$set: {
usedAt: now,
},
},
function (error, token) {
if (error) {
return callback(error)
}
if (!token) {
return callback(new Errors.NotFoundError('no token found'))
}
callback(null, token.data)
}
)
},
getNewToken: callbackify(getNewToken),
getValueFromTokenAndExpire: callbackify(getValueFromTokenAndExpire),
peekValueFromToken: callbackify(peekValueFromToken),
expireToken(use, token, callback) {
const now = new Date()
db.tokens.updateOne(
{
use,
token,
},
{
$set: {
usedAt: now,
},
},
error => {
callback(error)
}
)
},
expireAllTokensForUser(userId, use, callback) {
const now = new Date()
db.tokens.updateMany(
{
use,
'data.user_id': userId.toString(),
usedAt: { $exists: false },
},
{
$set: {
usedAt: now,
},
},
error => {
callback(error)
}
)
expireToken: callbackify(expireToken),
expireAllTokensForUser: callbackify(expireAllTokensForUser),
promises: {
getNewToken,
getValueFromTokenAndExpire,
peekValueFromToken,
expireToken,
expireAllTokensForUser,
},
}
OneTimeTokenHandler.promises = promisifyAll(OneTimeTokenHandler)
module.exports = OneTimeTokenHandler

View File

@@ -65,40 +65,36 @@ async function importInvite(subscription, inviterName, email, token, sentAt) {
return subscription.save()
}
async function _deleteUserSubscription(userId, ipAddress) {
async function _deleteUserSubscription(subscription, userId, ipAddress) {
// Delete released user subscription to make it on a free plan
const subscription =
await SubscriptionLocator.promises.getUsersSubscription(userId)
if (subscription) {
logger.debug(
{
subscriptionId: subscription._id,
},
'deleting user subscription'
)
logger.debug(
{
subscriptionId: subscription._id,
},
'deleting user subscription'
)
const deleterData = {
id: userId,
ip: ipAddress,
}
await SubscriptionUpdater.promises.deleteSubscription(
subscription,
deleterData
)
const deleterData = {
id: userId,
ip: ipAddress,
}
await SubscriptionUpdater.promises.deleteSubscription(
subscription,
deleterData
)
// Terminate the subscription in Recurly
if (subscription.recurlySubscription_id) {
try {
await RecurlyClient.promises.terminateSubscriptionByUuid(
subscription.recurlySubscription_id
)
} catch (err) {
logger.error(
{ err, subscriptionId: subscription._id },
'terminating subscription failed'
)
}
// Terminate the subscription in Recurly
if (subscription.recurlySubscription_id) {
try {
await RecurlyClient.promises.terminateSubscriptionByUuid(
subscription.recurlySubscription_id
)
} catch (err) {
logger.error(
{ err, subscriptionId: subscription._id },
'terminating subscription failed'
)
}
}
}
@@ -117,7 +113,17 @@ async function acceptInvite(token, userId, ipAddress) {
)
if (subscription.managedUsersEnabled) {
await _deleteUserSubscription(userId, ipAddress)
// check if user has a personal subscription
const userSubscription =
await SubscriptionLocator.promises.getUsersSubscription(userId)
if (userSubscription) {
// if user has a personal subscription and joins a managed group, delete their personal subscription
// but make sure that it's not the same subscription as the group one.
if (!userSubscription._id.equals(subscription._id)) {
await _deleteUserSubscription(userSubscription, userId, ipAddress)
}
}
await Modules.promises.hooks.fire(
'enrollInManagedSubscription',
userId,

View File

@@ -20,6 +20,7 @@ import PoNumber from '@/features/group-management/components/add-seats/po-number
import CostSummary from '@/features/group-management/components/add-seats/cost-summary'
import RequestStatus from '@/features/group-management/components/request-status'
import useAsync from '@/shared/hooks/use-async'
import useAsyncWithCancel from '@/shared/hooks/use-async-with-cancel'
import getMeta from '@/utils/meta'
import { FetchError, postJSON } from '@/infrastructure/fetch-json'
import { debugConsole } from '@/utils/debugging'
@@ -50,7 +51,6 @@ function AddSeats() {
const [addSeatsInputError, setAddSeatsInputError] = useState<string>()
const [poNumberInputError, setPoNumberInputError] = useState<string>()
const [shouldContactSales, setShouldContactSales] = useState(false)
const controller = useAbortController()
const { signal: addSeatsSignal } = useAbortController()
const { signal: contactSalesSignal } = useAbortController()
const {
@@ -60,7 +60,8 @@ function AddSeats() {
data: costSummaryData,
reset: resetCostSummaryData,
error: errorCostSummary,
} = useAsync<CostSummaryData, FetchError>()
cancelAll: cancelCostSummaryRequest,
} = useAsyncWithCancel<CostSummaryData, FetchError>()
const [isAddingSeats, setIsAddingSeats] = useState(false)
const [isErrorAddingSeats, setIsErrorAddingSeats] = useState(false)
const [isSuccessAddingSeats, setIsSuccessAddingSeats] = useState(false)
@@ -85,14 +86,21 @@ function AddSeats() {
const debouncedCostSummaryRequest = useMemo(
() =>
debounce((value: number, signal: AbortSignal) => {
const post = postJSON('/user/subscription/group/add-users/preview', {
signal,
body: { adding: value },
debounce((value: number) => {
cancelCostSummaryRequest()
const post = (signal: AbortSignal) =>
postJSON('/user/subscription/group/add-users/preview', {
body: { adding: value },
signal,
})
runAsyncCostSummary(post).catch(error => {
if (error.name !== 'AbortError') {
debugConsole.error(error)
}
})
runAsyncCostSummary(post).catch(debugConsole.error)
}, 500),
[runAsyncCostSummary]
[runAsyncCostSummary, cancelCostSummaryRequest]
)
const debouncedTrackUserEnterSeatNumberEvent = useMemo(
@@ -168,14 +176,15 @@ function AddSeats() {
debouncedCostSummaryRequest.cancel()
shouldContactSales = true
} else {
debouncedCostSummaryRequest(seats, controller.signal)
debouncedCostSummaryRequest(seats)
}
} else {
debouncedTrackUserEnterSeatNumberEvent.cancel()
debouncedCostSummaryRequest.cancel()
cancelCostSummaryRequest()
resetCostSummaryData()
}
resetCostSummaryData()
setShouldContactSales(shouldContactSales)
}
@@ -374,7 +383,6 @@ function AddSeats() {
required
className="w-25"
name="seats"
disabled={isLoadingCostSummary}
onChange={handleSeatsChange}
isInvalid={Boolean(addSeatsInputError)}
/>

View File

@@ -28,7 +28,7 @@ function CostSummary({ subscriptionChange, totalLicenses }: CostSummaryProps) {
data-testid="cost-summary"
>
<Card.Body className="d-grid gap-2 p-3">
<div>
<div data-testid="adding-licenses-summary">
<div className="fw-bold">{t('cost_summary')}</div>
{subscriptionChange ? (
<Trans

View File

@@ -58,7 +58,7 @@ export type EditorManager = {
openDoc: (document: Doc, options?: OpenDocOptions) => void
openDocs: OpenDocuments
openFileWithId: (fileId: string) => void
openInitialDoc: (docId: string) => void
openInitialDoc: (docId?: string) => void
isLoading: boolean
jumpToLine: (options: GotoLineOptions) => void
debugTimers: React.MutableRefObject<Record<string, number>>
@@ -513,7 +513,7 @@ export const EditorManagerProvider: FC<React.PropsWithChildren> = ({
)
const openInitialDoc = useCallback(
(fallbackDocId: string) => {
(fallbackDocId?: string) => {
const docId =
customLocalStorage.getItem(currentDocumentIdStorageKey) || fallbackDocId
if (docId) {

View File

@@ -134,12 +134,7 @@ export const FileTreeOpenProvider: FC<React.PropsWithChildren> = ({
// Open a document once the file tree and project are ready
const initialOpenDoneRef = useRef(false)
useEffect(() => {
if (
rootDocId &&
fileTreeReady &&
projectJoined &&
!initialOpenDoneRef.current
) {
if (fileTreeReady && projectJoined && !initialOpenDoneRef.current) {
initialOpenDoneRef.current = true
openInitialDoc(rootDocId)
}

View File

@@ -37,6 +37,7 @@ import { debounce } from 'lodash'
import { EditorSelection, EditorState } from '@codemirror/state'
import { sendSearchEvent } from '@/features/event-tracking/search-events'
import { FullProjectSearchButton } from './full-project-search-button'
import { isInvalidRegExp } from '../utils/regexp'
const MATCH_COUNT_DEBOUNCE_WAIT = 100 // the amount of ms to wait before counting matches
const MAX_MATCH_COUNT = 999 // the maximum number of matches to count
@@ -253,8 +254,7 @@ const CodeMirrorSearchForm: FC<React.PropsWithChildren> = () => {
<div className="ol-cm-search-controls">
<span
className={classnames('ol-cm-search-input-group', {
'ol-cm-search-input-error':
query.regexp && isInvalidRegExp(query.search),
'ol-cm-search-input-error': query.regexp && isInvalidRegExp(query),
})}
>
<OLFormControl
@@ -495,15 +495,6 @@ const CodeMirrorSearchForm: FC<React.PropsWithChildren> = () => {
)
}
function isInvalidRegExp(source: string) {
try {
RegExp(source)
return false
} catch {
return true
}
}
export default CodeMirrorSearchForm
const buildPosition = debounce(

View File

@@ -5,3 +5,12 @@ export const createRegExp = (searchQuery: SearchQuery) => {
return new RegExp(searchQuery.search, flags)
}
/**
 * Reports whether the given search query cannot be compiled into a RegExp.
 * A query is invalid exactly when `createRegExp` throws for it.
 */
export const isInvalidRegExp = (searchQuery: SearchQuery): boolean => {
  try {
    createRegExp(searchQuery)
  } catch {
    return true
  }
  return false
}

View File

@@ -78,7 +78,7 @@ function Tooltip({
const delay = overlayProps?.delay
let delayShow = DEFAULT_DELAY_SHOW
let delayHide = DEFAULT_DELAY_HIDE
if (delay) {
if (delay !== undefined) {
delayShow = typeof delay === 'number' ? delay : delay.show
delayHide = typeof delay === 'number' ? Math.max(delay - 10, 0) : delay.hide
}

View File

@@ -0,0 +1,125 @@
import * as React from 'react'
import useSafeDispatch from './use-safe-dispatch'
import { Nullable } from '../../../../types/utils'
import { FetchError } from '../../infrastructure/fetch-json'
// Lifecycle state for one async operation.
type State<T, E> = {
  status: 'idle' | 'pending' | 'resolved' | 'rejected'
  data: Nullable<T>
  error: Nullable<E>
}

// Reducer actions are partial states, shallow-merged into the current state.
type Action<T, E> = Partial<State<T, E>>

// Factory that starts the async work; receives an AbortSignal for cancellation.
type AsyncRunner<T> = (signal: AbortSignal) => Promise<T>

// Starting point for the hook's state before any overrides are applied.
const defaultInitialState: State<null, null> = {
  status: 'idle',
  data: null,
  error: null,
}

// Sentinel rejection value used when a run is cancelled via AbortController,
// so callers can distinguish cancellation from a real failure.
const abortError = new Error('Aborted by the caller')
abortError.name = 'AbortError'
/**
 * Tracks the lifecycle of asynchronous work — status ('idle' | 'pending' |
 * 'resolved' | 'rejected'), resolved `data`, and rejection `error` — and
 * supports aborting in-flight runs.
 *
 * @param initialState optional overrides for the initial status/data/error
 * @returns `runAsync` to start work, `setData`/`setError` to set terminal
 *   state manually, `reset`, `cancelAll`, and derived status flags.
 */
function useAsync<T = any, E extends Error | FetchError = Error>(
  initialState?: Partial<State<T, E>>
) {
  // Captured once on first render so `reset` restores the original initial
  // state even if the caller passes a different object on later renders.
  const initialStateRef = React.useRef({
    ...defaultInitialState,
    ...initialState,
  })

  // Use a Set to track all active AbortController instances
  const abortControllerSetRef = React.useRef<Set<AbortController>>(new Set())

  // Reducer shallow-merges each partial update into the current state.
  const [{ status, data, error }, setState] = React.useReducer(
    (state: State<T, E>, action: Action<T, E>) => ({ ...state, ...action }),
    initialStateRef.current
  )

  // NOTE(review): useSafeDispatch presumably makes dispatch a no-op after
  // unmount — confirm in use-safe-dispatch.
  const safeSetState = useSafeDispatch(setState)

  // Store data and mark the run as resolved.
  const setData = React.useCallback(
    (data: Nullable<T>) => safeSetState({ data, status: 'resolved' }),
    [safeSetState]
  )

  // Store an error and mark the run as rejected.
  const setError = React.useCallback(
    (error: Nullable<E>) => safeSetState({ error, status: 'rejected' }),
    [safeSetState]
  )

  // Restore the initial state captured on first render.
  const reset = React.useCallback(
    () => safeSetState(initialStateRef.current),
    [safeSetState]
  )

  // Abort every in-flight run started by this hook instance.
  const cancelAll = React.useCallback(() => {
    // Abort all controllers in the set and clear it
    abortControllerSetRef.current.forEach(controller => controller.abort())
    abortControllerSetRef.current.clear()
  }, [])

  // Start an async run. The runner receives an AbortSignal; the returned
  // promise rejects with `abortError` on cancellation, and that rejection
  // deliberately does not overwrite the state with an error.
  const runAsync = React.useCallback(
    (asyncRunner: AsyncRunner<T>) => {
      safeSetState({ status: 'pending' })

      const controller = new AbortController()
      abortControllerSetRef.current.add(controller)

      // The original promise is now created using the provided factory function,
      // which receives the signal for cancellation.
      const promise = asyncRunner(controller.signal)

      // Rejects as soon as the controller aborts, so the race below settles
      // promptly even if the runner ignores its signal.
      const abortPromise = new Promise<never>((_resolve, reject) => {
        controller.signal.addEventListener('abort', () => {
          reject(abortError)
        })
      })

      return Promise.race([promise, abortPromise])
        .then(
          data => {
            setData(data)
            return data
          },
          error => {
            // Cancellation is intentional: skip setError for abortError, but
            // still reject so callers can react to it.
            if (error !== abortError) {
              setError(error)
            }
            return Promise.reject(error)
          }
        )
        .finally(() => {
          // Remove the controller from the set, whether it succeeded or failed
          abortControllerSetRef.current.delete(controller)
        })
    },
    [safeSetState, setData, setError]
  )

  // Abort all requests when the component unmounts to prevent memory leaks
  React.useEffect(() => {
    return () => {
      cancelAll()
    }
  }, [cancelAll])

  return {
    isIdle: status === 'idle',
    isLoading: status === 'pending',
    isError: status === 'rejected',
    isSuccess: status === 'resolved',
    setData,
    setError,
    error,
    status,
    data,
    runAsync,
    reset,
    cancelAll,
  }
}
export default useAsync

// Convenience type for consumers accepting the full hook return value.
export type UseAsyncReturnType = ReturnType<typeof useAsync>

// Named exports: `abortError` lets callers detect cancellation rejections.
export { useAsync, abortError }

View File

@@ -1338,9 +1338,9 @@
"manage_group_settings_subtext_managed_users": "Turn on Managed Users",
"manage_institution_managers": "Manage institution managers",
"manage_managers_subtext": "Assign or remove manager privileges",
"manage_newsletter": "Manage Your Newsletter Preferences",
"manage_newsletter": "Manage newsletter preferences",
"manage_publisher_managers": "Manage publisher managers",
"manage_sessions": "Manage Your Sessions",
"manage_sessions": "Manage sessions",
"manage_subscription": "Manage subscription",
"manage_tag": "Manage tag",
"manage_your_ai_assist_add_on": "Manage your AI Assist add-on",
@@ -2639,7 +2639,7 @@
"view_in_template_gallery": "View it in the template gallery",
"view_invitation": "View invitation",
"view_invoices": "View invoices",
"view_labs_experiments": "View Labs Experiments",
"view_labs_experiments": "View experiments",
"view_less": "View less",
"view_logs": "View logs",
"view_metrics": "View metrics",

View File

@@ -30,7 +30,7 @@ import { FullProjectSearchModifiers } from './full-project-search-modifiers'
import { isMac } from '@/shared/utils/os'
import { PanelHeading } from '@/shared/components/panel-heading'
import { useEditorManagerContext } from '@/features/ide-react/context/editor-manager-context'
import { createRegExp } from '../util/regexp'
import { createRegExp } from '@/features/source-editor/utils/regexp'
import { useEditorOpenDocContext } from '@/features/ide-react/context/editor-open-doc-context'
import { useFileTreePathContext } from '@/features/file-tree/contexts/file-tree-path'
import { FullProjectSearchResults } from './full-project-search-results'

View File

@@ -273,9 +273,9 @@
"@uppy/utils": "^5.7.0",
"@uppy/xhr-upload": "^3.6.0",
"@vitest/eslint-plugin": "1.1.44",
"@writefull/core": "^1.27.24",
"@writefull/ui": "^1.27.24",
"@writefull/utils": "^1.27.24",
"@writefull/core": "^1.27.26",
"@writefull/ui": "^1.27.26",
"@writefull/utils": "^1.27.26",
"5to6-codemod": "^1.8.0",
"abort-controller": "^3.0.0",
"acorn": "^7.1.1",

View File

@@ -3,6 +3,7 @@ import AddSeats, {
MAX_NUMBER_OF_PO_NUMBER_CHARACTERS,
} from '@/features/group-management/components/add-seats/add-seats'
import { SplitTestProvider } from '@/shared/context/split-test-context'
import { cloneDeep } from 'lodash'
describe('<AddSeats />', function () {
beforeEach(function () {
@@ -409,6 +410,40 @@ describe('<AddSeats />', function () {
})
})
it('handles double digit numbers of licenses gracefully', function () {
const { promise, resolve } = Promise.withResolvers<void>()
const body = cloneDeep(this.body)
cy.intercept(
'POST',
'/user/subscription/group/add-users/preview',
async req => {
await promise
// make the response reflect back whatever quantity was sent in the request
// we don't really care about the rest of the body for this test
const { adding } = req.body
body.change.addOn.quantity = body.change.addOn.prevQuantity + adding
req.reply({
statusCode: 200,
body,
})
}
).as('addUsersRequest')
cy.get('@input').type('1')
cy.get('@input').type('2')
resolve()
cy.findByTestId('adding-licenses-summary').within(() => {
cy.findByText((_, el) =>
Boolean(
el?.textContent?.includes(
'Youre adding 12 licenses to your plan giving you a total of 17 licenses'
)
)
)
})
})
describe('request', function () {
afterEach(function () {
cy.findByRole('link', { name: /go to subscriptions/i }).should(

View File

@@ -8,7 +8,7 @@ describe('<NewsletterSection />', function () {
render(<NewsletterSection />)
const link = screen.getByRole('link', {
name: 'Manage Your Newsletter Preferences',
name: 'Manage newsletter preferences',
})
expect(link.getAttribute('href')).to.equal('/user/email-preferences')

View File

@@ -8,7 +8,7 @@ describe('<SessionsSection />', function () {
render(<SessionsSection />)
const link = screen.getByRole('link', {
name: 'Manage Your Sessions',
name: 'Manage sessions',
})
expect(link.getAttribute('href')).to.equal('/user/sessions')

View File

@@ -344,12 +344,27 @@ describe('TeamInvitesHandler', function () {
email: 'tyrion@example.com',
}
this.user_subscription = {
id: '66264b9125930b976cc0811e',
_id: new ObjectId('66264b9125930b976cc0811e'),
groupPlan: false,
recurlySubscription_id: 'fa1b2cfa156gh',
admin_id: '123456789',
member_ids: [],
teamInvites: [],
save: sinon.stub().resolves(),
}
this.ipAddress = '127.0.0.1'
this.UserGetter.promises.getUserByAnyEmail
.withArgs(this.user.email)
.resolves(this.user)
this.SubscriptionLocator.promises.getUsersSubscription
.withArgs(this.user.id)
.resolves(this.user_subscription)
this.subscription.teamInvites.push({
email: 'john.snow@example.com',
token: 'dddddddd',
@@ -421,12 +436,12 @@ describe('TeamInvitesHandler', function () {
)
sinon.assert.calledWith(
this.SubscriptionUpdater.promises.deleteSubscription,
this.subscription,
this.user_subscription,
{ id: this.user.id, ip: this.ipAddress }
)
sinon.assert.calledWith(
this.RecurlyClient.promises.terminateSubscriptionByUuid,
this.subscription.recurlySubscription_id
this.user_subscription.recurlySubscription_id
)
sinon.assert.calledWith(
this.Modules.promises.hooks.fire,
@@ -435,6 +450,23 @@ describe('TeamInvitesHandler', function () {
this.subscription
)
})
it('should not delete the users subscription if that subscription is also the join target', async function () {
this.subscription.managedUsersEnabled = true
this.SubscriptionLocator.promises.getUsersSubscription
.withArgs(this.user.id)
.resolves(this.subscription)
await this.TeamInvitesHandler.promises.acceptInvite(
'dddddddd',
this.user.id,
this.ipAddress
)
sinon.assert.notCalled(
this.SubscriptionUpdater.promises.deleteSubscription
)
})
})
describe('with group SSO enabled', function () {