18 Commits

Author SHA1 Message Date
Miguel Serrano
18fc51bfa4 Merge pull request #29361 from overleaf/msm-hotfix-5-5-6
[CE/SP] Hotfix 5.5.6

GitOrigin-RevId: f5390576b4ea0c9d7e5a2c145130b3f01a8bc431
2025-10-29 09:06:22 +00:00
Maria Florencia Besteiro Gonzalez
e82413ec72 Merge pull request #29312 from overleaf/mfb-upgrade-nodemailer-through-mailtrap-in-saas-e2e
Change mailtrap version. Nodemailer was deleted from the saas-e2e service

GitOrigin-RevId: 54e6b280309075e2b1bbc91d34da2c3d8b2d6534
2025-10-29 09:06:13 +00:00
Jakob Ackermann
d056ee5b74 [web] enable compileFromClsiCache independent of clsi-cache-prompt (#29366)
The split test is active now and .active is only set in the dev-env.

GitOrigin-RevId: ed4a379e2144c2a08497eb0cf42ef16077f7ad27
2025-10-29 09:06:04 +00:00
Kristina
d3def551ae [web] improve messaging when upgrading from standalone add-on to premium plan + add-on (#29330)
* update userCanStartTrial to consider standalone add-ons
* display correct disabled message on hover
* display error message on preview plan purchase page

GitOrigin-RevId: 57c4e4267c1fd0ea892df8c0f5443ad74847147c
2025-10-29 09:05:59 +00:00
Kristina
05a50710bd [web] display Stripe invoice line items on preview change page (#29280)
GitOrigin-RevId: 267586a33f19f6931e2eb76b33fdf569aa07b4d3
2025-10-29 09:05:54 +00:00
Gernot Schulz
8441937806 Merge pull request #29244 from overleaf/gs-jenkins-pr-build-tags
Tag images with branch head instead of merge commit

GitOrigin-RevId: f0324a48a8bd692f781c9bee4e42bf97788eb997
2025-10-29 09:05:48 +00:00
Rebeka Dekany
0e04cdda8e Fix translation from "Change email" to "Change role and department" (#29341)
GitOrigin-RevId: c99415d30f7ff65e54f0823d98607a1a757e6c94
2025-10-29 09:05:39 +00:00
Jakob Ackermann
28c1c7db37 [clsi-cache] add circuit breaker to clsi-cache requests (#29339)
Stage timeouts:
- frontend waits 5s
- web/clsi waits 4s
- clsi-cache waits 3s
These staggered timeouts should ensure that the frontend can still receive
a valid response after any of the backend requests has failed.

The circuit breaker will remain open (blocking requests) for TIMEOUT plus a
jitter of 0-3 times the TIMEOUT of the respective service (e.g. 4-16s in
web, with its 4s timeout). This should prevent the bulk of traffic from
failing while still issuing occasional retries, without hammering the
instances while they are down.

Also do not try the next backend when the abort signal has expired.

GitOrigin-RevId: d612125616a9e416beff2f4c6d7f30066b5b9d6d
2025-10-29 09:05:34 +00:00
Mathias Jakobsen
1b4719d523 Merge pull request #29310 from overleaf/mj-references-url-config
[web] Remove references url from configuration

GitOrigin-RevId: fd671d0ac1ff9a8bb754ee3136dc29401ba2b186
2025-10-28 09:05:57 +00:00
Antoine Clausse
62a401c98d [web] Fix flaky frontend test in UserNotifications -> Affiliation (#29315)
* Temporary: forcibly reproduce the loading state and test failure with a `setTimeout`

* Wait for the loading to be finished in the test

* Revert "Temporary: forcibly reproduce the loading state and test failure with a `setTimeout`"

This reverts commit fb0270cc0cacd49fcff74e186b50b55f822f7729.

* Let the test pass if the loading text is not found

* Replace `getByText` with `findByText` so it awaits

GitOrigin-RevId: 6cee6e3a86b6a5f24d95c7e3e1fcef4c4efcc094
2025-10-28 09:05:46 +00:00
Simon Gardner
d95815e0c1 Migrate UserSessionsManager and associated tests to async/await
GitOrigin-RevId: 8b5f3a296798930aa1168738cd3a4c666c7a3028
2025-10-28 09:05:42 +00:00
Brian Gough
cc7f0c1b9b Merge pull request #29321 from overleaf/bg-ignore-deleted-projects-in-history-backup
handle deleted projects in history-v1 backup worker

GitOrigin-RevId: f4392045074248137f15d082d922c18b1ef9232f
2025-10-28 09:05:34 +00:00
roo hutton
43c0ba828d Merge pull request #29289 from overleaf/rh-compile-timeout-remove-info
Support compile-timeout-remove-info split test in paywalls

GitOrigin-RevId: d557417ab0bd63fce82f4ffb5d66e8e1c9b0b039
2025-10-28 09:05:27 +00:00
Eric Mc Sween
61d823f946 Merge pull request #29301 from overleaf/em-fast-png-copy-metrics
More precise metrics on fast PNG copy

GitOrigin-RevId: 8b3a65a8a70152f1743c45f701448dc97be7ffeb
2025-10-28 09:05:23 +00:00
Davinder Singh
618f79d4cf removing the sentence and related functions and translation (#29269)
GitOrigin-RevId: 0969d0eb1ec1da0e15f606bb3bdcf86908fa45f7
2025-10-28 09:05:06 +00:00
Mathias Jakobsen
f6bc89691c Merge pull request #29308 from overleaf/ae-sentry-image
Ignore blocked image from googletagmanager error report

GitOrigin-RevId: 0e386ca2c1cd0e59e4e4a686d2eeb40e32efcc80
2025-10-28 09:05:02 +00:00
Mathias Jakobsen
8767bcd891 Merge pull request #29287 from overleaf/mj-recompile-button-key-warning
[web] Avoid react key warning for recompile button

GitOrigin-RevId: e7334ece38a1ae177244ba3afbaaec9190c967ee
2025-10-28 09:04:58 +00:00
Mathias Jakobsen
43a80ef8a5 Merge pull request #28945 from overleaf/mj-tear-down-server-side-referencing
[web] Tear down server side referencing

GitOrigin-RevId: 37feac39cc7bf219a2cbc463191163534434f267
2025-10-28 09:04:53 +00:00
40 changed files with 1239 additions and 1708 deletions

package-lock.json (generated)
View File

@@ -36362,9 +36362,9 @@
}
},
"node_modules/mailtrap": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/mailtrap/-/mailtrap-3.4.0.tgz",
"integrity": "sha512-gegg90/gMY8hvfxB+WMtE8RRZyhQr90jUw00QOLApIAomItumqFBCpZv5IfG51EUKThu9+p7X4QdNA4buryenw==",
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/mailtrap/-/mailtrap-4.3.0.tgz",
"integrity": "sha512-JqWpt11LONtxmfMGf3EB5mrKLAapHajSdnVKxWD0b4/Boak9GEosAvVpnkiSTJJuB7dgLeLDVUeOwqDssXl8pA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -38363,18 +38363,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/nodemailer": {
"version": "6.10.1",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.10.1.tgz",
"integrity": "sha512-Z+iLaBGVaSjbIzQ4pX6XV41HrooLsQ10ZWPUehGmuantvzWoDVBnmsdUcOIDM1t+yPor5pDhVlDESgOMEGxhHA==",
"dev": true,
"license": "MIT-0",
"optional": true,
"peer": true,
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/nodemon": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.0.1.tgz",
@@ -54583,7 +54571,7 @@
"cypress": "13.13.2",
"cypress-multi-reporters": "^2.0.5",
"isomorphic-git": "^1.33.1",
"mailtrap": "^3.4.0",
"mailtrap": "^4.3.0",
"mocha-junit-reporter": "^2.2.1",
"pdf-parse": "^1.1.1",
"typescript": "^5.0.4",

View File

@@ -0,0 +1,5 @@
FROM sharelatex/sharelatex:5.5.5
RUN apt update \
&& unattended-upgrade --verbose --no-minimal-upgrade-steps \
&& rm -rf /var/lib/apt/lists/*

View File

@@ -17,6 +17,11 @@ const { MeteredStream } = require('@overleaf/stream-utils')
const { CACHE_SUBDIR } = require('./OutputCacheManager')
const { isExtraneousFile } = require('./ResourceWriter')
const TIMEOUT = 5_000
/**
* @type {Map<string, number>}
*/
const lastFailures = new Map()
const TIMING_BUCKETS = [
0, 10, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000,
]
@@ -35,6 +40,25 @@ function getShard(projectId) {
return Settings.apis.clsiCache.shards[idx]
}
function checkCircuitBreaker(url) {
const lastFailure = lastFailures.get(url) ?? 0
if (lastFailure) {
// Circuit breaker that avoids retries for 5-20s.
const retryDelay = TIMEOUT * (1 + 3 * Math.random())
if (performance.now() - lastFailure < retryDelay) {
return true
}
}
return false
}
function tripCircuitBreaker(url) {
lastFailures.set(url, performance.now()) // The shard is unhealthy. Refresh timestamp of last failure.
}
function closeCircuitBreaker(url) {
lastFailures.delete(url) // The shard is back up.
}
/**
* @param {string} projectId
* @param {string} userId
@@ -61,6 +85,7 @@ function notifyCLSICacheAboutBuild({
if (!Settings.apis.clsiCache.enabled) return undefined
if (!OBJECT_ID_REGEX.test(projectId)) return undefined
const { url, shard } = getShard(projectId)
if (checkCircuitBreaker(url)) return undefined
/**
* @param {[{path: string}]} files
@@ -102,13 +127,18 @@ function notifyCLSICacheAboutBuild({
method: 'POST',
body,
headers: { 'Content-Type': 'application/json' },
signal: AbortSignal.timeout(15_000),
}).catch(err => {
logger.warn(
{ err, projectId, userId, buildId },
'enqueue for clsi cache failed'
)
signal: AbortSignal.timeout(TIMEOUT),
})
.then(() => {
closeCircuitBreaker(url)
})
.catch(err => {
tripCircuitBreaker(url)
logger.warn(
{ err, projectId, userId, buildId },
'enqueue for clsi cache failed'
)
})
}
// PDF preview
@@ -201,6 +231,8 @@ async function downloadOutputDotSynctexFromCompileCache(
) {
if (!Settings.apis.clsiCache.enabled) return false
if (!OBJECT_ID_REGEX.test(projectId)) return false
const { url } = getShard(projectId)
if (checkCircuitBreaker(url)) return false
const timer = new Metrics.Timer(
'clsi_cache_download',
@@ -211,19 +243,21 @@ async function downloadOutputDotSynctexFromCompileCache(
let stream
try {
stream = await fetchStream(
`${getShard(projectId).url}/project/${projectId}/${
`${url}/project/${projectId}/${
userId ? `user/${userId}/` : ''
}build/${editorId}-${buildId}/search/output/output.synctex.gz`,
{
method: 'GET',
signal: AbortSignal.timeout(10_000),
signal: AbortSignal.timeout(TIMEOUT),
}
)
} catch (err) {
if (err instanceof RequestFailedError && err.response.status === 404) {
closeCircuitBreaker(url)
timer.done({ status: 'not-found' })
return false
}
tripCircuitBreaker(url)
timer.done({ status: 'error' })
throw err
}
@@ -240,11 +274,13 @@ async function downloadOutputDotSynctexFromCompileCache(
)
await fs.promises.rename(tmp, dst)
} catch (err) {
tripCircuitBreaker(url)
try {
await fs.promises.unlink(tmp)
} catch {}
throw err
}
closeCircuitBreaker(url)
timer.done({ status: 'success' })
return true
}
@@ -258,10 +294,9 @@ async function downloadOutputDotSynctexFromCompileCache(
async function downloadLatestCompileCache(projectId, userId, compileDir) {
if (!Settings.apis.clsiCache.enabled) return false
if (!OBJECT_ID_REGEX.test(projectId)) return false
const { url } = getShard(projectId)
if (checkCircuitBreaker(url)) return false
const url = `${getShard(projectId).url}/project/${projectId}/${
userId ? `user/${userId}/` : ''
}latest/output/output.tar.gz`
const timer = new Metrics.Timer(
'clsi_cache_download',
1,
@@ -270,54 +305,71 @@ async function downloadLatestCompileCache(projectId, userId, compileDir) {
)
let stream
try {
stream = await fetchStream(url, {
method: 'GET',
signal: AbortSignal.timeout(10_000),
})
stream = await fetchStream(
`${url}/project/${projectId}/${
userId ? `user/${userId}/` : ''
}latest/output/output.tar.gz`,
{
method: 'GET',
signal: AbortSignal.timeout(TIMEOUT),
}
)
} catch (err) {
if (err instanceof RequestFailedError && err.response.status === 404) {
closeCircuitBreaker(url)
timer.done({ status: 'not-found' })
return false
}
tripCircuitBreaker(url)
timer.done({ status: 'error' })
throw err
}
let n = 0
let abort = false
await pipeline(
stream,
new MeteredStream(Metrics, 'clsi_cache_egress', { path: 'output.tar.gz' }),
createGunzip(),
tarFs.extract(compileDir, {
// use ignore hook for counting entries (files+folders) and validation.
// Include folders as they incur mkdir calls.
ignore(_, header) {
if (abort) return true // log once
n++
if (n > MAX_ENTRIES_IN_OUTPUT_TAR) {
abort = true
logger.warn(
{
url,
compileDir,
},
'too many entries in tar-ball from clsi-cache'
)
} else if (header.type !== 'file' && header.type !== 'directory') {
abort = true
logger.warn(
{
url,
compileDir,
entryType: header.type,
},
'unexpected entry in tar-ball from clsi-cache'
)
}
return abort
},
})
)
try {
await pipeline(
stream,
new MeteredStream(Metrics, 'clsi_cache_egress', {
path: 'output.tar.gz',
}),
createGunzip(),
tarFs.extract(compileDir, {
// use ignore hook for counting entries (files+folders) and validation.
// Include folders as they incur mkdir calls.
ignore(_, header) {
if (abort) return true // log once
n++
if (n > MAX_ENTRIES_IN_OUTPUT_TAR) {
abort = true
logger.warn(
{
projectId,
userId,
compileDir,
},
'too many entries in tar-ball from clsi-cache'
)
} else if (header.type !== 'file' && header.type !== 'directory') {
abort = true
logger.warn(
{
projectId,
userId,
compileDir,
entryType: header.type,
},
'unexpected entry in tar-ball from clsi-cache'
)
}
return abort
},
})
)
} catch (err) {
tripCircuitBreaker(url)
throw err
}
closeCircuitBreaker(url)
Metrics.count('clsi_cache_download_entries', n)
timer.done({ status: 'success' })
return !abort

View File

@@ -104,11 +104,20 @@ const LATEX_MK_METRICS_STDERR = [
[
'latexmk-img-times',
s => {
const pngCategoriesByFile = new Map()
const pngCopyMatches = s.matchAll(/^PNG copy: (.*)$/gm)
const pngCopyFiles = new Set()
for (const match of pngCopyMatches) {
const filename = match[1]
pngCopyFiles.add(filename)
pngCategoriesByFile.set(filename, 'fast-copy')
}
const pngCopySkipMatches = s.matchAll(
/^PNG copy skipped \((alpha|gamma|palette|interlaced)\): (.*)$/gm
)
for (const match of pngCopySkipMatches) {
const category = match[1]
const filename = match[2]
pngCategoriesByFile.set(filename, category)
}
const timingMatches = s.matchAll(
@@ -119,9 +128,14 @@ const LATEX_MK_METRICS_STDERR = [
let type = match[1]
const timeMs = parseInt(match[2], 10)
const filename = match[3]
if (type === 'PNG' && pngCopyFiles.has(filename)) {
type = 'PNG-fast-copy'
if (type === 'PNG') {
const pngCategory = pngCategoriesByFile.get(filename)
if (pngCategory != null) {
type = `PNG-${pngCategory}`
}
}
const accumulatedTime = timingsByType.get(type) ?? 0
timingsByType.set(type, accumulatedTime + timeMs)
}

View File

@@ -1,5 +1,5 @@
const { Binary, ObjectId } = require('mongodb')
const { projects, backedUpBlobs } = require('../mongodb')
const { projects, deletedProjects, backedUpBlobs } = require('../mongodb')
const OError = require('@overleaf/o-error')
// List projects with pending backups older than the specified interval
@@ -79,6 +79,13 @@ async function getBackupStatus(projectId) {
}
)
if (!project) {
// Check whether the project was deleted
const deletedProject = await deletedProjects.findOne({
'deleterData.deletedProjectId': new ObjectId(projectId),
})
if (deletedProject) {
throw new Error('Project deleted')
}
throw new Error('Project not found')
}
return {

View File

@@ -14,6 +14,7 @@ const blobs = db.collection('projectHistoryBlobs')
const globalBlobs = db.collection('projectHistoryGlobalBlobs')
const shardedBlobs = db.collection('projectHistoryShardedBlobs')
const projects = db.collection('projects')
const deletedProjects = db.collection('deletedProjects')
// Temporary collection for tracking progress of backed up old blobs (without a hash).
// The initial sync process will be able to skip over these.
// Schema: _id: projectId, blobs: [Binary]
@@ -32,6 +33,7 @@ module.exports = {
blobs,
globalBlobs,
projects,
deletedProjects,
shardedBlobs,
backedUpBlobs,
cleanupTestDatabase,

View File

@@ -114,9 +114,14 @@ async function runBackup(projectId, data, job) {
}
return `backup completed ${projectId}`
} catch (err) {
metrics.inc('backup_worker_project', 1, { status: 'failed' })
logger.error({ projectId, err }, 'backup failed')
throw err // Re-throw to mark job as failed
if (err.message === 'Project deleted') {
metrics.inc('backup_worker_project', 1, { status: 'deleted' })
logger.warn({ projectId, err }, 'skipping backup of deleted project')
} else {
metrics.inc('backup_worker_project', 1, { status: 'failed' })
logger.error({ projectId, err }, 'backup failed')
throw err // Re-throw to mark job as failed
}
}
}

View File

@@ -0,0 +1,389 @@
---
mode: 'agent'
description: 'Generate a clear code explanation with examples'
---
# Improved Async/Await Migration Instructions
Based on lessons learned from PR #28840 and reviewer feedback, these comprehensive instructions address the original migration requirements while preventing common issues.
## Core Migration Principles
### 1. Function Signature Transformation
- Convert callback-style functions to async/await
- Remove callback parameters from function signatures
- Add `async` keyword to function declarations
- Replace `return callback(err, result)` with `throw err` or `return result`
- Replace `return callback()` with `return` (or `return undefined`)
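A minimal before/after sketch of these rules (the `getUserName` and `User` names are illustrative, not from the codebase):
```javascript
// BEFORE - callback style
function getUserName(userId, callback) {
User.findById(userId, function (err, user) {
if (err) {
return callback(err) // becomes `throw err` (implicit via await)
}
if (!user) {
return callback() // becomes a bare `return`
}
return callback(null, user.name) // becomes `return user.name`
})
}
// AFTER - async/await
async function getUserName(userId) {
const user = await User.promises.findById(userId) // errors now propagate as throws
if (!user) {
return
}
return user.name
}
```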
### 2. Error Handling Patterns
#### OError.tag Usage - CRITICAL UPDATE
**DO NOT** wrap simple operations in try/catch just to tag errors with OError. With async/await, the stack trace is preserved automatically, making OError.tag less necessary for basic error propagation.
```javascript
// OLD (callback style) - OError.tag was needed
someOperation(function (err) {
if (err) {
OError.tag(err, 'description', { context })
return callback(err)
}
callback()
})
// BAD (unnecessary with async/await)
try {
await operation()
} catch (err) {
throw OError.tag(err, 'description', { context })
}
// GOOD (let errors propagate naturally)
await operation()
// ONLY use OError.tag when adding meaningful context or transforming errors
try {
await complexOperation()
} catch (err) {
if (err.code === 'SPECIFIC_ERROR') {
throw OError.tag(err, 'meaningful context about why this failed', {
important_context: value
})
}
throw err // let other errors propagate unchanged
}
```
### 3. Concurrency Considerations - CRITICAL
#### Sequential vs Parallel Operations
**Be extremely cautious when converting from serial to parallel operations.** The original code's choice of sequential processing is often intentional.
```javascript
// OLD - Sequential processing (often intentional)
Async.mapSeries(items, processItem, callback)
// BAD - Unbounded parallel processing
await Promise.all(items.map(processItem))
// BETTER - Keep sequential if unsure about resource limits
for (const item of items) {
await processItem(item)
}
// GOOD - Controlled batch processing for performance
const BATCH_SIZE = 10
for (let i = 0; i < items.length; i += BATCH_SIZE) {
const batch = items.slice(i, i + BATCH_SIZE)
await Promise.all(batch.map(processItem))
}
// IDEAL - Use Redis MGET for multiple key retrieval
// Instead of: Promise.all(keys.map(k => redis.get(k)))
const values = await redis.mget(keys)
```
#### Database/Redis Operation Guidelines
- **Never** send unbounded parallel requests to databases
- **Prefer** sequential processing for database operations unless there's a specific performance need
- **Consider** batch operations (like Redis MGET/MSET) for multiple operations
- **Implement** maximum concurrency limits when parallel processing is necessary
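When bounded parallelism is genuinely needed and no batch command exists, a small in-file limiter caps the number of operations in flight. A minimal sketch (`mapWithConcurrency` is an illustrative helper, not an existing utility):
```javascript
// Run `mapper` over `items` with at most `limit` operations in flight.
async function mapWithConcurrency(items, limit, mapper) {
const results = new Array(items.length)
let next = 0
async function worker() {
while (next < items.length) {
const i = next++ // safe: no await between read and increment
results[i] = await mapper(items[i], i)
}
}
const workerCount = Math.min(limit, items.length)
await Promise.all(Array.from({ length: workerCount }, () => worker()))
return results
}
// Usage: at most 10 concurrent Redis GETs
// const values = await mapWithConcurrency(keys, 10, k => redis.get(k))
```
Compared to slicing into fixed batches, this keeps `limit` operations in flight continuously instead of waiting for the slowest item in each batch.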
### 4. Background Operations
#### Fire-and-Forget Pattern
When operations were called in the background (with empty callbacks), preserve this behavior:
```javascript
// OLD - Background operation with ignored callback
someOperation(user, function () {}) // errors swallowed
// GOOD - Preserve background behavior
someOperation(user).catch(err => {
logger.error({ err }, 'Failed to run background operation')
})
// Or if truly fire-and-forget:
someOperation(user).catch(() => {}) // explicitly ignore errors
```
### 5. Module Export Patterns
#### Using callbackifyAll for Dual API
```javascript
const { callbackifyAll } = require('@overleaf/promise-utils')
const MyModule = {
async myMethod(param) {
// async implementation
},
}
const moduleExports = {
...callbackifyAll(MyModule), // callback API
promises: MyModule, // promise API
}
module.exports = moduleExports
```
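Call sites can then pick whichever interface fits. A usage sketch (`handleError` is illustrative):
```javascript
const MyModule = require('./MyModule')
// Legacy callback API (top-level exports are callbackified)
MyModule.myMethod(param, function (err, result) {
if (err) return handleError(err)
// use result
})
// Promise API, e.g. inside an async function
const result = await MyModule.promises.myMethod(param)
```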
#### Internal Method Stubbing (for testing)
**Only** add method binding patterns when tests need to stub internal method calls:
```javascript
// ONLY if tests need to stub internal calls to _internalMethod
MyModule._internalMethod = (...args) => moduleExports._internalMethod(...args)
```
**Do NOT** expose internal methods at the top level - they should be accessible via `moduleExports.promises._internalMethod`.
### 6. Test Migration Patterns
#### Async Test Conversion
```javascript
// OLD
it('should do something', function (done) {
MyModule.method(param, function (err, result) {
expect(err).to.not.exist
expect(result).to.equal(expected)
done()
})
})
// NEW
it('should do something', async function () {
const result = await MyModule.promises.method(param)
expect(result).to.equal(expected)
})
```
#### Mock/Stub Patterns
```javascript
// For Redis or database mocks, ensure method chaining works
beforeEach(function () {
redis.multi = sinon.stub().returns({
sadd: sinon.stub().returnsThis(),
pexpire: sinon.stub().returnsThis(),
exec: sinon.stub().resolves(),
})
})
```
### 7. Specific Redis Patterns
#### Multi-Transaction Operations
```javascript
// Correct pattern for Redis multi operations
const multi = redis.multi()
multi.sadd(key, value)
multi.pexpire(key, ttl)
await multi.exec()
```
#### Single vs Multiple Key Operations
```javascript
// BAD - Multiple individual operations
const values = await Promise.all(keys.map(k => redis.get(k)))
// GOOD - Use batch operations when available
const values = await redis.mget(keys)
```
## Migration Checklist
### Before Starting
- [ ] Understand the original code's concurrency patterns
- [ ] Identify any background operations that should remain non-blocking
- [ ] Check if Redis batch operations can replace individual operations
- [ ] Look for internal method calls that might need test stubbing
### During Migration
- [ ] Convert function signatures (remove callbacks, add async)
- [ ] Replace callback patterns with await
- [ ] Handle early returns properly
- [ ] Preserve sequential processing unless there's a clear performance benefit
- [ ] Keep background operations non-blocking
- [ ] Avoid unnecessary OError.tag wrapping
- [ ] Update JSDoc comments to remove callback parameters
### After Migration
- [ ] Run comprehensive tests (fix Docker/environment issues if needed)
- [ ] Verify all background operations still work correctly
- [ ] Check that internal method calls can be stubbed if needed
- [ ] Ensure database operations don't overwhelm resources
- [ ] Validate error handling preserves meaningful context
- [ ] **Remove all decaffeinate artifacts from both implementation AND test files**
- [ ] Add explanatory comments for any non-obvious technical patterns
- [ ] Avoid selfRef patterns - use module exports routing instead
### Test Migration
- [ ] **Run tests EARLY and OFTEN during migration process**
- [ ] Convert test functions to async
- [ ] Update assertion patterns
- [ ] Fix mock/stub configurations for chained operations (Redis multi, etc.)
- [ ] Verify all test scenarios still pass
- [ ] Remove duplicate or unnecessary mock setups
- [ ] Clean up decaffeinate comments from test files
- [ ] Ensure internal method stubs work through promises interface
## Critical Lessons from Real Migration Experience
### 1. Testing Environment Issues
**ALWAYS run tests early and often during migration.** Don't wait until the end.
Common test running problems:
- Docker containers may need cleanup: `docker system prune -f`
- Use specific test grep patterns: `MOCHA_GREP="ModuleName" make test_unit_app`
- Mock objects must return proper objects for chaining (e.g., `multi()` must return `{method: stub().returnsThis(), ...}`)
### 2. Method Stubbing for Internal Calls
When methods call other methods internally, tests may need to stub those calls:
```javascript
// If methodA() calls methodB() internally and tests need to verify this:
// DON'T do this - creates unnecessary complexity:
const selfRef = { ... }; // BAD pattern
// DO this - route through the module exports interface:
moduleExports.promises.methodB(params).catch(err => {
logger.error({ err }, 'Failed to run background operation')
})
// Add a brief comment explaining the routing pattern:
// Route through moduleExports so tests can stub this call
```
### 3. Avoid the selfRef Pattern
The `selfRef` pattern should be avoided - it's a code smell that indicates better module structure is needed:
```javascript
// BAD - selfRef pattern
const selfRef = {}
selfRef.methodA = async function() {
await selfRef.methodB() // circular reference
}
// GOOD - route through module exports when stubbing is needed
async methodA() {
await moduleExports.promises.methodB() // testable
}
```
### 4. Complete Decaffeinate Cleanup
**Remove ALL legacy CoffeeScript artifacts** - this is a required part of the migration.
Look for and remove patterns like these (exact format may vary):
```javascript
/* eslint-disable */
// TODO: This file was created by bulk-decaffeinate.
/* eslint-disable
camelcase,
n/handle-callback-err,
max-len,
no-return-assign,
no-unused-vars,
*/
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
```
**Search patterns to look for:**
- Comments containing "bulk-decaffeinate" or "decaffeinate"
- Large `/* eslint-disable */` blocks at the top of files
- Comments about "Fix any style issues and re-enable lint"
- "decaffeinate suggestions" comment blocks
Check **both implementation AND test files** for these artifacts.
### 5. Add Explanatory Comments for Non-Obvious Code
When you need to write "ugly" code for unavoidable technical reasons, add a brief comment explaining why:
```javascript
// Background operation - preserve fire-and-forget behavior
// Route through moduleExports so tests can stub this call
moduleExports.promises._checkSessions(user).catch(err => {
logger.error({ err }, 'Failed to check sessions in background')
})
```
This prevents future developers from "refactoring" the code and breaking functionality.
### 6. Simplify OError.tag() Usage
**With async/await, many OError.tag() wrappers can be removed.** OError.tag() was primarily used to preserve stack traces across callback boundaries, but async/await handles this automatically.
```javascript
// BEFORE - callback era (needed OError.tag for stack traces)
try {
await redis.multi().sadd(key, value).exec()
} catch (err) {
throw OError.tag(err, 'error adding to redis set', { key })
}
// AFTER - async/await preserves stack traces naturally
await redis.multi().sadd(key, value).exec()
```
**Keep OError.tag() only when:**
- Adding meaningful context that aids debugging
- Transforming low-level errors into domain-specific errors
- The wrapper adds significant value beyond just a descriptive message
**Remove OError.tag() when:**
- The error message doesn't add meaningful context
- The tag message just restates what the code obviously does
- Stack trace preservation was the only benefit
## Common Pitfalls to Avoid
1. **Over-parallelization**: Don't convert all sequential operations to parallel
2. **Unnecessary error wrapping**: Don't wrap every operation in try/catch just for OError.tag
3. **Breaking background operations**: Maintain fire-and-forget behavior where intended
4. **Exposing internal methods incorrectly**: Use the promises interface, not top-level exports
5. **Resource exhaustion**: Be mindful of database connection limits and Redis performance
6. **Test mock complexity**: Keep mocks simple and targeted to what's actually needed
7. **Using selfRef patterns**: Always route through module exports instead
8. **Forgetting decaffeinate cleanup**: Remove all legacy comments and eslint disables
9. **Not running tests early**: Run tests frequently during migration, not just at the end
10. **Missing explanatory comments**: Add brief comments for non-obvious technical patterns
## Success Metrics
- All existing tests pass without modification (except for async conversion)
- No new resource exhaustion issues under load
- Background operations continue to work as intended
- Error messages and logging remain informative
- Internal method stubbing works correctly for testing
- Code is cleaner and more maintainable than before
These instructions should be applied systematically, with careful consideration of the specific context and requirements of each module being migrated.

View File

@@ -21,13 +21,28 @@ pipeline {
DOCKER_COMPOSE_FLAGS = '-f docker-compose.ci.yml'
BRANCH_NAME = "${env.CHANGE_BRANCH ? env.CHANGE_BRANCH : env.BRANCH_NAME}"
JENKINS_BUILD_NUMBER = "${BUILD_NUMBER}" // preserve original BUILD_NUMBER
BUILD_NUMBER = "${SHORT_SHA}_${BUILD_NUMBER}" // mimic the format used for Cloud Build
COMMIT_SHA = "${GIT_COMMIT}"
SHORT_SHA = "${GIT_COMMIT.take(7)}"
CDN_STAG = "gs://ol-stag-web-assets-1"
CDN_PROD = "gs://mgcp-1117973-ol-prod-web-assets-1"
}
stages {
stage('Set Build Variables') {
steps {
script {
def relevantCommitHash
if (env.CHANGE_BRANCH) {
echo "PR build detected. Jenkins checked out a merge commit: ${GIT_COMMIT}"
relevantCommitHash = sh(script: 'git rev-parse HEAD^1', returnStdout: true).trim()
echo "Found actual branch commit: ${relevantCommitHash}"
} else {
echo "Branch build detected. Using commit: ${GIT_COMMIT}"
relevantCommitHash = "${GIT_COMMIT}"
}
env.COMMIT_SHA = relevantCommitHash
env.SHORT_SHA = relevantCommitHash.take(7)
env.BUILD_NUMBER = "${env.SHORT_SHA}_${env.JENKINS_BUILD_NUMBER}"
}
}
}
stage('Stage 1') {
parallel {
stage('Prefetch Tests Images') {

View File

@@ -9,6 +9,13 @@ const Settings = require('@overleaf/settings')
const OError = require('@overleaf/o-error')
const { NotFoundError, InvalidNameError } = require('../Errors/Errors')
const TIMEOUT = 4_000
/**
* @type {Map<string, number>}
*/
const lastFailures = new Map()
/**
* Keep in sync with validateFilename in services/clsi-cache/app/js/utils.js
*
@@ -70,7 +77,7 @@ async function clearCache(projectId, userId) {
try {
await fetchNothing(u, {
method: 'DELETE',
signal: AbortSignal.timeout(15_000),
signal: AbortSignal.timeout(TIMEOUT),
})
} catch (err) {
throw OError.tag(err, 'clear clsi-cache', { url, shard })
@@ -94,7 +101,7 @@ async function getOutputFile(
userId,
buildId,
filename,
signal = AbortSignal.timeout(15_000)
signal = AbortSignal.timeout(TIMEOUT)
) {
validateFilename(filename)
if (!/^[a-f0-9-]+$/.test(buildId)) {
@@ -122,7 +129,7 @@ async function getLatestOutputFile(
projectId,
userId,
filename,
signal = AbortSignal.timeout(15_000)
signal = AbortSignal.timeout(TIMEOUT)
) {
validateFilename(filename)
@@ -154,11 +161,23 @@ async function getRedirectWithFallback(
projectId,
userId,
path,
signal = AbortSignal.timeout(15_000)
signal = AbortSignal.timeout(TIMEOUT)
) {
// Avoid hitting the same instance first all the time.
const instances = _.shuffle(Settings.apis.clsiCache.instances)
for (const { url, shard } of instances) {
if (signal.aborted) {
break // Stop trying the next backend when the signal has expired.
}
const lastFailure = lastFailures.get(url) ?? 0
if (lastFailure) {
// Circuit breaker that avoids retries for 4-16s.
const retryDelay = TIMEOUT * (1 + 3 * Math.random())
if (performance.now() - lastFailure < retryDelay) {
continue
}
}
const u = new URL(url)
u.pathname = path
try {
@@ -168,6 +187,7 @@ async function getRedirectWithFallback(
} = await fetchRedirectWithResponse(u, {
signal,
})
lastFailures.delete(url) // The shard is back up.
let allFilesRaw = headers.get('X-All-Files')
if (!allFilesRaw.startsWith('[')) {
allFilesRaw = Buffer.from(allFilesRaw, 'base64url').toString()
@@ -183,8 +203,10 @@ async function getRedirectWithFallback(
}
} catch (err) {
if (err instanceof RequestFailedError && err.response.status === 404) {
lastFailures.delete(url) // The shard is back up.
break // No clsi-cache instance has cached something for this project/user.
}
lastFailures.set(url, performance.now()) // The shard is unhealthy. Refresh timestamp of last failure.
logger.warn(
{ err, projectId, userId, url, shard },
'getLatestOutputFile from clsi-cache failed'
@@ -239,6 +261,7 @@ async function prepareCacheSource(
}
module.exports = {
TIMEOUT,
getEgressLabel,
clearCache,
getOutputFile,

View File

@@ -195,7 +195,7 @@ async function prepareClsiCache(
const features = await UserGetter.promises.getUserFeatures(userId)
if (features.compileGroup !== 'priority') return
const signal = AbortSignal.timeout(5_000)
const signal = AbortSignal.timeout(ClsiCacheHandler.TIMEOUT)
let lastUpdated
let shard = _.shuffle(Settings.apis.clsiCache.instances)[0].shard
if (sourceProjectId) {

View File

@@ -92,10 +92,7 @@ async function _getSplitTestOptions(req, res) {
'clsi-cache-prompt'
))
populateClsiCache = variant === 'enabled'
if (res.locals.splitTestInfo?.['clsi-cache-prompt']?.active) {
// Start using the cache when the split-test for the prompts is activated.
compileFromClsiCache = populateClsiCache
}
compileFromClsiCache = populateClsiCache
}
const pdfDownloadDomain = Settings.pdfDownloadDomain

View File

@@ -19,12 +19,10 @@ import LinkedFilesErrors from './LinkedFilesErrors.mjs'
import {
OutputFileFetchFailedError,
FileTooLargeError,
OError,
} from '../Errors/Errors.js'
import Modules from '../../infrastructure/Modules.js'
import { plainTextResponse } from '../../infrastructure/Response.js'
import { z, zz, validateReq } from '../../infrastructure/Validation.js'
import ReferencesHandler from '../References/ReferencesHandler.mjs'
import EditorRealTimeController from '../Editor/EditorRealTimeController.js'
import { expressify } from '@overleaf/promise-utils'
import ProjectOutputFileAgent from './ProjectOutputFileAgent.mjs'
@@ -142,26 +140,16 @@ async function refreshLinkedFile(req, res, next) {
}
if (req.body.shouldReindexReferences) {
let data
try {
data = await ReferencesHandler.promises.indexAll(projectId)
} catch (error) {
OError.tag(error, 'failed to index references', {
projectId,
})
return next(error)
}
// Signal to clients that they should re-index references
EditorRealTimeController.emitToRoom(
projectId,
'references:keys:updated',
data.keys,
[],
true,
clientId
)
res.json({ new_file_id: newFileId })
} else {
res.json({ new_file_id: newFileId })
}
res.json({ new_file_id: newFileId })
}
export default LinkedFilesController = {

View File

@@ -405,6 +405,7 @@ const _ProjectController = {
'editor-redesign-new-users',
'writefull-frontend-migration',
'chat-edit-delete',
'compile-timeout-remove-info',
].filter(Boolean)
const getUserValues = async userId =>

View File

@@ -1,63 +1,20 @@
/* eslint-disable
max-len,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import ReferencesHandler from './ReferencesHandler.mjs'
import EditorRealTimeController from '../Editor/EditorRealTimeController.js'
import { OError } from '../Errors/Errors.js'
let ReferencesController
export default ReferencesController = {
export default {
indexAll(req, res, next) {
const projectId = req.params.Project_id
const { shouldBroadcast, clientId } = req.body
return ReferencesHandler.indexAll(projectId, function (error, data) {
if (error) {
OError.tag(error, 'failed to index references', { projectId })
return next(error)
}
return ReferencesController._handleIndexResponse(
req,
res,
projectId,
shouldBroadcast,
true,
data,
clientId
)
})
},
_handleIndexResponse(
req,
res,
projectId,
shouldBroadcast,
isAllDocs,
data,
clientId
) {
if (data == null || data.keys == null) {
return res.json({ projectId, keys: [] })
}
// We've migrated to client side indexing, so we only use the message for
// broadcasting that the clients need to re-index.
if (shouldBroadcast) {
EditorRealTimeController.emitToRoom(
projectId,
'references:keys:updated',
data.keys,
isAllDocs,
[],
true,
clientId
)
}
return res.json(data)
res.json({ projectId, keys: [] })
},
}

View File

@@ -1,214 +0,0 @@
/* eslint-disable
n/handle-callback-err,
max-len,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import OError from '@overleaf/o-error'
import logger from '@overleaf/logger'
import request from 'request'
import settings from '@overleaf/settings'
import Features from '../../infrastructure/Features.js'
import ProjectGetter from '../Project/ProjectGetter.mjs'
import UserGetter from '../User/UserGetter.js'
import DocumentUpdaterHandler from '../DocumentUpdater/DocumentUpdaterHandler.mjs'
import _ from 'lodash'
import Async from 'async'
import Errors from '../Errors/Errors.js'
import { promisify } from '@overleaf/promise-utils'
let ReferencesHandler
if (!Features.hasFeature('references')) {
logger.debug('references search not enabled')
}
export default ReferencesHandler = {
_buildDocUrl(projectId, docId) {
return {
url: `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}/raw`,
}
},
_findBibFileRefs(project) {
const fileRefs = []
function _process(folder) {
_.forEach(folder.fileRefs || [], function (file) {
if (
__guard__(file != null ? file.name : undefined, x1 =>
x1.match(/^.*\.bib$/)
)
) {
return fileRefs.push(file)
}
})
return _.forEach(folder.folders || [], folder => _process(folder))
}
_.forEach(project.rootFolder || [], rootFolder => _process(rootFolder))
return fileRefs
},
_findBibDocIds(project) {
const ids = []
function _process(folder) {
_.forEach(folder.docs || [], function (doc) {
if (
__guard__(doc != null ? doc.name : undefined, x1 =>
x1.match(/^.*\.bib$/)
)
) {
return ids.push(doc._id)
}
})
return _.forEach(folder.folders || [], folder => _process(folder))
}
_.forEach(project.rootFolder || [], rootFolder => _process(rootFolder))
return ids
},
_isFullIndex(project, callback) {
if (callback == null) {
callback = function () {}
}
return UserGetter.getUser(
project.owner_ref,
{ features: true },
function (err, owner) {
if (err != null) {
return callback(err)
}
const features = owner != null ? owner.features : undefined
return callback(
null,
(features != null ? features.references : undefined) === true ||
(features != null ? features.referencesSearch : undefined) === true
)
}
)
},
indexAll(projectId, callback) {
if (callback == null) {
callback = function () {}
}
return ProjectGetter.getProject(
projectId,
{ rootFolder: true, owner_ref: 1, 'overleaf.history.id': 1 },
function (err, project) {
if (err) {
OError.tag(err, 'error finding project', {
projectId,
})
return callback(err)
}
if (!project) {
return callback(
new Errors.NotFoundError(`project does not exist: ${projectId}`)
)
}
logger.debug({ projectId }, 'indexing all bib files in project')
const docIds = ReferencesHandler._findBibDocIds(project)
const fileRefs = ReferencesHandler._findBibFileRefs(project)
return ReferencesHandler._doIndexOperation(
projectId,
project,
docIds,
fileRefs,
callback
)
}
)
},
_doIndexOperation(projectId, project, docIds, fileRefs, callback) {
if (!Features.hasFeature('references')) {
return callback()
}
const historyId = project?.overleaf?.history?.id
if (!historyId) {
return callback(
new OError('project does not have a history id', { projectId })
)
}
return ReferencesHandler._isFullIndex(project, function (err, isFullIndex) {
if (err) {
OError.tag(err, 'error checking whether to do full index', {
projectId,
})
return callback(err)
}
logger.debug(
{ projectId, docIds },
'flushing docs to mongo before calling references service'
)
return Async.series(
docIds.map(
docId => cb =>
DocumentUpdaterHandler.flushDocToMongo(projectId, docId, cb)
),
function (err) {
// continue
if (err) {
OError.tag(err, 'error flushing docs to mongo', {
projectId,
docIds,
})
return callback(err)
}
const bibDocUrls = docIds.map(docId =>
ReferencesHandler._buildDocUrl(projectId, docId)
)
const bibFileUrls = fileRefs.map(fileRef => ({
url: `${settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`,
}))
const sourceURLs = bibDocUrls.concat(bibFileUrls)
return request.post(
{
url: `${settings.apis.references.url}/project/${projectId}/index`,
json: {
docUrls: sourceURLs.map(item => item.url),
sourceURLs,
fullIndex: isFullIndex,
},
},
function (err, res, data) {
if (err) {
OError.tag(err, 'error communicating with references api', {
projectId,
})
return callback(err)
}
if (res.statusCode >= 200 && res.statusCode < 300) {
logger.debug({ projectId }, 'got keys from references api')
return callback(null, data)
} else {
err = new Error(
`references api responded with non-success code: ${res.statusCode}`
)
return callback(err)
}
}
)
}
)
})
},
}
ReferencesHandler.promises = {
indexAll: promisify(ReferencesHandler.indexAll),
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

View File

@@ -13,7 +13,7 @@
* @property {number} subtotal
* @property {number} discount
* @property {number} tax
* @property {boolean} isAiAssist
* @property {boolean} [isAiAssist]
*/
const OError = require('@overleaf/o-error')

View File

@@ -747,7 +747,19 @@ async function previewSubscription(req, res, next) {
}
// TODO: use PaymentService to fetch plan information
const plan = await RecurlyClient.promises.getPlan(planCode)
const userId = SessionManager.getLoggedInUserId(req.session)
const user = SessionManager.getSessionUser(req.session)
const userId = user?._id
let trialDisabledReason
if (planCode.includes('_free_trial')) {
const trialEligibility = (
await Modules.promises.hooks.fire('userCanStartTrial', user)
)?.[0]
if (!trialEligibility.canStartTrial) {
trialDisabledReason = trialEligibility.disabledReason
}
}
const subscriptionChange =
await SubscriptionHandler.promises.previewSubscriptionChange(
userId,
@@ -770,6 +782,7 @@ async function previewSubscription(req, res, next) {
res.render('subscriptions/preview-change', {
changePreview,
redirectedPaymentErrorCode: req.query.errorCode,
trialDisabledReason,
})
}

View File

@@ -1,9 +1,7 @@
const OError = require('@overleaf/o-error')
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const Async = require('async')
const _ = require('lodash')
const { promisify } = require('util')
const { callbackifyAll } = require('@overleaf/promise-utils')
const UserSessionsRedis = require('./UserSessionsRedis')
const rclient = UserSessionsRedis.client()
@@ -13,246 +11,157 @@ const UserSessionsManager = {
return `sess:${sessionId}`
},
trackSession(user, sessionId, callback) {
async trackSession(user, sessionId) {
if (!user) {
return callback(null)
return
}
if (!sessionId) {
return callback(null)
return
}
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
const value = UserSessionsManager._sessionKey(sessionId)
rclient
.multi()
.sadd(sessionSetKey, value)
.pexpire(sessionSetKey, `${Settings.cookieSessionLength}`) // in milliseconds
.exec(function (err, response) {
if (err) {
OError.tag(
err,
'error while adding session key to UserSessions set',
{
user_id: user._id,
sessionSetKey,
}
)
return callback(err)
}
UserSessionsManager._checkSessions(user, function () {})
callback()
})
const multi = rclient.multi()
multi.sadd(sessionSetKey, value)
multi.pexpire(sessionSetKey, `${Settings.cookieSessionLength}`) // in milliseconds
await multi.exec()
UserSessionsManager._checkSessions(user).catch(err => {
logger.error({ err }, 'Failed to check sessions in background')
})
},
untrackSession(user, sessionId, callback) {
if (!callback) {
callback = function () {}
}
async untrackSession(user, sessionId) {
if (!user) {
return callback(null)
return
}
if (!sessionId) {
return callback(null)
return
}
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
const value = UserSessionsManager._sessionKey(sessionId)
rclient
.multi()
.srem(sessionSetKey, value)
.pexpire(sessionSetKey, `${Settings.cookieSessionLength}`) // in milliseconds
.exec(function (err, response) {
if (err) {
OError.tag(
err,
'error while removing session key from UserSessions set',
{
user_id: user._id,
sessionSetKey,
}
)
return callback(err)
}
UserSessionsManager._checkSessions(user, function () {})
callback()
})
const multi = rclient.multi()
multi.srem(sessionSetKey, value)
multi.pexpire(sessionSetKey, `${Settings.cookieSessionLength}`) // in milliseconds
await multi.exec()
UserSessionsManager._checkSessions(user).catch(err => {
logger.error({ err }, 'Failed to check sessions in background')
})
},
getAllUserSessions(user, exclude, callback) {
async getAllUserSessions(user, exclude) {
exclude = _.map(exclude, UserSessionsManager._sessionKey)
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
rclient.smembers(sessionSetKey, function (err, sessionKeys) {
if (err) {
OError.tag(err, 'error getting all session keys for user from redis', {
user_id: user._id,
})
return callback(err)
const sessionKeys = await rclient.smembers(sessionSetKey)
const filteredSessionKeys = _.filter(
sessionKeys,
k => !_.includes(exclude, k)
)
if (filteredSessionKeys.length === 0) {
logger.debug({ userId: user._id }, 'no other sessions found, returning')
return []
}
// Use sequential processing to avoid overwhelming Redis
const sessions = []
for (const key of filteredSessionKeys) {
const session = await rclient.get(key)
sessions.push(session)
}
const result = []
for (let session of sessions) {
if (!session) {
continue
}
sessionKeys = _.filter(sessionKeys, k => !_.includes(exclude, k))
if (sessionKeys.length === 0) {
logger.debug({ userId: user._id }, 'no other sessions found, returning')
return callback(null, [])
session = JSON.parse(session)
let sessionUser = session.passport && session.passport.user
if (!sessionUser) {
sessionUser = session.user
}
Async.mapSeries(
sessionKeys,
(k, cb) => rclient.get(k, cb),
function (err, sessions) {
if (err) {
OError.tag(err, 'error getting all sessions for user from redis', {
user_id: user._id,
})
return callback(err)
}
result.push({
ip_address: sessionUser.ip_address,
session_created: sessionUser.session_created,
})
}
const result = []
for (let session of Array.from(sessions)) {
if (!session) {
continue
}
session = JSON.parse(session)
let sessionUser = session.passport && session.passport.user
if (!sessionUser) {
sessionUser = session.user
}
result.push({
ip_address: sessionUser.ip_address,
session_created: sessionUser.session_created,
})
}
callback(null, result)
}
)
})
return result
},
/**
* @param {{_id: string}} user
* @param {string | null | undefined} retainSessionID - the session ID to exclude from deletion
* @param {(err: Error | null, data?: unknown) => void} callback
*/
removeSessionsFromRedis(user, retainSessionID, callback) {
async removeSessionsFromRedis(user, retainSessionID) {
if (!user) {
return callback(
new Error('bug: user not passed to removeSessionsFromRedis')
)
throw new Error('bug: user not passed to removeSessionsFromRedis')
}
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
rclient.smembers(sessionSetKey, function (err, sessionKeys) {
if (err) {
OError.tag(err, 'error getting contents of UserSessions set', {
user_id: user._id,
sessionSetKey,
})
return callback(err)
}
const keysToDelete = retainSessionID
? _.without(
sessionKeys,
UserSessionsManager._sessionKey(retainSessionID)
)
: sessionKeys
if (keysToDelete.length === 0) {
logger.debug(
{ userId: user._id },
'no sessions in UserSessions set to delete, returning'
)
return callback(null, 0)
}
const sessionKeys = await rclient.smembers(sessionSetKey)
const keysToDelete = retainSessionID
? _.without(sessionKeys, UserSessionsManager._sessionKey(retainSessionID))
: sessionKeys
if (keysToDelete.length === 0) {
logger.debug(
{ userId: user._id, count: keysToDelete.length },
'deleting sessions for user'
{ userId: user._id },
'no sessions in UserSessions set to delete, returning'
)
const deletions = keysToDelete.map(k => cb => rclient.del(k, cb))
Async.series(deletions, function (err, _result) {
if (err) {
OError.tag(err, 'error revoking all sessions for user', {
user_id: user._id,
sessionSetKey,
})
return callback(err)
}
rclient.srem(sessionSetKey, keysToDelete, function (err) {
if (err) {
OError.tag(err, 'error removing session set for user', {
user_id: user._id,
sessionSetKey,
})
return callback(err)
}
callback(null, keysToDelete.length)
})
})
})
},
touch(user, callback) {
if (!user) {
return callback(null)
return 0
}
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
rclient.pexpire(
sessionSetKey,
`${Settings.cookieSessionLength}`, // in milliseconds
function (err, response) {
if (err) {
OError.tag(err, 'error while updating ttl on UserSessions set', {
user_id: user._id,
})
return callback(err)
}
callback(null)
}
logger.debug(
{ userId: user._id, count: keysToDelete.length },
'deleting sessions for user'
)
// Use sequential processing to avoid overwhelming Redis
for (const key of keysToDelete) {
await rclient.del(key)
}
await rclient.srem(sessionSetKey, keysToDelete)
return keysToDelete.length
},
_checkSessions(user, callback) {
async touch(user) {
if (!user) {
return callback(null)
return
}
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
rclient.smembers(sessionSetKey, function (err, sessionKeys) {
if (err) {
OError.tag(err, 'error getting contents of UserSessions set', {
user_id: user._id,
sessionSetKey,
})
return callback(err)
await rclient.pexpire(sessionSetKey, `${Settings.cookieSessionLength}`)
},
async _checkSessions(user) {
if (!user) {
return
}
const sessionSetKey = UserSessionsRedis.sessionSetKey(user)
const sessionKeys = await rclient.smembers(sessionSetKey)
// Use sequential processing to avoid overwhelming Redis
for (const key of sessionKeys) {
const val = await rclient.get(key)
if (!val) {
await rclient.srem(sessionSetKey, key)
}
Async.series(
sessionKeys.map(
key => next =>
rclient.get(key, function (err, val) {
if (err) {
return next(err)
}
if (!val) {
rclient.srem(sessionSetKey, key, function (err, result) {
return next(err)
})
} else {
next()
}
})
),
function (err, results) {
callback(err)
}
)
})
}
},
}
UserSessionsManager.promises = {
getAllUserSessions: promisify(UserSessionsManager.getAllUserSessions),
removeSessionsFromRedis: (user, retainSessionID = null) =>
promisify(UserSessionsManager.removeSessionsFromRedis)(
user,
retainSessionID
),
untrackSession: promisify(UserSessionsManager.untrackSession),
module.exports = {
...callbackifyAll(UserSessionsManager),
promises: UserSessionsManager,
}
module.exports = UserSessionsManager

View File

@@ -14,8 +14,6 @@ const trackChangesModuleAvailable =
* @property {Object | undefined} apis
* @property {Object | undefined} apis.linkedUrlProxy
* @property {string | undefined} apis.linkedUrlProxy.url
* @property {Object | undefined} apis.references
* @property {string | undefined} apis.references.url
* @property {boolean | undefined} enableGithubSync
* @property {boolean | undefined} enableGitBridge
* @property {boolean | undefined} enableHomepage

View File

@@ -16,6 +16,11 @@ block append meta
data-type='string'
content=redirectedPaymentErrorCode
)
meta(
name='ol-trialDisabledReason'
data-type='string'
content=trialDisabledReason
)
block content
main#main-content.content.content-alt

View File

@@ -236,7 +236,6 @@
"center": "",
"change": "",
"change_currency": "",
"change_email": "",
"change_language": "",
"change_or_cancel-cancel": "",
"change_or_cancel-change": "",
@@ -248,6 +247,7 @@
"change_primary_email": "",
"change_primary_email_address_instructions": "",
"change_project_owner": "",
"change_role_and_department": "",
"change_the_ownership_of_your_personal_projects": "",
"change_to_group_plan": "",
"change_to_this_plan": "",
@@ -1406,7 +1406,6 @@
"reactivating": "",
"read_lines_from_path": "",
"read_more": "",
"read_more_about_free_compile_timeouts_servers": "",
"read_more_about_managed_users": "",
"read_only_dropbox_sync_message": "",
"read_only_token": "",
@@ -2165,7 +2164,6 @@
"were_making_some_changes_to_project_sharing_this_means_you_will_be_visible": "",
"were_performing_maintenance": "",
"weve_redesigned_our_editor_to_make_it_easier_to_use_and_future_ready": "",
"weve_reduced_compile_timeout": "",
"what_did_you_find_most_helpful": "",
"what_do_you_need_help_with": "",
"what_does_this_mean_for_you": "",
@@ -2286,6 +2284,7 @@
"youre_adding_x_licenses_to_your_plan_giving_you_a_total_of_y_licenses": "",
"youre_already_setup_for_sso": "",
"youre_joining": "",
"youre_not_eligible_for_a_free_trial": "",
"youre_on_free_trial_which_ends_on": "",
"youre_signed_in_as_logout": "",
"youve_added_more_licenses": "",

View File

@@ -7,21 +7,20 @@ import { useCallback, useMemo } from 'react'
import ErrorState from './error-state'
import StartFreeTrialButton from '@/shared/components/start-free-trial-button'
import getMeta from '@/utils/meta'
import {
populateEditorRedesignSegmentation,
useEditorAnalytics,
} from '@/shared/hooks/use-editor-analytics'
import { populateEditorRedesignSegmentation } from '@/shared/hooks/use-editor-analytics'
import {
isNewUser,
useIsNewEditorEnabled,
} from '@/features/ide-redesign/utils/new-editor-utils'
import { getSplitTestVariant } from '@/utils/splitTestUtils'
import { getSplitTestVariant, isSplitTestEnabled } from '@/utils/splitTestUtils'
export const ShortCompileTimeoutErrorState = () => {
const { t } = useTranslation()
const { isProjectOwner } = useCompileContext()
const { sendEvent } = useEditorAnalytics()
const newEditor = useIsNewEditorEnabled()
const shouldHideCompileTimeoutInfo = isSplitTestEnabled(
'compile-timeout-remove-info'
)
const { compileTimeout } = getMeta('ol-compileSettings')
const segmentation = useMemo(
@@ -37,14 +36,6 @@ export const ShortCompileTimeoutErrorState = () => {
[isProjectOwner, compileTimeout, newEditor]
)
const sendInfoClickEvent = useCallback(() => {
sendEvent('paywall-info-click', {
...segmentation,
'paywall-type': 'compile-timeout',
content: 'blog',
})
}, [segmentation, sendEvent])
const extraSearchParams = useMemo(() => {
if (!isNewUser()) {
return undefined
@@ -91,28 +82,11 @@ export const ShortCompileTimeoutErrorState = () => {
}
iconType="running_with_errors"
extraContent={
<div className="pdf-error-state-info-box">
<p>
<em>
<Trans
i18nKey="weve_reduced_compile_timeout"
components={[
/* eslint-disable-next-line jsx-a11y/anchor-has-content, react/jsx-key */
<a
aria-label={t(
'read_more_about_free_compile_timeouts_servers'
)}
href="/blog/changes-to-free-compile-timeout"
rel="noopener noreferrer"
target="_blank"
onClick={sendInfoClickEvent}
/>,
]}
/>
</em>
</p>
<ReasonsForTimeoutInfo />
</div>
!shouldHideCompileTimeoutInfo && (
<div className="pdf-error-state-info-box">
<ReasonsForTimeoutInfo />
</div>
)
}
actions={
isProjectOwner && (

View File

@@ -25,12 +25,11 @@ export default function RenderingErrorExpectedState() {
]}
/>
}
actions={[
// eslint-disable-next-line react/jsx-key
actions={
<OLButton variant="primary" size="sm" onClick={() => startCompile()}>
{t('recompile')}
</OLButton>,
]}
</OLButton>
}
extraContent={<ClsiCachePrompt />}
/>
)

View File

@@ -7,16 +7,13 @@ import { useStopOnFirstError } from '../../../shared/hooks/use-stop-on-first-err
import OLButton from '@/shared/components/ol/ol-button'
import * as eventTracking from '../../../infrastructure/event-tracking'
import getMeta from '@/utils/meta'
import {
populateEditorRedesignSegmentation,
useEditorAnalytics,
} from '@/shared/hooks/use-editor-analytics'
import { populateEditorRedesignSegmentation } from '@/shared/hooks/use-editor-analytics'
import {
isNewUser,
useIsNewEditorEnabled,
useIsNewErrorLogsPositionEnabled,
} from '@/features/ide-redesign/utils/new-editor-utils'
import { getSplitTestVariant } from '@/utils/splitTestUtils'
import { getSplitTestVariant, isSplitTestEnabled } from '@/utils/splitTestUtils'
function TimeoutUpgradePromptNew() {
const {
@@ -26,6 +23,9 @@ function TimeoutUpgradePromptNew() {
isProjectOwner,
} = useDetachCompileContext()
const newEditor = useIsNewEditorEnabled()
const shouldHideCompileTimeoutInfo = isSplitTestEnabled(
'compile-timeout-remove-info'
)
const { enableStopOnFirstError } = useStopOnFirstError({
eventSource: 'timeout-new',
@@ -58,13 +58,15 @@ function TimeoutUpgradePromptNew() {
isProjectOwner={isProjectOwner}
segmentation={sharedSegmentation}
/>
{getMeta('ol-ExposedSettings').enableSubscriptions && (
<PreventTimeoutHelpMessage
handleEnableStopOnFirstErrorClick={handleEnableStopOnFirstErrorClick}
lastCompileOptions={lastCompileOptions}
segmentation={sharedSegmentation}
/>
)}
{getMeta('ol-ExposedSettings').enableSubscriptions &&
!shouldHideCompileTimeoutInfo && (
<PreventTimeoutHelpMessage
handleEnableStopOnFirstErrorClick={
handleEnableStopOnFirstErrorClick
}
lastCompileOptions={lastCompileOptions}
/>
)}
</>
)
}
@@ -151,51 +153,21 @@ const CompileTimeout = memo(function CompileTimeout({
type PreventTimeoutHelpMessageProps = {
lastCompileOptions: any
handleEnableStopOnFirstErrorClick: () => void
segmentation: eventTracking.Segmentation
}
const PreventTimeoutHelpMessage = memo(function PreventTimeoutHelpMessage({
lastCompileOptions,
handleEnableStopOnFirstErrorClick,
segmentation,
}: PreventTimeoutHelpMessageProps) {
const { t } = useTranslation()
const { sendEvent } = useEditorAnalytics()
const newLogsPosition = useIsNewErrorLogsPositionEnabled()
-function sendInfoClickEvent() {
-sendEvent('paywall-info-click', {
-...segmentation,
-'paywall-type': 'compile-timeout',
-content: 'blog',
-})
-}
-const compileTimeoutChangesBlogLink = (
-/* eslint-disable-next-line jsx-a11y/anchor-has-content, react/jsx-key */
-<a
-aria-label={t('read_more_about_free_compile_timeouts_servers')}
-href="/blog/changes-to-free-compile-timeout"
-rel="noopener noreferrer"
-target="_blank"
-onClick={sendInfoClickEvent}
-/>
-)
return (
<PdfLogEntry
autoExpand={!newLogsPosition}
headerTitle={t('reasons_for_compile_timeouts')}
formattedContent={
<>
-<p>
-<em>
-<Trans
-i18nKey="weve_reduced_compile_timeout"
-components={[compileTimeoutChangesBlogLink]}
-/>
-</em>
-</p>
<p>{t('common_causes_of_compile_timeouts_include')}:</p>
<ul>
<li>


@@ -115,7 +115,7 @@ function InstitutionAndRole({ userEmailData }: InstitutionAndRoleProps) {
>
{!affiliation.department && !affiliation.role
? t('add_role_and_department')
-: t('change_email')}
+: t('change_role_and_department')}
</OLButton>
</div>
) : (


@@ -23,6 +23,7 @@ import { useFeatureFlag } from '@/shared/context/split-test-context'
import PaymentErrorNotification from '@/features/subscription/components/shared/payment-error-notification'
import handleStripePaymentAction from '../../util/handle-stripe-payment-action'
import RedirectedPaymentErrorNotification from '../shared/redirected-payment-error-notification'
+import TrialDisabledNotification from './trial-disabled-notification'
function PreviewSubscriptionChange() {
const preview = getMeta(
@@ -39,6 +40,7 @@ function PreviewSubscriptionChange() {
(item, index, arr) => {
if (!item.isAiAssist) return true
+// TODO: this can be removed when all subscriptions are using Stripe
const isCanceledByAnotherItem = arr.some(
(otherItem, otherIndex) =>
otherIndex !== index &&
@@ -100,6 +102,7 @@ function PreviewSubscriptionChange() {
<OLRow>
<OLCol md={{ offset: 2, span: 8 }}>
<RedirectedPaymentErrorNotification />
+<TrialDisabledNotification />
<OLCard className="p-3">
{preview.change.type === 'add-on-purchase' ? (
<h1>


@@ -0,0 +1,21 @@
import { useTranslation } from 'react-i18next'
import OLNotification from '@/shared/components/ol/ol-notification'
import getMeta from '@/utils/meta'
export default function TrialDisabledNotification() {
const { t } = useTranslation()
const trialDisabledReason = getMeta('ol-trialDisabledReason')
if (!trialDisabledReason) {
return null
}
return (
<OLNotification
className="mb-4"
aria-live="polite"
content={t('youre_not_eligible_for_a_free_trial')}
type="warning"
/>
)
}
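The new component renders nothing unless the server injected an ol-trialDisabledReason meta value, keeping the markup out of the page for trial-eligible users. A minimal render check, assuming a metaAttributesCache map is how the test setup stubs getMeta values (treat that mechanism and the reason string as assumptions of this sketch):

import { render, screen } from '@testing-library/react'
import TrialDisabledNotification from './trial-disabled-notification'

// Stub the meta value the component reads; the cache name and the
// reason string are assumptions of this sketch.
;(window as any).metaAttributesCache = new Map([
  ['ol-trialDisabledReason', 'standalone-add-on'],
])
render(<TrialDisabledNotification />)
screen.getByText(/not eligible for a free trial/i)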


@@ -67,6 +67,8 @@ function sentryReporter() {
// Ignore a frequent unhandled promise rejection
/Non-Error promise rejection captured with keys: currentTarget, detail, isTrusted, target/,
/Non-Error promise rejection captured with keys: message, status/,
+// Ignore a frequent blocked image
+"Blocked 'image' from 'www.googletagmanager.com'",
],
denyUrls: [
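For context on the entry added above: Sentry's ignoreErrors option accepts both regular expressions and plain strings, and string entries match as substrings of the event message. A minimal init sketch (the dsn is a placeholder):

import * as Sentry from '@sentry/browser'

Sentry.init({
  dsn: 'https://examplePublicKey@o0.ingest.sentry.io/0', // placeholder
  ignoreErrors: [
    // regexes are tested against the event message as given
    /Non-Error promise rejection captured with keys: message, status/,
    // plain strings match as substrings, so this drops any event whose
    // message contains the blocked-image text
    "Blocked 'image' from 'www.googletagmanager.com'",
  ],
})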


@@ -378,7 +378,9 @@ export const LocalCompileProvider: FC<React.PropsWithChildren> = ({
if (initialCompileFromCache && !pendingInitialCompileFromCache) {
setPendingInitialCompileFromCache(true)
setCompileFromCacheStartedAt(performance.now())
-getJSON(`/project/${projectId}/output/cached/output.overleaf.json`)
+getJSON(`/project/${projectId}/output/cached/output.overleaf.json`, {
+signal: AbortSignal.timeout(5_000),
+})
.then((data: any) => {
// Hand data over to next effect, it will wait for project/doc loading.
setDataFromCache(data)
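AbortSignal.timeout(5_000) is the standard Web API for a self-expiring signal: the request aborts after five seconds and rejects with a 'TimeoutError' DOMException, so a slow cached-output lookup fails fast instead of hanging. An equivalent sketch with plain fetch (fetchCachedOutput is illustrative; the code above goes through Overleaf's getJSON helper):

async function fetchCachedOutput(projectId: string): Promise<unknown> {
  const res = await fetch(
    `/project/${projectId}/output/cached/output.overleaf.json`,
    // auto-aborts after 5s, rejecting with a 'TimeoutError' DOMException
    { signal: AbortSignal.timeout(5_000) }
  )
  if (!res.ok) throw new Error(`HTTP ${res.status}`)
  return res.json()
}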


@@ -300,6 +300,7 @@ export interface Meta {
'ol-translationLoadErrorMessage': string
'ol-translationMaintenance': string
'ol-translationUnableToJoin': string
+'ol-trialDisabledReason': string | undefined
'ol-usGovBannerVariant': USGovBannerVariant
'ol-useShareJsHash': boolean
'ol-user': User
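Adding the key to the Meta interface is what lets a typed getMeta return string | undefined for it, which the notification's early return relies on. Illustrative signature only (declared here, not Overleaf's actual implementation):

declare function getMeta<K extends keyof Meta>(name: K): Meta[K]

const reason = getMeta('ol-trialDisabledReason') // string | undefined
if (reason) {
  // render the warning only when the server supplied a reason
}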


@@ -1425,7 +1425,6 @@
"reactivate_subscription": "Genaktivér dit abonnement",
"read_lines_from_path": "Læste linjer fra __path__",
"read_more": "Læs mere",
"read_more_about_free_compile_timeouts_servers": "Læs mere om ændringer i kompileringstidsgrænser og servere",
"read_only_token": "Skrivebeskyttet nøgle",
"read_write_token": "Læse- og skrivenøgle",
"ready_to_join_x": "Du er klar til at slutte dig til __inviterName__",


@@ -304,7 +304,6 @@
"certificate": "Certificate",
"change": "Change",
"change_currency": "Change currency",
"change_email": "Change email",
"change_language": "Change language",
"change_or_cancel-cancel": "cancel",
"change_or_cancel-change": "Change",
@@ -316,6 +315,7 @@
"change_primary_email": "Change primary email",
"change_primary_email_address_instructions": "To change your primary email, please add your new primary email address first (by clicking <0>Add another email</0>) and confirm it. Then click the <0>Make primary</0> button. <1>Learn more about managing your __appName__ emails</1>.",
"change_project_owner": "Change project owner",
"change_role_and_department": "Change role and department",
"change_the_ownership_of_your_personal_projects": "Change the ownership of your personal projects to the new account. <0>Find out how to change project owner.</0>",
"change_to_group_plan": "Change to a group plan",
"change_to_this_plan": "Change to this plan",
@@ -1819,7 +1819,6 @@
"reactivating": "Reactivating",
"read_lines_from_path": "Read lines from __path__",
"read_more": "Read more",
"read_more_about_free_compile_timeouts_servers": "Read more about changes to free compile timeouts and servers",
"read_more_about_managed_users": "Read more about managed users",
"read_only_dropbox_sync_message": "As a read-only viewer you can sync the current project version to Dropbox, but changes made in Dropbox will <0>not</0> sync back to Overleaf.",
"read_only_token": "Read-Only Token",
@@ -2709,7 +2708,6 @@
"were_making_some_changes_to_project_sharing_this_means_you_will_be_visible": "Were making some <0>changes to project sharing</0>. This means, as someone with edit access, your name and email address will be visible to the project owner and other editors.",
"were_performing_maintenance": "Were performing maintenance on Overleaf and you need to wait a moment. Sorry for any inconvenience. The editor will refresh automatically in __seconds__ seconds.",
"weve_redesigned_our_editor_to_make_it_easier_to_use_and_future_ready": "Weve redesigned our editor to make it easier to use and future ready. Its now in beta, so try it out and give us your feedback.",
"weve_reduced_compile_timeout": "Weve recently <0>reduced the compile timeout limit</0> on our free plan, which may have affected your project.",
"what_did_you_find_most_helpful": "What did you find most helpful?",
"what_do_you_need": "What do you need?",
"what_do_you_need_help_with": "What do you need help with?",
@@ -2846,12 +2844,13 @@
"youre_adding_x_licenses_to_your_plan_giving_you_a_total_of_y_licenses": "Youre adding <0>__adding__</0> licenses to your plan giving you a total of <1>__total__</1> licenses.",
"youre_already_setup_for_sso": "Youre already set up for SSO",
"youre_joining": "Youre joining",
"youre_not_eligible_for_a_free_trial": "Youre not eligible for a free trial. Upgrade to start using premium features.",
"youre_on_free_trial_which_ends_on": "Youre on a free trial which ends on <0>__date__</0>.",
"youre_signed_in_as_logout": "Youre signed in as <0>__email__</0>. <1>Log out.</1>",
"youre_signed_up": "Youre signed up",
"youve_added_more_licenses": "Youve added more license(s)!",
"youve_added_x_more_licenses_to_your_subscription_invite_people": "Youve added __users__ more license(s) to your subscription. <0>Invite people</0>.",
"youve_already_used_your_free_tial": "Youve already used your free trial. Upgrade to continue using premium features.",
"youve_already_used_your_free_trial": "Youve already used your free trial. Upgrade to continue using premium features.",
"youve_lost_collaboration_access": "Youve lost collaboration access",
"youve_paused_your_subscription": "Your <0>__planName__</0> subscription is paused until <0>__reactivationDate__</0>, then itll automatically unpause. You can unpause early at any time.",
"youve_unlinked_all_users": "Youve unlinked all users",

View File

@@ -1687,7 +1687,6 @@
"reactivate_subscription": "重新激活您的订阅",
"read_lines_from_path": "从 __path__ 读取行",
"read_more": "阅读更多",
"read_more_about_free_compile_timeouts_servers": "阅读有关免费计划编译超时和服务器更改的更多信息",
"read_only_dropbox_sync_message": "作为只读查看者,您可以将当前项目版本同步到 Dropbox但在 Dropbox 中所做的更改<0>不会</0>同步回 Overleaf。",
"read_only_token": "只读令牌",
"read_write_token": "可读写令牌",


@@ -885,7 +885,10 @@ describe('<UserNotifications />', function () {
)
await waitForElementToBeRemoved(() => screen.getByText(/sending/i))
-screen.getByText(/Enter the 6-digit code sent to foo@overleaf.com/i)
+// Sometimes we need to wait for useWaitForI18n to be ready
+await screen.findByText(
+/Enter the 6-digit code sent to foo@overleaf.com/i
+)
expect(sendReconfirmationMock.callHistory.called()).to.be.true
fireEvent.click(
screen.getByRole('button', { name: /resend confirmation code/i })
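The fix works because getByText throws synchronously when the text is not yet in the DOM, while findByText returns a promise that retries (for about a second by default) until the element appears, absorbing the async useWaitForI18n startup. Condensed:

// waits for i18n to finish loading instead of failing on the first render
const hint = await screen.findByText(
  /Enter the 6-digit code sent to foo@overleaf.com/i
)
expect(hint).to.exist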


@@ -34,8 +34,7 @@ describe('LinkedFilesController', function () {
ctx.SessionManager = {
getLoggedInUserId: sinon.stub().returns(ctx.userId),
}
-ctx.EditorRealTimeController = {}
-ctx.ReferencesHandler = {}
+ctx.EditorRealTimeController = { emitToRoom: sinon.stub() }
ctx.UrlAgent = {}
ctx.ProjectFileAgent = {}
ctx.ProjectOutputFileAgent = {}
@@ -74,13 +73,6 @@ describe('LinkedFilesController', function () {
})
)
-vi.doMock(
-'../../../../app/src/Features/References/ReferencesHandler',
-() => ({
-default: ctx.ReferencesHandler,
-})
-)
vi.doMock('../../../../app/src/Features/LinkedFiles/UrlAgent', () => ({
default: ctx.UrlAgent,
}))
@@ -200,5 +192,38 @@ describe('LinkedFilesController', function () {
ctx.LinkedFilesController.refreshLinkedFile(ctx.req, ctx.res, ctx.next)
})
})
+describe('when bib file re-indexing is required', function () {
+const clientId = 'client-id'
+beforeEach(function (ctx) {
+ctx.req.body.shouldReindexReferences = true
+ctx.req.body.clientId = clientId
+})
+it('informs clients to re-index bib references', async function (ctx) {
+await new Promise(resolve => {
+ctx.next = sinon.stub().callsFake(() => resolve('unexpected error'))
+ctx.res = {
+json: () => {
+expect(
+ctx.EditorRealTimeController.emitToRoom
+).to.have.been.calledWith(
+ctx.projectId,
+'references:keys:updated',
+[],
+true,
+clientId
+)
+resolve()
+},
+}
+ctx.LinkedFilesController.refreshLinkedFile(
+ctx.req,
+ctx.res,
+ctx.next
+)
+})
+})
+})
})
})
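The new test adapts a callback-style Express handler to an async test: the promise resolves inside res.json once the assertions have run, and ctx.next resolving with 'unexpected error' keeps the test from hanging if the error path fires. A condensed sketch of the same pattern (rejecting on next() is a stricter variant than the resolve-with-a-marker used above):

await new Promise<void>((resolve, reject) => {
  const res = {
    json: () => {
      // assertions on EditorRealTimeController.emitToRoom go here
      resolve()
    },
  }
  const next = (err?: unknown) =>
    reject(err instanceof Error ? err : new Error('unexpected next()'))
  ctx.LinkedFilesController.refreshLinkedFile(ctx.req, res, next)
})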


@@ -15,16 +15,6 @@ describe('ReferencesController', function () {
}),
}))
-vi.doMock(
-'../../../../app/src/Features/References/ReferencesHandler',
-() => ({
-default: (ctx.ReferencesHandler = {
-index: sinon.stub(),
-indexAll: sinon.stub(),
-}),
-})
-)
vi.doMock(
'../../../../app/src/Features/Editor/EditorRealTimeController',
() => ({
@@ -45,16 +35,15 @@ describe('ReferencesController', function () {
ctx.res.json = sinon.stub()
ctx.res.sendStatus = sinon.stub()
ctx.next = sinon.stub()
-ctx.fakeResponseData = {
+ctx.expectedResponseData = {
projectId: ctx.projectId,
-keys: ['one', 'two', 'three'],
+keys: [],
}
})
describe('indexAll', function () {
beforeEach(function (ctx) {
ctx.req.body = { shouldBroadcast: false }
-ctx.ReferencesHandler.indexAll.callsArgWith(1, null, ctx.fakeResponseData)
ctx.call = callback => {
ctx.controller.indexAll(ctx.req, ctx.res, ctx.next)
return callback()
@@ -72,23 +61,11 @@ describe('ReferencesController', function () {
})
})
-it('should return data', async function (ctx) {
+it('should return expected empty data', async function (ctx) {
await new Promise(resolve => {
ctx.call(() => {
ctx.res.json.callCount.should.equal(1)
-ctx.res.json.calledWith(ctx.fakeResponseData).should.equal(true)
-resolve()
-})
-})
-})
-it('should call ReferencesHandler.indexAll', async function (ctx) {
-await new Promise(resolve => {
-ctx.call(() => {
-ctx.ReferencesHandler.indexAll.callCount.should.equal(1)
-ctx.ReferencesHandler.indexAll
-.calledWith(ctx.projectId)
-.should.equal(true)
+ctx.res.json.calledWith(ctx.expectedResponseData).should.equal(true)
resolve()
})
})
@@ -96,7 +73,6 @@ describe('ReferencesController', function () {
describe('when shouldBroadcast is true', function () {
beforeEach(function (ctx) {
-ctx.ReferencesHandler.index.callsArgWith(2, null, ctx.fakeResponseData)
ctx.req.body.shouldBroadcast = true
})
@@ -120,11 +96,11 @@ describe('ReferencesController', function () {
})
})
-it('should still return data', async function (ctx) {
+it('should still return empty data', async function (ctx) {
await new Promise(resolve => {
ctx.call(() => {
ctx.res.json.callCount.should.equal(1)
-ctx.res.json.calledWith(ctx.fakeResponseData).should.equal(true)
+ctx.res.json.calledWith(ctx.expectedResponseData).should.equal(true)
resolve()
})
})
@@ -133,7 +109,6 @@ describe('ReferencesController', function () {
describe('when shouldBroadcast is false', function () {
beforeEach(function (ctx) {
-ctx.ReferencesHandler.index.callsArgWith(2, null, ctx.fakeResponseData)
ctx.req.body.shouldBroadcast = false
})
@@ -157,57 +132,15 @@ describe('ReferencesController', function () {
})
})
-it('should still return data', async function (ctx) {
+it('should still return empty data', async function (ctx) {
await new Promise(resolve => {
ctx.call(() => {
ctx.res.json.callCount.should.equal(1)
-ctx.res.json.calledWith(ctx.fakeResponseData).should.equal(true)
+ctx.res.json.calledWith(ctx.expectedResponseData).should.equal(true)
resolve()
})
})
})
})
})
-describe('there is no data', function () {
-beforeEach(function (ctx) {
-ctx.ReferencesHandler.indexAll.callsArgWith(1)
-ctx.call = callback => {
-ctx.controller.indexAll(ctx.req, ctx.res, ctx.next)
-callback()
-}
-})
-it('should not call EditorRealTimeController.emitToRoom', async function (ctx) {
-await new Promise(resolve => {
-ctx.call(() => {
-ctx.EditorRealTimeController.emitToRoom.callCount.should.equal(0)
-resolve()
-})
-})
-})
-it('should not produce an error', async function (ctx) {
-await new Promise(resolve => {
-ctx.call(() => {
-ctx.res.sendStatus.callCount.should.equal(0)
-ctx.res.sendStatus.calledWith(500).should.equal(false)
-ctx.res.sendStatus.calledWith(400).should.equal(false)
-resolve()
-})
-})
-})
-it('should send a response with an empty keys list', async function (ctx) {
-await new Promise(resolve => {
-ctx.call(() => {
-ctx.res.json.called.should.equal(true)
-ctx.res.json
-.calledWith({ projectId: ctx.projectId, keys: [] })
-.should.equal(true)
-resolve()
-})
-})
-})
-})
})


@@ -1,444 +0,0 @@
import { expect, vi } from 'vitest'
import sinon from 'sinon'
import Errors from '../../../../app/src/Features/Errors/Errors.js'
const modulePath =
'../../../../app/src/Features/References/ReferencesHandler.mjs'
vi.mock('../../../../app/src/Features/Errors/Errors.js', () =>
vi.importActual('../../../../app/src/Features/Errors/Errors.js')
)
describe('ReferencesHandler', function () {
beforeEach(async function (ctx) {
ctx.projectId = '222'
ctx.historyId = 42
ctx.fakeProject = {
_id: ctx.projectId,
owner_ref: (ctx.fakeOwner = {
_id: 'some_owner',
features: {
references: false,
},
}),
rootFolder: [
{
docs: [
{ name: 'one.bib', _id: 'aaa' },
{ name: 'two.txt', _id: 'bbb' },
],
folders: [
{
docs: [{ name: 'three.bib', _id: 'ccc' }],
fileRefs: [
{ name: 'four.bib', _id: 'fff', hash: 'abc' },
{ name: 'five.bib', _id: 'ggg', hash: 'def' },
],
folders: [],
},
],
},
],
overleaf: { history: { id: ctx.historyId } },
}
ctx.docIds = ['aaa', 'ccc']
vi.doMock('@overleaf/settings', () => ({
default: (ctx.settings = {
apis: {
references: { url: 'http://some.url/references' },
docstore: { url: 'http://some.url/docstore' },
filestore: { url: 'http://some.url/filestore' },
project_history: { url: 'http://project-history.local' },
},
}),
}))
vi.doMock('request', () => ({
default: (ctx.request = {
get: sinon.stub(),
post: sinon.stub(),
}),
}))
vi.doMock('../../../../app/src/Features/Project/ProjectGetter', () => ({
default: (ctx.ProjectGetter = {
getProject: sinon.stub().callsArgWith(2, null, ctx.fakeProject),
}),
}))
vi.doMock('../../../../app/src/Features/User/UserGetter', () => ({
default: (ctx.UserGetter = {
getUser: sinon.stub(),
}),
}))
vi.doMock(
'../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler',
() => ({
default: (ctx.DocumentUpdaterHandler = {
flushDocToMongo: sinon.stub().callsArgWith(2, null),
}),
})
)
vi.doMock('../../../../app/src/infrastructure/Features', () => ({
default: (ctx.Features = {
hasFeature: sinon.stub().returns(true),
}),
}))
ctx.handler = (await import(modulePath)).default
ctx.fakeResponseData = {
projectId: ctx.projectId,
keys: ['k1', 'k2'],
}
})
describe('indexAll', function () {
beforeEach(function (ctx) {
sinon.stub(ctx.handler, '_findBibDocIds').returns(['aaa', 'ccc'])
sinon.stub(ctx.handler, '_findBibFileRefs').returns([
{ _id: 'fff', hash: 'abc' },
{ _id: 'ggg', hash: 'def' },
])
sinon.stub(ctx.handler, '_isFullIndex').callsArgWith(1, null, true)
ctx.request.post.callsArgWith(
1,
null,
{ statusCode: 200 },
ctx.fakeResponseData
)
return (ctx.call = callback => {
return ctx.handler.indexAll(ctx.projectId, callback)
})
})
it('should call _findBibDocIds', async function (ctx) {
await new Promise(resolve => {
return ctx.call((err, data) => {
expect(err).to.be.null
ctx.handler._findBibDocIds.callCount.should.equal(1)
ctx.handler._findBibDocIds
.calledWith(ctx.fakeProject)
.should.equal(true)
return resolve()
})
})
})
it('should call _findBibFileRefs', async function (ctx) {
await new Promise(resolve => {
return ctx.call((err, data) => {
expect(err).to.be.null
ctx.handler._findBibDocIds.callCount.should.equal(1)
ctx.handler._findBibDocIds
.calledWith(ctx.fakeProject)
.should.equal(true)
return resolve()
})
})
})
it('should call DocumentUpdaterHandler.flushDocToMongo', async function (ctx) {
await new Promise(resolve => {
return ctx.call((err, data) => {
expect(err).to.be.null
ctx.DocumentUpdaterHandler.flushDocToMongo.callCount.should.equal(2)
return resolve()
})
})
})
it('should make a request to references service', async function (ctx) {
await new Promise(resolve => {
return ctx.call((err, data) => {
expect(err).to.be.null
ctx.request.post.callCount.should.equal(1)
const arg = ctx.request.post.firstCall.args[0]
expect(arg.json).to.have.all.keys(
'docUrls',
'sourceURLs',
'fullIndex'
)
expect(arg.json.docUrls.length).to.equal(4)
expect(arg.json.docUrls).to.deep.equal([
`${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/aaa/raw`,
`${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/ccc/raw`,
`${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/abc`,
`${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/def`,
])
expect(arg.json.sourceURLs.length).to.equal(4)
expect(arg.json.sourceURLs).to.deep.equal([
{
url: `${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/aaa/raw`,
},
{
url: `${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/ccc/raw`,
},
{
url: `${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/abc`,
},
{
url: `${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/def`,
},
])
expect(arg.json.fullIndex).to.equal(true)
return resolve()
})
})
})
it('should not produce an error', async function (ctx) {
await new Promise(resolve => {
return ctx.call((err, data) => {
expect(err).to.equal(null)
return resolve()
})
})
})
it('should return data', async function (ctx) {
await new Promise(resolve => {
return ctx.call((err, data) => {
expect(err).to.be.null
expect(data).to.not.equal(null)
expect(data).to.not.equal(undefined)
expect(data).to.equal(ctx.fakeResponseData)
return resolve()
})
})
})
describe('when ProjectGetter.getProject produces an error', function () {
beforeEach(function (ctx) {
ctx.ProjectGetter.getProject.callsArgWith(2, new Error('woops'))
})
it('should produce an error', async function (ctx) {
await new Promise(resolve => {
ctx.call((err, data) => {
expect(err).to.not.equal(null)
expect(err).to.be.instanceof(Error)
expect(data).to.equal(undefined)
resolve()
})
})
})
it('should not send request', async function (ctx) {
await new Promise(resolve => {
ctx.call(() => {
ctx.request.post.callCount.should.equal(0)
resolve()
})
})
})
})
describe('when ProjectGetter.getProject returns null', function () {
beforeEach(function (ctx) {
ctx.ProjectGetter.getProject.callsArgWith(2, null)
})
it('should produce an error', async function (ctx) {
await new Promise(resolve => {
ctx.call((err, data) => {
expect(err).to.not.equal(null)
expect(err).to.be.instanceof(Errors.NotFoundError)
expect(data).to.equal(undefined)
resolve()
})
})
})
it('should not send request', async function (ctx) {
await new Promise(resolve => {
ctx.call(() => {
ctx.request.post.callCount.should.equal(0)
resolve()
})
})
})
})
describe('when _isFullIndex produces an error', function () {
beforeEach(function (ctx) {
ctx.ProjectGetter.getProject.callsArgWith(2, null, ctx.fakeProject)
ctx.handler._isFullIndex.callsArgWith(1, new Error('woops'))
})
it('should produce an error', async function (ctx) {
await new Promise(resolve => {
ctx.call((err, data) => {
expect(err).to.not.equal(null)
expect(err).to.be.instanceof(Error)
expect(data).to.equal(undefined)
resolve()
})
})
})
it('should not send request', async function (ctx) {
await new Promise(resolve => {
ctx.call(() => {
ctx.request.post.callCount.should.equal(0)
resolve()
})
})
})
})
describe('when flushDocToMongo produces an error', function () {
beforeEach(function (ctx) {
ctx.ProjectGetter.getProject.callsArgWith(2, null, ctx.fakeProject)
ctx.handler._isFullIndex.callsArgWith(1, false)
ctx.DocumentUpdaterHandler.flushDocToMongo.callsArgWith(
2,
new Error('woops')
)
})
it('should produce an error', async function (ctx) {
await new Promise(resolve => {
ctx.call((err, data) => {
expect(err).to.not.equal(null)
expect(err).to.be.instanceof(Error)
expect(data).to.equal(undefined)
resolve()
})
})
})
it('should not send request', async function (ctx) {
await new Promise(resolve => {
ctx.call(() => {
ctx.request.post.callCount.should.equal(0)
resolve()
})
})
})
})
})
describe('_findBibDocIds', function () {
beforeEach(function (ctx) {
ctx.fakeProject = {
rootFolder: [
{
docs: [
{ name: 'one.bib', _id: 'aaa' },
{ name: 'two.txt', _id: 'bbb' },
],
folders: [
{ docs: [{ name: 'three.bib', _id: 'ccc' }], folders: [] },
],
},
],
}
ctx.expectedIds = ['aaa', 'ccc']
})
it('should select the correct docIds', function (ctx) {
const result = ctx.handler._findBibDocIds(ctx.fakeProject)
expect(result).to.deep.equal(ctx.expectedIds)
})
it('should not error with a non array of folders from dirty data', function (ctx) {
ctx.fakeProject.rootFolder[0].folders[0].folders = {}
const result = ctx.handler._findBibDocIds(ctx.fakeProject)
expect(result).to.deep.equal(ctx.expectedIds)
})
})
describe('_findBibFileRefs', function () {
beforeEach(function (ctx) {
ctx.fakeProject = {
rootFolder: [
{
docs: [
{ name: 'one.bib', _id: 'aaa' },
{ name: 'two.txt', _id: 'bbb' },
],
fileRefs: [{ name: 'other.bib', _id: 'ddd' }],
folders: [
{
docs: [{ name: 'three.bib', _id: 'ccc' }],
fileRefs: [{ name: 'four.bib', _id: 'ghg' }],
folders: [],
},
],
},
],
}
ctx.expectedIds = [
ctx.fakeProject.rootFolder[0].fileRefs[0],
ctx.fakeProject.rootFolder[0].folders[0].fileRefs[0],
]
})
it('should select the correct docIds', function (ctx) {
const result = ctx.handler._findBibFileRefs(ctx.fakeProject)
expect(result).to.deep.equal(ctx.expectedIds)
})
})
describe('_isFullIndex', function () {
beforeEach(function (ctx) {
ctx.fakeProject = { owner_ref: (ctx.owner_ref = 'owner-ref-123') }
ctx.owner = {
features: {
references: false,
},
}
ctx.UserGetter.getUser = sinon.stub()
ctx.UserGetter.getUser
.withArgs(ctx.owner_ref, { features: true })
.yields(null, ctx.owner)
ctx.call = callback => {
ctx.handler._isFullIndex(ctx.fakeProject, callback)
}
})
describe('with references feature on', function () {
beforeEach(function (ctx) {
ctx.owner.features.references = true
})
it('should return true', function (ctx) {
ctx.call((err, isFullIndex) => {
expect(err).to.equal(null)
expect(isFullIndex).to.equal(true)
})
})
})
describe('with references feature off', function () {
beforeEach(function (ctx) {
ctx.owner.features.references = false
})
it('should return false', function (ctx) {
ctx.call((err, isFullIndex) => {
expect(err).to.equal(null)
expect(isFullIndex).to.equal(false)
})
})
})
describe('with referencesSearch', function () {
beforeEach(function (ctx) {
ctx.owner.features = {
referencesSearch: true,
references: false,
}
})
it('should return true', function (ctx) {
ctx.call((err, isFullIndex) => {
expect(err).to.equal(null)
expect(isFullIndex).to.equal(true)
})
})
})
})
})
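The deleted suite exercised a recursive scan for .bib documents that tolerated dirty data (a non-array folders value). A reconstruction from the fixtures above, for illustration only; the removed handler itself is gone along with the references URL configuration, so this only mirrors what the '_findBibDocIds' tests asserted:

type Doc = { name: string; _id: string }
type Folder = { docs?: Doc[]; folders?: unknown }

// Collect _ids of *.bib docs, recursing into subfolders and ignoring a
// malformed `folders` value, as the dirty-data test required.
function findBibDocIds(folder: Folder): string[] {
  const own = (folder.docs ?? [])
    .filter(doc => doc.name.endsWith('.bib'))
    .map(doc => doc._id)
  const nested = Array.isArray(folder.folders) ? folder.folders : []
  return own.concat(...nested.map(findBibDocIds))
}

// With the fixture project: findBibDocIds(fakeProject.rootFolder[0])
// returns ['aaa', 'ccc'].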

File diff suppressed because it is too large


@@ -9,7 +9,7 @@ export type ImmediateCharge = {
subtotal: number
discount: number
tax: number
-isAiAssist: boolean
+isAiAssist?: boolean
}[]
}
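Making isAiAssist optional means line items from sources that never set the flag still type-check, and falsy checks like the !item.isAiAssist filter earlier in this change treat undefined and false the same. A small sketch (LineItem mirrors the shape above; the helper name is illustrative):

type LineItem = {
  subtotal: number
  discount: number
  tax: number
  isAiAssist?: boolean
}

// undefined and false are both "not AI Assist" under a falsy check
const nonAiItems = (items: LineItem[]) => items.filter(item => !item.isAiAssist)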