6 Commits

Author SHA1 Message Date
Miguel Serrano
fa1aa0116a [web] transfer-ownership group audit log (#29764)
* [web] `transfer-ownership` group audit log

Includes `transfer-ownership` in the list of project
audit logs visible to managed group admins, and adds logic
to add multiple log entries when more than one managed
group is involved.

GitOrigin-RevId: 780b90a74a960047e97ebba83e5502a237b83b41
2025-12-02 09:05:27 +00:00
Malik Glossop
472e05f32b Merge pull request #29971 from overleaf/mg-edit-search-bug
Make editor focusable in view-only mode

GitOrigin-RevId: ed9b079fa379d84f7f410669fa2d865f82e21cb1
2025-12-02 09:04:59 +00:00
Maria Florencia Besteiro Gonzalez
dab59520c3 Merge pull request #29930 from overleaf/mfb-fix-zod-iso-datetime-error
allow iso date time string with offset on zod validation. add unit tests

GitOrigin-RevId: 88407fe681a66d13737de41789a9ea807a23627a
2025-12-01 09:06:23 +00:00
Brian Gough
06f696ced0 Merge pull request #29980 from overleaf/bg-history-extend-backup-comparison-III
Check file tree hashes in backup comparison

GitOrigin-RevId: 4bd1f36afa34f326d4b8934c8bb0ea00a52cf1d9
2025-12-01 09:06:18 +00:00
Olzhas Askar
9ebab12049 Merge pull request #29826 from overleaf/oa-coupon-scripts
[web] Coupon scripts

GitOrigin-RevId: 9d98b65649326ebad7a41c55ebce009327c9c634
2025-12-01 09:05:54 +00:00
Alf Eaton
b40af34b8c Add workbench as a separate panel (#29922)
GitOrigin-RevId: de1a168e6081dee94313ba2e2a8952028cf455bb
2025-12-01 09:05:50 +00:00
13 changed files with 744 additions and 54 deletions

View File

@@ -41,4 +41,180 @@ describe('zodHelpers', () => {
expect(parsed.data?.toString()).toBe('507f1f77bcf86cd799439011')
})
})
// zz.datetime(): accepts ISO 8601 strings or Date objects and yields a Date;
// rejects null and malformed strings. The `offset` option controls whether
// timezone-offset suffixes (e.g. `+00:00`) are accepted.
describe('datetime', () => {
  it('parses valid ISO 8601 datetime strings', () => {
    const parsed = zz.datetime().safeParse('2024-01-01T12:00:00Z')
    expect(parsed.success).toBe(true)
    // The string input is converted to a Date instance
    expect(parsed.data).toEqual(new Date('2024-01-01T12:00:00Z'))
  })

  it('parses a valid ISO 8601 datetime with offset', () => {
    const parsed = zz
      .datetime({ offset: true })
      .safeParse('2024-01-01T12:00:00+00:00')
    expect(parsed.success).toBe(true)
    expect(parsed.data).toEqual(new Date('2024-01-01T12:00:00+00:00'))
  })

  it('parses a valid Date object', () => {
    const date = new Date('2024-01-01T12:00:00Z')
    const parsed = zz.datetime().safeParse(date)
    expect(parsed.success).toBe(true)
    // Date inputs pass through unchanged
    expect(parsed.data).toEqual(date)
  })

  it('fails to parse datetime with offset when offset option is false', () => {
    const parsed = zz
      .datetime({ offset: false })
      .safeParse('2024-01-01T12:00:00+00:00')
    expect(parsed.success).toBe(false)
    expect(parsed.error?.issues).toHaveLength(1)
    expect(parsed.error?.issues).toMatchObject([
      expect.objectContaining({
        code: 'invalid_format',
        format: 'datetime',
        message: 'Invalid ISO datetime',
      }),
    ])
  })

  it('fails to parse null when schema is not nullable', () => {
    const parsed = zz.datetime().safeParse(null)
    expect(parsed.success).toBe(false)
    expect(parsed.error?.message).toContain(
      'Invalid input: expected date, received null'
    )
  })

  it('fails to parse invalid datetime strings', () => {
    const parsed = zz.datetime().safeParse('invalid-datetime')
    expect(parsed.success).toBe(false)
    expect(parsed.error?.issues).toHaveLength(1)
    expect(parsed.error?.issues).toMatchObject([
      expect.objectContaining({
        code: 'invalid_format',
        format: 'datetime',
        message: 'Invalid ISO datetime',
      }),
    ])
  })
})
// zz.datetimeNullable(): same parsing behaviour as zz.datetime(), but null
// input is accepted and passed through as null.
describe('datetimeNullable', () => {
  it('parses valid ISO 8601 datetime strings', () => {
    const parsed = zz.datetimeNullable().safeParse('2024-01-01T12:00:00Z')
    expect(parsed.success).toBe(true)
    expect(parsed.data).toEqual(new Date('2024-01-01T12:00:00Z'))
  })

  it('parses a valid ISO 8601 datetime with offset', () => {
    const parsed = zz
      .datetimeNullable({ offset: true })
      .safeParse('2024-01-01T12:00:00+00:00')
    expect(parsed.success).toBe(true)
    expect(parsed.data).toEqual(new Date('2024-01-01T12:00:00+00:00'))
  })

  it('parses a valid Date object', () => {
    const date = new Date('2024-01-01T12:00:00Z')
    const parsed = zz.datetimeNullable().safeParse(date)
    expect(parsed.success).toBe(true)
    expect(parsed.data).toEqual(date)
  })

  it('fails to parse datetime with offset when offset option is false', () => {
    const parsed = zz
      .datetimeNullable({ offset: false })
      .safeParse('2024-01-01T12:00:00+00:00')
    expect(parsed.success).toBe(false)
    expect(parsed.error?.issues).toHaveLength(1)
    expect(parsed.error?.issues).toMatchObject([
      expect.objectContaining({
        code: 'invalid_format',
        format: 'datetime',
        message: 'Invalid ISO datetime',
      }),
    ])
  })

  it('parses null when schema is nullable and input is null', () => {
    const parsed = zz.datetimeNullable().safeParse(null)
    expect(parsed.success).toBe(true)
    // null is preserved, not coerced to a Date
    expect(parsed.data).toBeNull()
  })

  it('fails to parse invalid datetime strings', () => {
    const parsed = zz.datetimeNullable().safeParse('invalid-datetime')
    expect(parsed.success).toBe(false)
    expect(parsed.error?.issues).toHaveLength(1)
    expect(parsed.error?.issues).toMatchObject([
      expect.objectContaining({
        code: 'invalid_format',
        format: 'datetime',
        message: 'Invalid ISO datetime',
      }),
    ])
  })
})
// zz.datetimeNullish(): same parsing behaviour as zz.datetime(), but both
// null and undefined inputs are accepted and passed through unchanged.
describe('datetimeNullish', () => {
  it('parses valid ISO 8601 datetime strings', () => {
    const parsed = zz.datetimeNullish().safeParse('2024-01-01T12:00:00Z')
    expect(parsed.success).toBe(true)
    expect(parsed.data).toEqual(new Date('2024-01-01T12:00:00Z'))
  })

  it('parses a valid ISO 8601 datetime with offset', () => {
    const parsed = zz
      .datetimeNullish({ offset: true })
      .safeParse('2024-01-01T12:00:00+00:00')
    expect(parsed.success).toBe(true)
    expect(parsed.data).toEqual(new Date('2024-01-01T12:00:00+00:00'))
  })

  it('parses a valid Date object', () => {
    const date = new Date('2024-01-01T12:00:00Z')
    const parsed = zz.datetimeNullish().safeParse(date)
    expect(parsed.success).toBe(true)
    expect(parsed.data).toEqual(date)
  })

  it('parses null when schema is nullable and input is null', () => {
    const parsed = zz.datetimeNullish().safeParse(null)
    expect(parsed.success).toBe(true)
    expect(parsed.data).toBeNull()
  })

  it('parses undefined when schema is nullish and input is undefined', () => {
    const parsed = zz.datetimeNullish().safeParse(undefined)
    expect(parsed.success).toBe(true)
    // undefined stays undefined (distinct from the null case above)
    expect(parsed.data).toBeUndefined()
  })

  it('fails to parse datetime with offset when offset option is false', () => {
    const parsed = zz
      .datetimeNullish({ offset: false })
      .safeParse('2024-01-01T12:00:00+00:00')
    expect(parsed.success).toBe(false)
    expect(parsed.error?.issues).toHaveLength(1)
    expect(parsed.error?.issues).toMatchObject([
      expect.objectContaining({
        code: 'invalid_format',
        format: 'datetime',
        message: 'Invalid ISO datetime',
      }),
    ])
  })

  it('fails to parse invalid datetime strings', () => {
    const parsed = zz.datetimeNullish().safeParse('invalid-datetime')
    expect(parsed.success).toBe(false)
    expect(parsed.error?.issues).toHaveLength(1)
    expect(parsed.error?.issues).toMatchObject([
      expect.objectContaining({
        code: 'invalid_format',
        format: 'datetime',
        message: 'Invalid ISO datetime',
      }),
    ])
  })
})
})

6
libraries/validation-tools/types.d.ts vendored Normal file
View File

@@ -0,0 +1,6 @@
import z from 'zod'
export interface DatetimeSchemaOptions extends z.core.$ZodISODateTimeParams {
allowNull?: boolean
allowUndefined?: boolean
}

View File

@@ -3,9 +3,19 @@ const mongodb = require('mongodb')
const { ObjectId } = mongodb
const dateWithTransform = (schema, allowNull = false) => {
return schema.transform(dt => {
if (allowNull && !dt) return null
/**
* @import { DatetimeSchemaOptions } from './types'
*/
/**
* @param {DatetimeSchemaOptions} options
*/
const datetimeSchema = ({ allowNull, allowUndefined, ...zodOptions } = {}) => {
const union = [z.date(), z.iso.datetime(zodOptions)]
if (allowNull) union.push(z.null())
if (allowUndefined) union.push(z.undefined())
return z.union(union).transform(dt => {
if (allowNull && !dt) return dt === null ? null : undefined
return dt instanceof Date ? dt : new Date(dt)
})
}
@@ -19,14 +29,10 @@ const zz = {
.refine(ObjectId.isValid, { message: 'invalid Mongo ObjectId' })
.transform(val => new ObjectId(val)),
hex: () => z.string().regex(/^[0-9a-f]*$/),
datetime: () => dateWithTransform(z.union([z.iso.datetime(), z.date()])),
datetimeNullable: () =>
dateWithTransform(z.union([z.iso.datetime(), z.date(), z.null()]), true),
datetimeNullish: () =>
dateWithTransform(
z.union([z.iso.datetime(), z.date(), z.null(), z.undefined()]),
true
),
datetime: options => datetimeSchema(options),
datetimeNullable: options => datetimeSchema({ ...options, allowNull: true }),
datetimeNullish: options =>
datetimeSchema({ ...options, allowNull: true, allowUndefined: true }),
}
module.exports = { zz }

View File

@@ -68,14 +68,18 @@ async function getHistoryId(projectId) {
return project.overleaf.history.id
}
async function getBackupStatus(projectId) {
async function getBackupStatus(projectId, options = {}) {
const projection = {
'overleaf.history': 1,
'overleaf.backup': 1,
}
if (options.includeRootFolder) {
projection.rootFolder = 1
}
const project = await projects.findOne(
{ _id: new ObjectId(projectId) },
{
projection: {
'overleaf.history': 1,
'overleaf.backup': 1,
},
projection,
}
)
if (!project) {
@@ -93,9 +97,38 @@ async function getBackupStatus(projectId) {
historyId: `${project.overleaf.history.id}`,
currentEndVersion: project.overleaf.history.currentEndVersion,
currentEndTimestamp: project.overleaf.history.currentEndTimestamp,
...(options.includeRootFolder && { rootFolder: project.rootFolder?.[0] }),
}
}
/**
 * Collect every file hash referenced in a project file tree.
 *
 * Walks the folder structure iteratively (explicit stack) and gathers the
 * `hash` of each fileRef into a Set, so duplicates are collapsed.
 *
 * @param {object} rootFolder - The root folder object of the file tree.
 * @returns {Set<string>} All unique file hashes found in the file tree.
 */
function getHashesFromFileTree(rootFolder) {
  const hashes = new Set()
  const pending = [rootFolder]
  while (pending.length > 0) {
    const folder = pending.pop()
    for (const fileRef of folder.fileRefs || []) {
      if (fileRef?.hash) {
        hashes.add(fileRef.hash)
      }
    }
    for (const subfolder of folder.folders || []) {
      // Skip malformed subfolder entries (missing or without an _id)
      if (subfolder?._id) {
        pending.push(subfolder)
      }
    }
  }
  return hashes
}
async function setBackupVersion(
projectId,
previousBackedUpVersion,
@@ -216,4 +249,5 @@ module.exports = {
listUninitializedBackups,
getBackedUpBlobHashes,
unsetBackedUpBlobHashes,
getHashesFromFileTree,
}

View File

@@ -7,6 +7,7 @@ import {
getProjectChunks,
getLatestChunkMetadata,
create,
getBackend,
} from '../lib/chunk_store/index.js'
import { client } from '../lib/mongodb.js'
import redis from '../lib/redis.js'
@@ -27,6 +28,7 @@ import {
updatePendingChangeTimestamp,
getBackedUpBlobHashes,
unsetBackedUpBlobHashes,
getHashesFromFileTree,
} from '../lib/backup_store/index.js'
import { backupBlob, downloadBlobToDir } from '../lib/backupBlob.mjs'
import {
@@ -949,8 +951,19 @@ async function getBlobListing(historyId) {
*/
async function compareBackups(projectId, options, log = console.log) {
log(`Comparing backups for project ${projectId}`)
const { historyId } = await getBackupStatus(projectId)
// Convert any postgres history ids to mongo project ids
const backend = getBackend(projectId)
projectId = await backend.resolveHistoryIdToMongoProjectId(projectId)
const { historyId, rootFolder } = await getBackupStatus(projectId, {
includeRootFolder: true,
})
log(`Comparing backups for project ${projectId} historyId ${historyId}`)
const hashesFromFileTree = rootFolder
? getHashesFromFileTree(rootFolder)
: new Set()
const hashesFromHistory = new Set()
const chunks = await getProjectChunks(historyId)
const blobStore = new BlobStore(historyId)
const backupPersistorForProject = await backupPersistor.forProject(
@@ -1047,6 +1060,9 @@ async function compareBackups(projectId, options, log = console.log) {
throw new Error('interrupted')
}
// Track all the hashes in the history
hashesFromHistory.add(blob.hash)
if (GLOBAL_BLOBS.has(blob.hash)) {
const globalBlob = GLOBAL_BLOBS.get(blob.hash)
log(
@@ -1158,6 +1174,31 @@ async function compareBackups(projectId, options, log = console.log) {
}
}
if (gracefulShutdownInitiated) {
throw new Error('interrupted')
}
// Reconcile hashes in file tree with history
log(`Comparing file hashes from file tree with history`)
if (hashesFromFileTree.size > 0) {
for (const hash of hashesFromFileTree) {
const presentInHistory = hashesFromHistory.has(hash)
if (presentInHistory) {
log(` ✓ File tree hash ${hash} present in history`)
} else {
log(` ✗ File tree hash ${hash} not found in history`)
totalBlobsNotFound++
errors.push({
type: 'file-not-found',
historyId,
blobHash: hash,
error: `File tree hash ${hash} not found in history`,
})
}
}
} else {
log(` ✓ File tree does not contain any binary files`)
}
// Print summary
log('\nComparison Summary:')
log('==================')
@@ -1236,6 +1277,9 @@ async function compareProjectAndEmitResult(
return false
} catch (err) {
if (gracefulShutdownInitiated) {
throw err
}
console.log(`FAIL: ${projectId}`)
// Output buffered logs on error when verbose
@@ -1276,6 +1320,9 @@ async function compareProjectAndEmitResult(
case 'blob-size-mismatch':
console.log(`size-mismatch: ${projectId},${historyId},${blobHash}`)
break
case 'file-not-found':
console.log(`file-not-found: ${projectId},${historyId},${blobHash}`)
break
case 'chunk-mismatch':
console.log(`chunk-mismatch: ${projectId},${historyId},${chunkId}`)
break

View File

@@ -2,6 +2,7 @@ import logger from '@overleaf/logger'
import { ProjectAuditLogEntry } from '../../models/ProjectAuditLogEntry.mjs'
import { callbackify } from '@overleaf/promise-utils'
import SubscriptionLocator from '../Subscription/SubscriptionLocator.mjs'
import _ from 'lodash'
const MANAGED_GROUP_PROJECT_EVENTS = [
'send-invite',
@@ -14,8 +15,35 @@ const MANAGED_GROUP_PROJECT_EVENTS = [
'project-untrashed',
'project-restored',
'project-cloned',
'transfer-ownership',
]
/**
 * Resolve the managed group subscriptions related to an audit log entry.
 *
 * For operations visible to managed group admins, looks up the managed
 * subscription (if any) of each user referenced by the entry — the previous
 * owner and new owner (for ownership transfers) plus the initiator — and
 * returns the unique subscription ids.
 *
 * @param {object} entry - the audit log entry being recorded
 * @returns {Promise<string[] | undefined>} unique subscription id strings, or
 *   undefined when the operation is not a managed-group project event
 */
async function findManagedSubscriptions(entry) {
  if (!MANAGED_GROUP_PROJECT_EVENTS.includes(entry.operation)) {
    return
  }
  // Drop empty values and duplicates before doing any lookups
  const candidateUserIds = [
    entry.info?.previousOwnerId,
    entry.info?.newOwnerId,
    entry.initiatorId,
  ].filter(Boolean)
  const uniqueUserIds = _.uniq(candidateUserIds)

  const subscriptions = await Promise.all(
    uniqueUserIds.map(userId =>
      SubscriptionLocator.promises.getUniqueManagedSubscriptionMemberOf(userId)
    )
  )
  // Users without a managed subscription yield undefined; filter those out
  // and deduplicate in case several users share the same subscription.
  const subscriptionIds = subscriptions
    .map(subscription => subscription?._id.toString())
    .filter(Boolean)
  return _.uniq(subscriptionIds)
}
export default {
promises: {
addEntry,
@@ -29,13 +57,16 @@ export default {
}
/**
* Add an audit log entry
* Add an audit log entry. If the entry involves multiple managed subscriptions,
* adds multiple entries each with a different managedSubscriptionId.
*
* The entry should include at least the following fields:
*
* - operation: a string identifying the type of operation
* - userId: the user on behalf of whom the operation was performed
* - message: a string detailing what happened
* @param {ObjectId} projectId - the project for which the operation was performed
* @param {string} operation - a string identifying the type of operation
* @param {ObjectId} initiatorId - the user on behalf of whom the operation was performed
* @param {string} ipAddress - the IP address of the initiator
* @param {object} info - any additional payload
*/
async function addEntry(
projectId,
@@ -51,20 +82,32 @@ async function addEntry(
ipAddress,
info,
}
if (MANAGED_GROUP_PROJECT_EVENTS.includes(operation)) {
const managedSubscription =
await SubscriptionLocator.promises.getUniqueManagedSubscriptionMemberOf(
info.userId || initiatorId
)
if (managedSubscription) {
entry.managedSubscriptionId = managedSubscription._id
const managedSubscriptions = await findManagedSubscriptions(entry)
if (managedSubscriptions?.length) {
for (const managedSubscriptionId of managedSubscriptions) {
await ProjectAuditLogEntry.create({
...entry,
managedSubscriptionId,
})
}
} else {
await ProjectAuditLogEntry.create(entry)
}
await ProjectAuditLogEntry.create(entry)
}
/**
* Add an audit log entry only if the entry is related to a managed subscription.
* If the entry involves multiple managed subscriptions, adds multiple entries each
* with a different managedSubscriptionId.
*
* The entry should include at least the following fields:
*
* @param {ObjectId} projectId - the project for which the operation was performed
* @param {string} operation - a string identifying the type of operation
* @param {ObjectId} initiatorId - the user on behalf of whom the operation was performed
* @param {string} ipAddress - the IP address of the initiator
* @param {object} info - any additional payload
*/
async function addEntryIfManaged(
projectId,
operation,
@@ -76,24 +119,25 @@ async function addEntryIfManaged(
return
}
const managedSubscription =
await SubscriptionLocator.promises.getUniqueManagedSubscriptionMemberOf(
info.userId || initiatorId
)
if (!managedSubscription) {
return
}
const entry = {
projectId,
operation,
initiatorId,
ipAddress,
info,
managedSubscriptionId: managedSubscription._id,
}
await ProjectAuditLogEntry.create(entry)
const managedSubscriptions = await findManagedSubscriptions(entry)
if (!managedSubscriptions?.length) {
return
}
for (const managedSubscriptionId of managedSubscriptions) {
await ProjectAuditLogEntry.create({
...entry,
managedSubscriptionId,
})
}
}
/**
@@ -116,6 +160,9 @@ function addEntryInBackground(
})
}
/**
* Add an audit log entry in the background only if related to a managed subscription.
*/
function addEntryIfManagedInBackground(
projectId,
operation,

View File

@@ -999,6 +999,7 @@ module.exports = {
sourceEditorToolbarEndButtons: [],
rootContextProviders: [],
mainEditorLayoutModals: [],
mainEditorLayoutPanels: [],
langFeedbackLinkingWidgets: [],
labsExperiments: [],
integrationLinkingWidgets: [],

View File

@@ -15,6 +15,11 @@ import HistoryContainer from '@/features/ide-react/components/history-container'
import { DefaultSynctexControl } from '@/features/pdf-preview/components/detach-synctex-control'
import importOverleafModules from '../../../../macros/import-overleaf-module.macro'
const mainEditorLayoutPanels: Array<{
import: { default: ElementType }
path: string
}> = importOverleafModules('mainEditorLayoutPanels')
const mainEditorLayoutModalsModules: Array<{
import: { default: ElementType }
path: string
@@ -121,6 +126,11 @@ export default function MainLayout() {
</Panel>
</PanelGroup>
</Panel>
{mainEditorLayoutPanels.map(
({ import: { default: Component }, path }, i) => {
return <Component key={path} order={i + 3} />
}
)}
</PanelGroup>
</div>
{mainEditorLayoutModalsModules.map(

View File

@@ -3,27 +3,43 @@ import { EditorView } from '@codemirror/view'
const readOnlyConf = new Compartment()
// Make the editor focusable even when contenteditable="false" (read-only mode)
// This allows keyboard shortcuts like Cmd+F to work in read-only mode
const focusableReadOnly = EditorView.contentAttributes.of({ tabindex: '0' })
// Hide the blinking cursor in read-only mode
const hideCursor = EditorView.theme({
'&.cm-editor .cm-cursorLayer': {
display: 'none',
},
})
const readOnlyAttributes = [
EditorState.readOnly.of(true),
EditorView.editable.of(false),
focusableReadOnly,
hideCursor,
]
const editableAttributes = [
EditorState.readOnly.of(false),
EditorView.editable.of(true),
]
/**
* A custom extension which determines whether the content is editable, by setting the value of the EditorState.readOnly and EditorView.editable facets.
* Commands and extensions read the EditorState.readOnly facet to decide whether they should be applied.
* EditorView.editable determines whether the DOM can be focused, by changing the value of the contenteditable attribute.
* We add tabindex="0" in read-only mode to ensure the editor remains focusable for keyboard shortcuts.
*/
export const editable = () => {
return [
readOnlyConf.of([
EditorState.readOnly.of(true),
EditorView.editable.of(false),
]),
]
return [readOnlyConf.of(readOnlyAttributes)]
}
export const setEditable = (value = true): TransactionSpec => {
return {
effects: [
readOnlyConf.reconfigure([
EditorState.readOnly.of(!value),
EditorView.editable.of(value),
]),
readOnlyConf.reconfigure(value ? editableAttributes : readOnlyAttributes),
],
}
}

View File

@@ -0,0 +1,115 @@
#!/usr/bin/env node
import minimist from 'minimist'
import { scriptRunner } from '../lib/ScriptRunner.mjs'
import { getRegionClient } from '../../modules/subscriptions/app/src/StripeClient.mjs'
// eslint-disable-next-line import/no-unresolved
import * as csv from 'csv/sync'
import { readFile } from 'node:fs/promises'
/**
* This script creates Stripe coupons and promotion codes from a CSV file.
*
* Usage:
* node scripts/stripe/create_coupons.mjs --region=us INPUT.CSV
*
* Options:
* --region=us|uk Required. Stripe region to process (us or uk)
*
* CSV Format:
* id,name,percent_off,duration,code,max_redemptions
*/
/**
 * Create Stripe coupons and promotion codes from a CSV file.
 *
 * Reads the CSV supplied as the first positional argument, skips any coupons
 * whose id or name already exists in Stripe, then creates each remaining
 * coupon together with its promotion code. Individual failures are logged
 * and collected rather than aborting the whole run.
 *
 * @param {(message: string) => Promise<void>} trackProgress - progress logger
 *   supplied by the script runner
 * @throws {Error} when the required CLI arguments are missing
 */
async function main(trackProgress) {
  const args = minimist(process.argv.slice(2), {
    string: ['region'],
  })
  const inputCSV = args._[0]
  const region = args.region
  // Fail fast with a usage message instead of crashing later with an opaque
  // TypeError (e.g. on region.toUpperCase() when --region is omitted).
  if (!inputCSV || !region) {
    throw new Error(
      'Usage: create_coupons.mjs --region=us|uk INPUT.CSV (both arguments are required)'
    )
  }

  await trackProgress(
    `Starting script for Stripe ${region.toUpperCase()} region`
  )

  const file = await readFile(inputCSV, { encoding: 'utf8' })
  const couponsPlannedToCreate = csv.parse(file, { columns: true })
  await trackProgress(
    `Successfully parsed "${inputCSV}" CSV file with ${couponsPlannedToCreate.length} coupons to create`
  )

  const client = getRegionClient(region)
  // NOTE(review): only the first page of up to 100 coupons is fetched, so
  // pre-existing coupons beyond that page will not be detected — confirm
  // whether pagination is needed for these accounts.
  const existingCoupons = await client.stripe.coupons.list({ limit: 100 })
  const existingIdsAndNames = existingCoupons.data.map(ec => ({
    id: ec.id,
    name: ec.name,
  }))
  await trackProgress(
    `Successfully parsed ${existingIdsAndNames.length} existing coupons for verification`
  )

  // A planned coupon is a duplicate when either its id or its name already
  // exists in Stripe.
  const alreadyExists = c =>
    existingIdsAndNames.some(e => e.id === c.id || e.name === c.name)
  const couponsToCreate = couponsPlannedToCreate.filter(c => !alreadyExists(c))

  if (couponsToCreate.length === 0) {
    await trackProgress(`There are no coupons to create`)
  } else if (couponsToCreate.length < couponsPlannedToCreate.length) {
    const filteredOut = couponsPlannedToCreate
      .filter(alreadyExists)
      .map(c => c.name)
    await trackProgress(
      `Successfully filtered out: ${filteredOut.join(', ')} existing coupons from the ones to create`
    )
  }

  const errors = []
  for (const toCreate of couponsToCreate) {
    try {
      const createdCoupon = await client.stripe.coupons.create({
        id: toCreate.id,
        name: toCreate.name,
        percent_off: parseFloat(toCreate.percent_off),
        duration: toCreate.duration,
      })
      const promotionPayload = {
        coupon: createdCoupon.id,
        code: toCreate.code,
      }
      // max_redemptions is optional: only set it when a positive number is given
      const maxRedemptions = parseInt(toCreate.max_redemptions, 10)
      if (maxRedemptions > 0) {
        promotionPayload.max_redemptions = maxRedemptions
      }
      await client.stripe.promotionCodes.create(promotionPayload)
    } catch (error) {
      // trackProgress is called with a single message everywhere else in this
      // script, so fold the error detail into the message instead of passing
      // it as a (dropped) second argument.
      await trackProgress(
        `Failed to create coupon "${toCreate.name}" (${toCreate.id}): ${error.message}`
      )
      errors.push(toCreate.name)
    }
  }

  if (errors.length > 0) {
    await trackProgress(
      `Could not create the following coupons: ${errors.join(', ')}`
    )
  } else {
    await trackProgress(
      `Successfully created ${couponsToCreate.length} coupon(s) and promotion code(s).`
    )
  }
}
// Execute the script using the runner
try {
await scriptRunner(main)
process.exit(0)
} catch (error) {
console.error('Script failed:', error.message)
process.exit(1)
}

View File

@@ -8,6 +8,7 @@ import { FC } from 'react'
import { FileTreePathContext } from '@/features/file-tree/contexts/file-tree-path'
import { TestContainer } from '../helpers/test-container'
import { PermissionsContext } from '@/features/ide-react/context/permissions-context'
import { metaKey } from '../helpers/meta-key'
const FileTreePathProvider: FC<React.PropsWithChildren> = ({ children }) => (
<FileTreePathContext.Provider
@@ -154,4 +155,56 @@ describe('<CodeMirrorEditor/> in Visual mode with read-only permission', functio
cy.findByLabelText('URL').should('be.disabled')
cy.findByRole('button', { name: 'Remove link' }).should('not.exist')
})
it('opens the CodeMirror search panel with Cmd/Ctrl+F', function () {
mountEditor('Hello world\n\nThis is a test document.')
// Click to focus the editor
cy.get('.cm-content').click()
// Search panel should not be open initially
cy.findByRole('search').should('not.exist')
// Press Cmd/Ctrl+F to open search
cy.get('.cm-content').type(`{${metaKey}+f}`)
// Search panel should now be open
cy.findByRole('search').should('exist')
cy.findByRole('textbox', { name: 'Find' }).should('be.visible')
})
it('allows searching for text in read-only mode', function () {
mountEditor('Hello world\n\nThis is a test document with hello again.')
// Click to focus the editor
cy.get('.cm-content').click()
// Open search panel
cy.get('.cm-content').type(`{${metaKey}+f}`)
// Type a search query
cy.findByRole('textbox', { name: 'Find' }).type('hello')
// Should find matches (case insensitive)
cy.get('.cm-searchMatch').should('have.length.at.least', 1)
})
it('closes the search panel with Escape', function () {
mountEditor('Hello world')
// Click to focus the editor
cy.get('.cm-content').click()
// Open search panel
cy.get('.cm-content').type(`{${metaKey}+f}`)
// Search panel should be open
cy.findByRole('search').should('exist')
// Press Escape to close
cy.findByRole('textbox', { name: 'Find' }).type('{esc}')
// Search panel should be closed
cy.findByRole('search').should('not.exist')
})
})

View File

@@ -0,0 +1,130 @@
import { expect } from 'chai'
import { EditorState } from '@codemirror/state'
import { EditorView } from '@codemirror/view'
import {
editable,
setEditable,
} from '../../../../../frontend/js/features/source-editor/extensions/editable'
const doc = `\\documentclass{article}
\\begin{document}
Hello world
\\end{document}`
// Unit tests for the `editable` extension: verifies the EditorState.readOnly /
// EditorView.editable facets, the contenteditable attribute, and the
// tabindex="0" workaround that keeps the editor focusable in read-only mode.
describe('editable extension', function () {
  let view: EditorView
  let container: HTMLElement

  beforeEach(function () {
    // Attach a fresh container to the document so focus() works on real DOM
    container = document.createElement('div')
    document.body.appendChild(container)
  })

  afterEach(function () {
    view?.destroy()
    container?.remove()
  })

  // Build an EditorView over the shared doc; defaults to the read-only state
  // produced by editable()
  function createView(extensions = [editable()]) {
    view = new EditorView({
      parent: container,
      state: EditorState.create({
        doc,
        extensions,
      }),
    })
    return view
  }

  describe('initial read-only state', function () {
    beforeEach(function () {
      createView()
    })

    it('should set EditorState.readOnly to true', function () {
      expect(view.state.readOnly).to.be.true
    })

    it('should set EditorView.editable to false', function () {
      expect(view.state.facet(EditorView.editable)).to.be.false
    })

    it('should set contenteditable="false" on the content element', function () {
      expect(view.contentDOM.getAttribute('contenteditable')).to.equal('false')
    })

    it('should set tabindex="0" to allow focus in read-only mode', function () {
      expect(view.contentDOM.getAttribute('tabindex')).to.equal('0')
    })

    it('should allow the editor to receive focus via tabindex', function () {
      view.contentDOM.focus()
      expect(document.activeElement).to.equal(view.contentDOM)
    })
  })

  describe('setEditable(true) - switching to editable mode', function () {
    beforeEach(function () {
      createView()
      view.dispatch(setEditable(true))
    })

    it('should set EditorState.readOnly to false', function () {
      expect(view.state.readOnly).to.be.false
    })

    it('should set EditorView.editable to true', function () {
      expect(view.state.facet(EditorView.editable)).to.be.true
    })

    it('should set contenteditable="true" on the content element', function () {
      expect(view.contentDOM.getAttribute('contenteditable')).to.equal('true')
    })

    it('should not have tabindex attribute (not needed when contenteditable)', function () {
      expect(view.contentDOM.getAttribute('tabindex')).to.be.null
    })

    it('should allow document modifications', function () {
      view.dispatch({
        changes: { from: 0, insert: 'New text ' },
      })
      expect(view.state.doc.toString().startsWith('New text ')).to.be.true
    })

    it('should allow the editor to receive focus', function () {
      view.contentDOM.focus()
      expect(document.activeElement).to.equal(view.contentDOM)
    })
  })

  describe('setEditable(false) - switching to read-only mode', function () {
    beforeEach(function () {
      // Round-trip through editable mode to exercise reconfiguration
      createView()
      view.dispatch(setEditable(true))
      view.dispatch(setEditable(false))
    })

    it('should set EditorState.readOnly to true', function () {
      expect(view.state.readOnly).to.be.true
    })

    it('should set EditorView.editable to false', function () {
      expect(view.state.facet(EditorView.editable)).to.be.false
    })

    it('should set contenteditable="false" on the content element', function () {
      expect(view.contentDOM.getAttribute('contenteditable')).to.equal('false')
    })

    it('should restore tabindex="0" for focusability', function () {
      expect(view.contentDOM.getAttribute('tabindex')).to.equal('0')
    })

    it('should still allow the editor to receive focus after switching modes', function () {
      view.contentDOM.focus()
      expect(document.activeElement).to.equal(view.contentDOM)
    })
  })
})

View File

@@ -9,6 +9,9 @@ const { ObjectId } = mongodb
const projectId = new ObjectId()
const userId = new ObjectId()
const subscriptionId = new ObjectId()
const previousOwnerId = new ObjectId()
const newOwnerId = new ObjectId()
const subscriptionId2 = new ObjectId()
describe('ProjectAuditLogHandler', function (ctx) {
beforeEach(async function (ctx) {
@@ -75,7 +78,7 @@ describe('ProjectAuditLogHandler', function (ctx) {
'0:0:0:0'
)
expect(ctx.createEntryMock).to.have.been.calledWithMatch({
managedSubscriptionId: subscriptionId,
managedSubscriptionId: subscriptionId.toString(),
})
})
@@ -93,6 +96,29 @@ describe('ProjectAuditLogHandler', function (ctx) {
managedSubscriptionId: subscriptionId,
})
})
// transfer-ownership may involve users from two different managed groups;
// addEntry should create one audit log entry per managed subscription.
it('adds multiple entries when the log involves multiple group subscriptions', async function (ctx) {
  ctx.getUniqueManagedSubscriptionMemberOfMock.onFirstCall().resolves({
    _id: subscriptionId,
  })
  ctx.getUniqueManagedSubscriptionMemberOfMock.onSecondCall().resolves({
    _id: subscriptionId2,
  })

  await ctx.ProjectAuditLogHandler.promises.addEntry(
    projectId,
    'transfer-ownership',
    userId,
    '0:0:0:0',
    { previousOwnerId, newOwnerId }
  )

  // One entry per distinct managed subscription id
  expect(ctx.createEntryMock).to.have.been.calledTwice
  expect(ctx.createEntryMock).to.have.been.calledWithMatch({
    managedSubscriptionId: subscriptionId.toString(),
  })
  expect(ctx.createEntryMock).to.have.been.calledWithMatch({
    managedSubscriptionId: subscriptionId2.toString(),
  })
})
})
describe('addEntryIfManaged', function () {
@@ -116,7 +142,7 @@ describe('ProjectAuditLogHandler', function (ctx) {
initiatorId: userId,
ipAddress: '0:0:0:0',
info: {},
managedSubscriptionId: subscriptionId,
managedSubscriptionId: subscriptionId.toString(),
})
})
@@ -142,5 +168,28 @@ describe('ProjectAuditLogHandler', function (ctx) {
expect(ctx.createEntryMock).not.to.have.been.called
})
})
// Same multi-subscription behaviour as addEntry, but via addEntryIfManaged,
// which only writes entries when a managed subscription is involved.
it('adds multiple entries when the log involves multiple group subscriptions', async function (ctx) {
  ctx.getUniqueManagedSubscriptionMemberOfMock.onFirstCall().resolves({
    _id: subscriptionId,
  })
  ctx.getUniqueManagedSubscriptionMemberOfMock.onSecondCall().resolves({
    _id: subscriptionId2,
  })

  await ctx.ProjectAuditLogHandler.promises.addEntryIfManaged(
    projectId,
    'transfer-ownership',
    userId,
    '0:0:0:0',
    { previousOwnerId, newOwnerId }
  )

  // One entry per distinct managed subscription id
  expect(ctx.createEntryMock).to.have.been.calledTwice
  expect(ctx.createEntryMock).to.have.been.calledWithMatch({
    managedSubscriptionId: subscriptionId.toString(),
  })
  expect(ctx.createEntryMock).to.have.been.calledWithMatch({
    managedSubscriptionId: subscriptionId2.toString(),
  })
})
})
})