Mirror of https://github.com/overleaf/overleaf.git, synced 2025-12-05 01:10:29 +00:00

Compare commits: 3bba9c9fb7 ... 8b33fc86ab (15 commits)
| SHA1 |
|---|
| 8b33fc86ab |
| e9fd1311a7 |
| e05ec0321b |
| 84e4808812 |
| a7c64d05e8 |
| a51ba7a7d5 |
| 2db902bdd2 |
| dc1203e2bd |
| c1446b9d34 |
| ee447b67e8 |
| 9153130c4f |
| f582a77de0 |
| 3b853e9434 |
| 3b232ca419 |
| 1b8a183430 |
package-lock.json (generated, 8 lines changed)
@@ -46639,9 +46639,9 @@
         }
       },
       "node_modules/tar-fs": {
-        "version": "3.0.9",
-        "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.9.tgz",
-        "integrity": "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA==",
+        "version": "3.1.1",
+        "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.1.1.tgz",
+        "integrity": "sha512-LZA0oaPOc2fVo82Txf3gw+AkEd38szODlptMYejQUhndHMLQ9M059uXR+AfS7DNo0NpINvSqDsvyaCrBVkptWg==",
         "license": "MIT",
         "dependencies": {
           "pump": "^3.0.0",
@@ -50792,7 +50792,7 @@
         "p-limit": "^3.1.0",
         "request": "^2.88.2",
         "send": "^0.19.0",
-        "tar-fs": "^3.0.9",
+        "tar-fs": "^3.1.1",
         "workerpool": "^6.1.5"
       },
       "devDependencies": {
@@ -10,7 +10,7 @@ ENV TEXMFVAR=/var/lib/overleaf/tmp/texmf-var
 
 # Update to ensure dependencies are updated
 # ------------------------------------------
-ENV REBUILT_AFTER="2025-05-19"
+ENV REBUILT_AFTER="2025-09-26"
 
 # Install dependencies
 # --------------------
server-ce/hotfix/5.5.5/Dockerfile

@@ -3,5 +3,7 @@ FROM sharelatex/sharelatex:5.5.4
 # ../../bin/import_pr_patch.sh 27932
 # Remove web migrations changes
 # Remove test changes
-COPY *.patch .
-RUN bash -ec 'for p in *.patch; do echo "=== Applying $p ==="; patch -p1 < "$p" && rm $p; done'
+# Add init script change (commit cc83a526ecd86ed7b1a0e061b074d2fb15d4d672 in PR 28738)
+COPY *.patch* .
+RUN bash -ec 'for p in *.patch; do echo "=== Applying $p ==="; patch -p1 < "$p" && rm $p; done' \
+  && bash -ec 'cd / && for p in /overleaf/*.patch-abs; do echo "=== Applying $p ==="; patch -p1 < "$p" && rm $p; done'
server-ce/hotfix/5.5.5/pr_28738.patch-abs (new file, 12 lines)

@@ -0,0 +1,12 @@
--- a/etc/my_init.d/100_make_overleaf_data_dirs.sh
+++ b/etc/my_init.d/100_make_overleaf_data_dirs.sh
@@ -4,8 +4,10 @@ set -e
 mkdir -p /var/lib/overleaf/data
 chown www-data:www-data /var/lib/overleaf/data
 
-mkdir -p /var/lib/overleaf/data/user_files
-chown www-data:www-data /var/lib/overleaf/data/user_files
+if [[ "${OVERLEAF_FILESTORE_MIGRATION_LEVEL:-0}" != "2" ]]; then
+  mkdir -p /var/lib/overleaf/data/user_files
+  chown www-data:www-data /var/lib/overleaf/data/user_files
+fi
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
 set -e
 
 mkdir -p /var/lib/overleaf/data
server-ce/test/.gitignore (vendored, 1 line changed)

@@ -1,2 +1,3 @@
+cypress-reports/
 data/
 docker-mailtrap/
server-ce/test/Jenkinsfile (vendored, 46 lines changed)

@@ -37,7 +37,9 @@ pipeline {
     OVERLEAF_LATEST = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-internal:main"
     OVERLEAF_TAG = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-internal:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}"
+    IMAGE_TAG_CE = "${OVERLEAF_TAG}"
-    IMAGE_TAG_PRO = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:main"
+    IMAGE_TAG_PRO = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}"
+    OVERLEAF_PRO_TAG_BRANCH = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro-internal:${BRANCH_NAME}"
     OVERLEAF_PRO_TAG_LATEST = "us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro-internal:main"
   }
   stages {
     // Retries will use the same pipeline instance. Reset the vars.

@@ -51,6 +53,11 @@ pipeline {
       job_server_ce_build_done = false
       job_server_pro_build_done = false
     }
+    // Reset the results folder.
+    // Use a folder that is not managed by cypress, as cypress will clear its results folder at the start of each individual run.
+    // I.e. we would lose the test results from finished/running test suites when the last test suite starts.
+    sh 'rm -rf server-ce/test/cypress-reports/'
+    sh 'mkdir -p server-ce/test/cypress-reports/'
   }
 }
 stage('Parallel') {

@@ -138,6 +145,18 @@ pipeline {
         }
       }
     }
+    stage('Push Pro to internal') {
+      steps {
+        script {
+          waitUntil {
+            return job_server_pro_build_done
+          }
+        }
+        dir('server-pro') {
+          sh 'make push_branch'
+        }
+      }
+    }
     stage('Prefetch default') {
       steps {
         dir('server-ce/test') {

@@ -286,18 +305,41 @@ pipeline {
         }
       }
     }
+    stage('PRO custom 5') {
+      environment {
+        CYPRESS_SHARD = "PRO_CUSTOM_5"
+        COMPOSE_PROJECT_NAME = "test-pro-custom-5"
+      }
+      steps {
+        script {
+          waitUntil {
+            return job_npm_install_done && job_server_pro_build_done && job_prefetch_default_done && job_prefetch_custom_done
+          }
+        }
+        dir('server-ce/test') {
+          sh 'make test-e2e'
+        }
+      }
+    }
   }
 }
 post {
+  // Collect junit test results for both success and failure case.
+  always {
+    junit checksName: 'Server Pro E2E test results', testResults: 'server-ce/test/cypress-reports/junit-*.xml'
+  }
   // Ensure tear down of test containers, remove CE docker images, then run general Jenkins VM cleanup.
   cleanup {
     dir('server-ce/test') {
-      sh 'make clean'
+      sh 'make clean -j10'
     }
+    dir('server-ce') {
+      sh 'make clean'
+    }
     dir('server-pro') {
       sh 'make clean'
     }
     sh 'make clean_jenkins'
   }
 }
server-ce/test/Makefile

@@ -13,6 +13,7 @@ export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:main
 export CYPRESS_SHARD ?=
 export COMPOSE_PROJECT_NAME ?= test
 export DOCKER_USER := $(shell id -u):$(shell id -g)
+IMAGE_TAG_PRO_BASE := $(shell echo $(IMAGE_TAG_PRO) | sed -E s/:.+//)
 
 test-e2e-native:
 	docker compose -f docker-compose.yml -f docker-compose.native.yml up --no-log-prefix sharelatex host-admin -d

@@ -24,13 +25,13 @@ test-e2e-native:
 test-e2e:
 	docker compose build host-admin
 	docker compose up --detach --wait host-admin
-	docker compose up --detach --wait mongo || { docker compose logs mongo; exit 42; }
-	docker compose up --no-log-prefix --exit-code-from=e2e e2e host-admin || { docker compose logs mongo; exit 42; }
+	docker compose up --detach --wait mongo
+	docker compose up --no-log-prefix --exit-code-from=e2e e2e host-admin
 
 test-e2e-open:
 	docker compose up --detach --wait host-admin
-	docker compose up --detach --wait mongo || { docker compose logs mongo; exit 42; }
-	docker compose up --no-log-prefix --exit-code-from=e2e-open e2e-open host-admin || { docker compose logs mongo; exit 42; }
+	docker compose up --detach --wait mongo
+	docker compose up --no-log-prefix --exit-code-from=e2e-open e2e-open host-admin
 
 SHARD_PROJECT_NAMES = \
 	test-ce-default \

@@ -43,7 +44,7 @@ SHARD_PROJECT_NAMES = \
 	test-pro-custom-4
 CLEAN_SHARDS=$(addprefix clean/,$(SHARD_PROJECT_NAMES))
 clean: $(CLEAN_SHARDS)
-	-docker compose run --no-deps --rm --entrypoint rm host-admin -rf docker-compose.override.yml docker-compose.*_*.yml data/
+	-docker compose run --no-deps --rm --entrypoint rm host-admin -rf docker-compose.override.yml docker-compose.*_*.yml cypress-reports/ data/
 	-docker compose down --remove-orphans --rmi local --timeout 0 --volumes
 
 $(CLEAN_SHARDS): clean/%:

@@ -75,18 +76,18 @@ prefetch_custom_texlive_2022:
 
 prefetch_custom: prefetch_old_4_2
 prefetch_old_4_2:
-	docker pull $(IMAGE_TAG_PRO:main=4.2)
+	docker pull $(IMAGE_TAG_PRO_BASE):4.2
 
 prefetch_custom: prefetch_old_5_0
 prefetch_old_5_0:
-	docker pull $(IMAGE_TAG_PRO:main=5.0.1-RC1)
-	docker pull $(IMAGE_TAG_PRO:main=5.0)
+	docker pull $(IMAGE_TAG_PRO_BASE):5.0.1-RC1
+	docker pull $(IMAGE_TAG_PRO_BASE):5.0
 
 prefetch_custom: build_hotfix_5_5_5
 build_hotfix_5_5_5:
-	docker pull $(IMAGE_TAG_PRO:main=5.5.4)
-	docker tag $(IMAGE_TAG_PRO:main=5.5.4) quay.io/sharelatex/sharelatex-pro:5.5.4
-	cd ../../server-pro/hotfix/5.5.5 && docker build -t $(IMAGE_TAG_PRO:main=5.5.5) .
+	docker pull $(IMAGE_TAG_PRO_BASE):5.5.4
+	docker tag $(IMAGE_TAG_PRO_BASE):5.5.4 quay.io/sharelatex/sharelatex-pro:5.5.4
+	cd ../../server-pro/hotfix/5.5.5 && docker build -t $(IMAGE_TAG_PRO_BASE):5.5.5 .
 
 # Google Cloud Build runs on a very ancient Docker version that does not support the subdir flag.
 # Use services -> mailtrap -> build -> context = https://github.com/dbck/docker-mailtrap.git#v1.5.0:build in docker-compose.yml eventually.
@@ -33,6 +33,16 @@ if (process.env.CYPRESS_SHARD && !process.env.SPEC_PATTERN) {
 
 const specPattern = process.env.SPEC_PATTERN || './**/*.spec.ts'
 
+let reporterOptions = {}
+if (process.env.CI) {
+  reporterOptions = {
+    reporter: '/overleaf/server-ce/test/node_modules/cypress-multi-reporters',
+    reporterOptions: {
+      configFile: 'cypress/cypress-multi-reporters.json',
+    },
+  }
+}
+
 module.exports = defineConfig({
   defaultCommandTimeout: 10_000,
   fixturesFolder: 'cypress/fixtures',

@@ -55,4 +65,5 @@ module.exports = defineConfig({
   retries: {
     runMode: 3,
   },
+  ...reporterOptions,
 })
server-ce/test/cypress/cypress-multi-reporters.json (new file, 10 lines)

@@ -0,0 +1,10 @@
{
  "reporterEnabled": "spec, mocha-junit-reporter",
  "mochaJunitReporterReporterOptions": {
    "mochaFile": "cypress-reports/junit-[hash]-[suiteFilename].xml",
    "includePending": true,
    "useFullSuiteTitle": true,
    "jenkinsMode": true,
    "rootSuiteTitle": "Server Pro E2E tests"
  }
}
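Together with the cypress.config change above, CI runs now write both console output and per-spec JUnit XML that the Jenkinsfile's junit step collects. A condensed, illustrative view of the effective reporter settings when CI is set (values merged from the two files above; the [hash] and [suiteFilename] placeholders are expanded by mocha-junit-reporter, which keeps parallel shards from overwriting each other's report files):

// Illustrative sketch only: cypress-multi-reporters actually reads these
// values from the configFile named in cypress.config above.
const ciReporterOptions = {
  reporter: 'cypress-multi-reporters',
  reporterOptions: {
    reporterEnabled: 'spec, mocha-junit-reporter',
    mochaJunitReporterReporterOptions: {
      mochaFile: 'cypress-reports/junit-[hash]-[suiteFilename].xml',
      jenkinsMode: true,
    },
  },
}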
@@ -70,9 +70,9 @@ services:
     stop_grace_period: 0s
     entrypoint: npm
     command: run cypress:run
-    working_dir: /e2e
+    working_dir: /overleaf/server-ce/test
     volumes:
-      - ./:/e2e
+      - ./:/overleaf/server-ce/test
     user: "${DOCKER_USER:-1000:1000}"
     environment:
       CYPRESS_SHARD:
@@ -28,7 +28,7 @@ function activateUserVersion1x(url: string, password = DEFAULT_PASSWORD) {
 }
 
 describe('filestore migration', function () {
-  if (isExcludedBySharding('LOCAL_ONLY')) return
+  if (isExcludedBySharding('PRO_CUSTOM_5')) return
   const email = 'user@example.com'
   // Branding of env vars changed in 5.x
   const sharelatexBrandedVars = {

@@ -385,16 +385,40 @@ describe('filestore migration', function () {
 
   describe('purge filestore data', function () {
     before(async function () {
-      await purgeFilestoreData()
+      const deleted = await purgeFilestoreData()
+      expect(deleted).to.have.length.greaterThan(
+        previousBinaryFiles.length
+      )
+      expect(deleted).to.include(
+        "removed directory '/var/lib/overleaf/data/user_files'"
+      )
     })
     checkFilesAreAccessible()
 
+    describe('after next restart', function () {
+      startWith({
+        version: '5.5.5',
+        pro: true,
+        withDataDir: true,
+        vars: {
+          OVERLEAF_APP_NAME: 'change-config',
+          OVERLEAF_FILESTORE_MIGRATION_LEVEL: '2',
+        },
+      })
+      it('should not recreate the user_files folder', async function () {
+        expect(await purgeFilestoreData()).to.deep.equal([])
+      })
+    })
+
     describe('latest', function () {
       startWith({
         pro: true,
         withDataDir: true,
         vars: { OVERLEAF_FILESTORE_MIGRATION_LEVEL: '2' },
       })
+      it('should not recreate the user_files folder', async function () {
+        expect(await purgeFilestoreData()).to.deep.equal([])
+      })
       checkFilesAreAccessible()
     })
   })
@@ -15,6 +15,7 @@ export function isExcludedBySharding(
     | 'PRO_CUSTOM_2'
     | 'PRO_CUSTOM_3'
     | 'PRO_CUSTOM_4'
+    | 'PRO_CUSTOM_5'
 ) {
   const SHARD = Cypress.env('SHARD')
   return SHARD && shard !== SHARD
@@ -122,9 +122,11 @@ export async function setMongoFeatureCompatibilityVersion(
 }
 
 export async function purgeFilestoreData() {
-  await fetchJSON(`${hostAdminURL}/data/user_files`, {
+  const { stdout } = await fetchJSON(`${hostAdminURL}/data/user_files`, {
     method: 'DELETE',
   })
+  if (!stdout.trim()) return []
+  return stdout.trim().split('\n')
 }
 
 async function sleep(ms: number) {
@@ -443,7 +443,7 @@ app.get('/redis/keys', (req, res) => {
 app.delete('/data/user_files', (req, res) => {
   runDockerCompose(
     'exec',
-    ['sharelatex', 'rm', '-rf', '/var/lib/overleaf/data/user_files'],
+    ['sharelatex', 'rm', '-vrf', '/var/lib/overleaf/data/user_files'],
    (error, stdout, stderr) => {
      res.json({ error, stdout, stderr })
    }
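The new -v flag makes rm print one line per removed path; host-admin hands that stdout back, and the purgeFilestoreData helper above splits it into an array. A minimal sketch of what the spec ends up asserting on (the import path is illustrative):

// Sketch, assuming the helper above is importable from the spec:
import { purgeFilestoreData } from './helpers/hostAdminClient'

// Each array entry is one `rm -v` output line, e.g.
//   "removed '/var/lib/overleaf/data/user_files/<project>/<file>'"
//   "removed directory '/var/lib/overleaf/data/user_files'"
const deleted = await purgeFilestoreData()

// When nothing was deleted (empty stdout) the helper returns [], which the
// spec uses to assert the folder is not recreated after a restart with
// OVERLEAF_FILESTORE_MIGRATION_LEVEL=2.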
server-ce/test/package-lock.json (generated, 782 lines changed; diff suppressed because it is too large)
server-ce/test/package.json

@@ -18,9 +18,11 @@
     "body-parser": "^1.20.3",
     "celebrate": "^15.0.3",
     "cypress": "13.13.2",
+    "cypress-multi-reporters": "^2.0.5",
     "express": "^4.21.2",
     "isomorphic-git": "^1.33.1",
     "js-yaml": "^4.1.0",
+    "mocha-junit-reporter": "^2.2.1",
     "pdf-parse": "^1.1.1",
     "typescript": "^5.0.4",
     "uuid": "^9.0.1"
@@ -34,7 +34,7 @@
     "p-limit": "^3.1.0",
     "request": "^2.88.2",
     "send": "^0.19.0",
-    "tar-fs": "^3.0.9",
+    "tar-fs": "^3.1.1",
     "workerpool": "^6.1.5"
   },
   "devDependencies": {
@@ -310,7 +310,7 @@ describe('PubSubRace', function () {
       // - disconnect goes through one process.nextTick
       // We have to inject the disconnect event into a different event loop
       // cycle.
-      3
+      1
     )
   },
services/web/Jenkinsfile (vendored, 2 lines changed)

@@ -486,7 +486,7 @@ pipeline {
   // Ensure tear down of test containers, then run general Jenkins VM cleanup.
   cleanup {
     dir('services/web') {
-      sh 'make clean'
+      sh 'make clean -j10'
     }
     sh 'make clean_jenkins'
   }
services/web/Makefile

@@ -142,7 +142,7 @@ test_unit_module: mongo_migrations_for_tests
 	$(MAKE) modules/$(MODULE_NAME)/test_unit
 
 mongo_migrations_for_tests:
-	$(DOCKER_COMPOSE) run --rm test_unit npm run migrations -- migrate -t saas || { $(DOCKER_COMPOSE) logs mongo; exit 42; }
+	$(DOCKER_COMPOSE) run --rm test_unit npm run migrations -- migrate -t saas
 
 #
 # Frontend tests

@@ -222,7 +222,7 @@ test_acceptance_app_server_pro: export COMPOSE_PROJECT_NAME=acceptance_test_serv
 test_acceptance_app_server_pro: export OVERLEAF_CONFIG=$(CFG_SERVER_PRO)
 
 $(TEST_ACCEPTANCE_APP):
-	$(DOCKER_COMPOSE) run --rm test_acceptance || { $(DOCKER_COMPOSE) logs mongo; exit 42; }
+	$(DOCKER_COMPOSE) run --rm test_acceptance
 	$(DOCKER_COMPOSE) down -v -t 0
 
 # We are using _make magic_ for turning these file-targets into calls to

@@ -369,7 +369,7 @@ TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS = \
 	test_acceptance_modules_merged_server_pro \
 
 $(TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS):
-	$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner || { $(DOCKER_COMPOSE) logs mongo; exit 42; }
+	$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner
 	$(DOCKER_COMPOSE) down -v -t 0
 
 # outer loop for running saas tests in parallel

@@ -390,7 +390,7 @@ test_acceptance_modules_merged_saas_4: export COMPOSE_PROJECT_NAME = \
 $(TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS): export BASE_CONFIG = $(CFG_SAAS)
 
 $(TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS): test_acceptance_modules_merged_saas_%:
-	$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner_$* || { $(DOCKER_COMPOSE) logs mongo; exit 42; }
+	$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner_$*
 	$(DOCKER_COMPOSE) down -v -t 0
 
 test_acceptance_modules: $(TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS)
services/web/app/src/Features/History/RestoreManager.js

@@ -3,6 +3,7 @@ import Path from 'node:path'
 import FileWriter from '../../infrastructure/FileWriter.js'
 import Metrics from '../../infrastructure/Metrics.js'
 import FileSystemImportManager from '../Uploads/FileSystemImportManager.js'
+import FileTypeManager from '../Uploads/FileTypeManager.js'
 import EditorController from '../Editor/EditorController.js'
 import Errors from '../Errors/Errors.js'
 import moment from 'moment'

@@ -113,12 +114,8 @@ const RestoreManager = {
     if (!project?.overleaf?.history?.rangesSupportEnabled) {
       throw new OError('project does not have ranges support', { projectId })
     }
+    const historyId = project.overleaf.history.id
 
-    const fsPath = await RestoreManager._writeFileVersionToDisk(
-      projectId,
-      version,
-      pathname
-    )
     const basename = Path.basename(pathname)
     let dirname = Path.dirname(pathname)
     if (dirname === '.') {

@@ -142,18 +139,13 @@ const RestoreManager = {
       throw new OError('file not found in snapshot', { pathname })
     }
 
-    const importInfo = await FileSystemImportManager.promises.importFile(
-      fsPath,
-      pathname
-    )
-
     let hadDeletedRootFile = false
     if (file) {
       if (file.type !== 'doc' && file.type !== 'file') {
         throw new OError('unexpected file type', { type: file.type })
       }
       logger.debug(
-        { projectId, fileId: file.element._id, type: importInfo.type },
+        { projectId, fileId: file.element._id },
         'deleting entity before reverting it'
       )
       await EditorController.promises.deleteEntity(

@@ -177,15 +169,21 @@ const RestoreManager = {
 
     // Look for metadata indicating a linked file.
     const fileMetadata = snapshotFile.getMetadata()
-    const isFileMetadata = fileMetadata && 'provider' in fileMetadata
+    const isLinkedFile = fileMetadata && 'provider' in fileMetadata
 
     logger.debug({ fileMetadata }, 'metadata from history')
 
     if (
+      isLinkedFile ||
       !snapshotFile.isEditable() ||
-      importInfo.type === 'file' ||
-      isFileMetadata
+      !FileTypeManager.isEditable(snapshotFile.getContent(), {
+        filename: pathname,
+      })
     ) {
+      const fsPath = await RestoreManager._writeSnapshotFileToDisk(
+        historyId,
+        snapshotFile
+      )
       const newFile = await EditorController.promises.upsertFile(
         projectId,
         parentFolderId,

@@ -320,7 +318,7 @@ const RestoreManager = {
     endTimer({ type: 'doc' })
     return {
       _id,
-      type: importInfo.type,
+      type: 'doc',
     }
   },

@@ -428,6 +426,22 @@ const RestoreManager = {
     }/project/${projectId}/version/${version}/${encodeURIComponent(pathname)}`
     return await FileWriter.promises.writeUrlToDisk(projectId, url)
   },
+
+  async _writeSnapshotFileToDisk(historyId, file) {
+    if (file.isEditable()) {
+      return await FileWriter.promises.writeContentToDisk(
+        historyId,
+        file.getContent()
+      )
+    } else {
+      const hash = file.getHash()
+      const { stream } = await HistoryManager.promises.requestBlob(
+        historyId,
+        hash
+      )
+      return await FileWriter.promises.writeStreamToDisk(historyId, stream)
+    }
+  },
 }
 
 export default { ...callbackifyAll(RestoreManager), promises: RestoreManager }
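In short, revertFile no longer round-trips the snapshot through FileSystemImportManager; it classifies the snapshot content directly and only writes to disk when restoring a binary or linked file. A condensed sketch of the new decision, using the names from this diff (not the verbatim source; FileTypeManager.isEditable appears in the FileTypeManager diff further down):

// Condensed sketch of the restore decision in revertFile.
function shouldRestoreAsFile(snapshotFile, pathname) {
  const fileMetadata = snapshotFile.getMetadata()
  // Linked files carry a 'provider' entry in their metadata.
  const isLinkedFile = Boolean(fileMetadata && 'provider' in fileMetadata)
  return (
    isLinkedFile ||
    !snapshotFile.isEditable() ||
    !FileTypeManager.isEditable(snapshotFile.getContent(), {
      filename: pathname,
    })
  )
}
// true  => _writeSnapshotFileToDisk + EditorController.upsertFile (type 'file')
// false => restored as an editable doc (type: 'doc')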
@@ -151,8 +151,8 @@ function isGroupPlanCode(planCode) {
  * into its corresponding Stripe-compatible plan code (e.g., `group_professional_educational`),
  * extracting the license quantity where applicable.
  *
- * @param {RecurlyPlanCode} planCode
- * @returns {{ planCode: RecurlyPlanCode, quantity: number }}
+ * @param {string} planCode
+ * @returns {{ planCode: string, quantity: number }}
  */
 function convertLegacyGroupPlanCodeToConsolidatedGroupPlanCodeIfNeeded(
   planCode
@@ -31,7 +31,7 @@ async function newUpdate(
     throw new Errors.TooManyRequestsError('project on cooldown')
   }
 
-  const shouldIgnore = await FileTypeManager.promises.shouldIgnore(path)
+  const shouldIgnore = FileTypeManager.shouldIgnore(path)
   if (shouldIgnore) {
     return null
   }

@@ -175,7 +175,7 @@ async function createFolder(userId, projectId, projectName, path) {
     throw new Errors.TooManyRequestsError('project on cooldown')
   }
 
-  const shouldIgnore = await FileTypeManager.promises.shouldIgnore(path)
+  const shouldIgnore = FileTypeManager.shouldIgnore(path)
   if (shouldIgnore) {
     return null
   }

@@ -96,7 +96,7 @@ async function addFolderContents(
   }
   const entries = (await fs.promises.readdir(folderPath)) || []
   for (const entry of entries) {
-    if (await FileTypeManager.promises.shouldIgnore(entry)) {
+    if (FileTypeManager.shouldIgnore(entry)) {
       continue
     }
     await addEntity(

@@ -227,7 +227,7 @@ async function* _walkDir(dirPath) {
   const entries = await fs.promises.readdir(dirPath)
   for (const entry of entries) {
     const entryPath = Path.join(dirPath, entry)
-    if (await FileTypeManager.promises.shouldIgnore(entryPath)) {
+    if (FileTypeManager.shouldIgnore(entryPath)) {
       continue
     }
services/web/app/src/Features/Uploads/FileTypeManager.js

@@ -1,96 +1,101 @@
-const fs = require('fs')
+// @ts-check
+
+const fs = require('fs/promises')
 const Path = require('path')
+const { callbackify } = require('util')
 const isUtf8 = require('utf-8-validate')
-const { promisifyAll } = require('@overleaf/promise-utils')
 const Settings = require('@overleaf/settings')
 const Minimatch = require('minimatch').Minimatch
 
-const fileIgnoreMatcher = new Minimatch(Settings.fileIgnorePattern, {
-  nocase: true, // make the whole path matching case-insensitive
-  // (previously we were only matching the extension case-insensitively but it seems safer to match the whole path)
-  dot: true, // allows matching on paths containing a dot e.g. /.git/foo/bar.txt
+const FILE_IGNORE_MATCHER = new Minimatch(Settings.fileIgnorePattern, {
+  // make the whole path matching case-insensitive (previously we were only
+  // matching the extension case-insensitively but it seems safer to match the
+  // whole path)
+  nocase: true,
+  // allows matching on paths containing a dot e.g. /.git/foo/bar.txt
+  dot: true,
 })
 
-const FileTypeManager = {
-  TEXT_EXTENSIONS: new Set(Settings.textExtensions.map(ext => `.${ext}`)),
-  EDITABLE_FILENAMES: Settings.editableFilenames,
+const TEXT_EXTENSIONS = new Set(Settings.textExtensions.map(ext => `.${ext}`))
+const EDITABLE_FILENAMES = Settings.editableFilenames
 
-  MAX_TEXT_FILE_SIZE: 3 * Settings.max_doc_length, // allow 3 bytes for every character
+// allow 3 bytes for every character
+const MAX_TEXT_FILE_SIZE = 3 * Settings.max_doc_length
 
-  isDirectory(path, callback) {
-    fs.stat(path, (error, stats) => {
-      if (error != null) {
-        return callback(error)
-      }
-      callback(null, stats.isDirectory())
-    })
-  },
+async function isDirectory(path) {
+  const stats = await fs.stat(path)
+  return stats.isDirectory()
+}
 
-  // returns charset as understood by fs.readFile,
-  getType(name, fsPath, existingFileType, callback) {
-    if (!name) {
-      return callback(
-        new Error(
-          '[FileTypeManager] getType requires a non-null "name" parameter'
-        )
-      )
-    }
-    if (!fsPath) {
-      return callback(
-        new Error(
-          '[FileTypeManager] getType requires a non-null "fsPath" parameter'
-        )
-      )
-    }
-    const basename = Path.basename(name)
-    if (existingFileType !== 'doc' && !_isTextFilename(basename)) {
-      return callback(null, { binary: true })
-    }
+/**
+ * Determine whether a string can be stored as an editable doc
+ *
+ * @param {string} content
+ * @param {object} [opts]
+ * @param {string} [opts.filename] - if a filename is given, the algorithm also
+ *   checks whether the filename matches the list of editable filenames
+ */
+function isEditable(content, opts = {}) {
+  if (opts.filename != null && !_isTextFilename(opts.filename)) {
+    return false
+  }
 
-    fs.stat(fsPath, (err, stat) => {
-      if (err != null) {
-        return callback(err)
-      }
-      if (stat.size > FileTypeManager.MAX_TEXT_FILE_SIZE) {
-        return callback(null, { binary: true }) // Treat large text file as binary
-      }
+  if (content.length >= Settings.max_doc_length) {
+    return false
+  }
 
-      fs.readFile(fsPath, (err, bytes) => {
-        if (err != null) {
-          return callback(err)
-        }
-        const encoding = _detectEncoding(bytes)
-        const text = bytes.toString(encoding)
-        if (text.length >= Settings.max_doc_length) {
-          return callback(null, { binary: true }) // Treat large text file as binary
-        }
-        // For compatibility with the history service, only accept valid utf8 with no
-        // nulls or non-BMP characters as text, everything else is binary.
-        if (text.includes('\x00')) {
-          return callback(null, { binary: true })
-        }
-        if (/[\uD800-\uDFFF]/.test(text)) {
-          // non-BMP characters (high and low surrogate characters)
-          return callback(null, { binary: true })
-        }
-        callback(null, { binary: false, encoding })
-      })
-    })
-  },
+  // For compatibility with the history service, only accept valid utf8 with no
+  // nulls or non-BMP characters as text, everything else is binary.
+  if (content.includes('\x00')) {
+    return false
+  }
+  // non-BMP characters (high and low surrogate characters)
+  if (/[\uD800-\uDFFF]/.test(content)) {
+    return false
+  }
+  return true
+}
 
-  // FIXME: we can convert this to a synchronous function if we want to
-  shouldIgnore(path, callback) {
-    // use minimatch file matching to check if the path should be ignored
-    const ignore = fileIgnoreMatcher.match(path)
-    callback(null, ignore)
-  },
+/**
+ * Determine whether a file can be stored as an editable doc
+ *
+ * @param {string} name - target filename
+ * @param {string} fsPath - path of the file on the filesystem
+ * @param {'file' | 'doc' | null} existingFileType - current type of the file at
+ *   the target location
+ */
+async function getType(name, fsPath, existingFileType) {
+  if (existingFileType !== 'doc' && !_isTextFilename(name)) {
+    return { binary: true }
+  }
+
+  const stat = await fs.stat(fsPath)
+  if (stat.size > MAX_TEXT_FILE_SIZE) {
+    return { binary: true }
+  }
+
+  const bytes = await fs.readFile(fsPath)
+  const encoding = _detectEncoding(bytes)
+  const text = bytes.toString(encoding)
+
+  if (isEditable(text)) {
+    return { binary: false, encoding }
+  } else {
+    return { binary: true }
+  }
+}
+
+function shouldIgnore(path) {
+  // use minimatch file matching to check if the path should be ignored
+  return FILE_IGNORE_MATCHER.match(path)
+}
 
 function _isTextFilename(filename) {
+  const basename = Path.basename(filename)
   const extension = Path.extname(filename).toLowerCase()
   return (
-    FileTypeManager.TEXT_EXTENSIONS.has(extension) ||
-    FileTypeManager.EDITABLE_FILENAMES.includes(filename.toLowerCase())
+    TEXT_EXTENSIONS.has(extension) ||
+    EDITABLE_FILENAMES.includes(basename.toLowerCase())
   )
 }

@@ -105,7 +110,13 @@ function _detectEncoding(bytes) {
   return 'latin1'
 }
 
-module.exports = FileTypeManager
-module.exports.promises = promisifyAll(FileTypeManager, {
-  without: ['getStrictTypeFromContent'],
-})
+module.exports = {
+  shouldIgnore,
+  isEditable,
+  getType: callbackify(getType),
+  isDirectory: callbackify(isDirectory),
+  promises: {
+    getType,
+    isDirectory,
+  },
+}
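After this refactor the module exposes synchronous shouldIgnore and isEditable alongside async getType and isDirectory, with callbackified wrappers for legacy callers. A small usage sketch (paths and inputs are illustrative):

// Usage sketch for the refactored module (illustrative require path/inputs).
const FileTypeManager = require('./app/src/Features/Uploads/FileTypeManager.js')

// Synchronous now: no await, no callback.
FileTypeManager.shouldIgnore('file.aux') // true, matches fileIgnorePattern
FileTypeManager.isEditable('hello world', { filename: 'main.tex' }) // true

// Still async where disk I/O is involved:
async function classify(name, fsPath) {
  const { binary, encoding } = await FileTypeManager.promises.getType(
    name,
    fsPath,
    null // existingFileType: 'file' | 'doc' | null
  )
  return binary ? 'file' : `doc (${encoding})`
}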
@@ -278,6 +278,7 @@ export interface Meta {
     customerId: string
     subscriptionState: string | null
     paymentProviderService: StripePaymentProviderService | null
+    segment: string | null
   }
   'ol-subscription': any // TODO: mixed types, split into two fields
   'ol-subscriptionChangePreview': SubscriptionChangePreview
@@ -295,6 +295,58 @@
       margin-top: var(--spacing-08);
     }
   }
 
+  &.stripe-payment-container {
+    .payment-checkout {
+      .checkout-form-inputs {
+        display: flex;
+        flex-direction: column;
+        gap: 1rem;
+      }
+
+      // Everything under .form-group is to make the form better match
+      // Stripe's payment inputs
+      .form-group {
+        // we can't use "Noto Sans" as the font within PaymentElement,
+        // so we're using sans-serif for the text elsewhere in the page
+        font-family: sans-serif;
+
+        .form-control {
+          padding: 10px 12px;
+          font-size: 16px;
+          line-height: 20px;
+          border: 1px solid #e6e6e6;
+          transition:
+            box-shadow 0.15s ease,
+            border-color 0.15s ease;
+          box-shadow: 0 1px 3px 0 rgb(0 0 0 / 2%);
+
+          &:focus {
+            outline: none;
+            border-color: #046530; // brand primary color
+            box-shadow:
+              0 1px 1px rgb(0 0 0 / 3%),
+              0 3px 6px rgb(0 0 0 / 2%),
+              0 0 0 3px rgb(11 107 39 / 25%),
+              0 1px 1px 0 rgb(0 0 0 / 8%);
+          }
+
+          &.is-invalid {
+            border-color: var(--bs-form-invalid-border-color);
+            box-shadow:
+              0 1px 3px 0 rgb(0 0 0 / 10%),
+              0 0 0 1px var(--bs-form-invalid-border-color);
+          }
+        }
+
+        .invalid-feedback {
+          .material-symbols {
+            display: none;
+          }
+        }
+      }
+    }
+  }
 
 .group-heading {
services/web/scripts/stripe/add_user_id_to_stripe_customer.mjs (new file, 248 lines)

@@ -0,0 +1,248 @@
#!/usr/bin/env node

/**
 * This script iterates through all Stripe subscriptions, checks if they have metadata adminUserId,
 * and sets it to customer metadata "userId" if present.
 *
 * Usage:
 *   node scripts/stripe/add_user_id_to_stripe_customer.mjs --region=us [options]
 *   node scripts/stripe/add_user_id_to_stripe_customer.mjs --region=uk [options]
 *
 * Options:
 *   --region=us|uk   Required. Stripe region to process (us or uk)
 *   --commit         Actually perform the updates (default: dry-run mode)
 *   --verbose        Enable verbose logging
 *   --limit=N        Limit processing to N subscriptions (for testing)
 *
 * Examples:
 *   # Dry run for US region with verbose output
 *   node scripts/stripe/add_user_id_to_stripe_customer.mjs --region=us --verbose
 *
 *   # Commit changes for UK region
 *   node scripts/stripe/add_user_id_to_stripe_customer.mjs --region=uk --commit
 *
 *   # Test with limited subscriptions
 *   node scripts/stripe/add_user_id_to_stripe_customer.mjs --region=us --limit=10 --verbose
 */

import minimist from 'minimist'
import { z } from '../../app/src/infrastructure/Validation.js'
import { scriptRunner } from '../lib/ScriptRunner.mjs'
import {
  getRegionClient,
  CUSTOMER_SEGMENT_MAPPING,
} from '../../modules/subscriptions/app/src/StripeClient.mjs'

const paramsSchema = z.object({
  region: z.enum(['us', 'uk']),
  commit: z.boolean().default(false),
  verbose: z.boolean().default(false),
  limit: z.number().int().min(1).optional(),
})

let processedCount = 0
let updatedCount = 0
let errorCount = 0

/**
 * Sleep function to respect Stripe rate limits (100 requests per second)
 * We'll be conservative and sleep for 50ms between requests to stay well under the limit
 */
async function rateLimitSleep() {
  return new Promise(resolve => setTimeout(resolve, 50))
}

/**
 * Process a single subscription and update customer metadata if needed
 */
async function processSubscription(
  subscription,
  stripeClient,
  commit,
  verbose
) {
  try {
    processedCount++

    // Check if subscription has adminUserId metadata
    const adminUserId = subscription.metadata?.adminUserId

    if (verbose) {
      console.info(
        `Processing subscription ${subscription.id} (customer: ${subscription.customer.id}) - adminUserId: ${adminUserId || 'none'}`
      )
    }

    if (!adminUserId) {
      // No adminUserId to migrate
      return
    }

    // Get customer details to check current metadata
    const customer = subscription.customer

    if (customer.metadata?.userId === adminUserId) {
      if (verbose) {
        console.info(
          `Customer ${customer.id} already has userId=${adminUserId}, skipping`
        )
      }
      return
    }

    if (customer.metadata?.userId && customer.metadata.userId !== adminUserId) {
      console.warn(
        `Customer ${customer.id} has existing userId=${customer.metadata.userId} which differs from adminUserId=${adminUserId}, skipping to avoid overwrite`
      )
      return
    }

    if (commit) {
      // Update customer metadata using the StripeClient method
      await stripeClient.updateCustomerMetadata(customer.id, {
        ...customer.metadata,
        userId: adminUserId,
        segment: CUSTOMER_SEGMENT_MAPPING.B2C,
      })

      console.info(
        `Updated customer ${customer.id} metadata: userId=${adminUserId}`
      )
    } else {
      console.info(
        `DRY RUN: Would update customer ${customer.id} metadata: userId=${adminUserId}`
      )
    }

    updatedCount++
  } catch (error) {
    errorCount++
    console.log(error)
  }

  // Respect rate limits
  await rateLimitSleep()
}

/**
 * Main script function
 */
async function main(trackProgress) {
  const parseResult = paramsSchema.safeParse(
    minimist(process.argv.slice(2), {
      boolean: ['commit', 'verbose'],
      string: ['region'],
      number: ['limit'],
    })
  )

  if (!parseResult.success) {
    throw new Error(`Invalid parameters: ${parseResult.error.message}`)
  }

  const { region, commit, verbose, limit } = parseResult.data

  const mode = commit ? 'COMMIT MODE' : 'DRY RUN MODE'
  await trackProgress(
    `Starting script in ${mode} for Stripe ${region.toUpperCase()} region`
  )

  if (limit) {
    await trackProgress(`Processing limited to ${limit} subscriptions`)
  }

  // Get Stripe client for the specified region
  const stripeClient = getRegionClient(region)

  // Reset counters
  processedCount = 0
  updatedCount = 0
  errorCount = 0

  await trackProgress('Starting to iterate through Stripe subscriptions...')

  const listParams = {
    limit: 100, // Stripe's maximum limit per request
    expand: ['data.customer'], // Expand customer data to reduce additional API calls
  }

  let hasMore = true
  let startingAfter = null
  let totalProcessed = 0

  while (hasMore) {
    const params = { ...listParams }
    if (startingAfter) {
      params.starting_after = startingAfter
    }

    // Get batch of subscriptions
    const subscriptions = await stripeClient.stripe.subscriptions.list(params)

    await trackProgress(
      `Retrieved ${subscriptions.data.length} subscriptions (total processed so far: ${totalProcessed})`
    )

    // Process each subscription in the batch
    for (const subscription of subscriptions.data) {
      await processSubscription(subscription, stripeClient, commit, verbose)

      totalProcessed++

      // Check if we've hit the limit
      if (limit && totalProcessed >= limit) {
        await trackProgress(`Reached limit of ${limit} subscriptions, stopping`)
        hasMore = false
        break
      }

      // Progress update every 50 subscriptions
      if (totalProcessed % 50 === 0) {
        await trackProgress(
          `Progress: ${totalProcessed} processed, ${updatedCount} customers updated, ${errorCount} errors`
        )
      }
    }

    // Check if there are more subscriptions to process
    hasMore = hasMore && subscriptions.has_more
    if (hasMore && subscriptions.data.length > 0) {
      startingAfter = subscriptions.data[subscriptions.data.length - 1].id
    }

    // Rate limit between batch requests
    await rateLimitSleep()
  }

  // Final summary
  await trackProgress('FINAL SUMMARY:')
  await trackProgress(`  Total subscriptions processed: ${processedCount}`)
  await trackProgress(
    `  Customers ${commit ? 'updated' : 'would be updated'}: ${updatedCount}`
  )
  await trackProgress(`  Errors encountered: ${errorCount}`)

  if (!commit && updatedCount > 0) {
    await trackProgress('')
    await trackProgress(
      'To actually perform the updates, run the script with --commit flag'
    )
  }

  if (errorCount > 0) {
    await trackProgress(
      'Some errors were encountered. Check the logs above for details.'
    )
  }

  await trackProgress(`Script completed successfully in ${mode}`)
}

// Execute the script using the runner
try {
  await scriptRunner(main)
  process.exit(0)
} catch (error) {
  console.error('Script failed:', error.message)
  process.exit(1)
}
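Both this script and the removal script that follows walk the full subscription list with Stripe's cursor pagination and the same conservative rate limiting. The shared pattern, reduced to its core (a sketch; client construction and progress reporting elided):

// Cursor pagination over all subscriptions (sketch of the loop both scripts use).
async function forEachSubscription(stripe, handler) {
  let startingAfter = null
  let hasMore = true
  while (hasMore) {
    const page = await stripe.subscriptions.list({
      limit: 100, // Stripe's maximum page size
      ...(startingAfter ? { starting_after: startingAfter } : {}),
    })
    for (const subscription of page.data) {
      await handler(subscription)
      // ~50ms between requests stays well under Stripe's rate limit
      await new Promise(resolve => setTimeout(resolve, 50))
    }
    hasMore = page.has_more
    if (hasMore && page.data.length > 0) {
      // The last object's id is the cursor for the next page.
      startingAfter = page.data[page.data.length - 1].id
    }
  }
}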
services/web/scripts/stripe/remove_admin_user_id_from_stripe_subscription.mjs (new file, 227 lines)

@@ -0,0 +1,227 @@
#!/usr/bin/env node

/**
 * This script iterates through all Stripe subscriptions and removes the adminUserId metadata
 * from subscription objects that have it.
 *
 * Usage:
 *   node scripts/stripe/remove_admin_user_id_from_stripe_subscription.mjs --region=us [options]
 *   node scripts/stripe/remove_admin_user_id_from_stripe_subscription.mjs --region=uk [options]
 *
 * Options:
 *   --region=us|uk   Required. Stripe region to process (us or uk)
 *   --commit         Actually perform the updates (default: dry-run mode)
 *   --verbose        Enable verbose logging
 *   --limit=N        Limit processing to N subscriptions (for testing)
 *
 * Examples:
 *   # Dry run for US region with verbose output
 *   node scripts/stripe/remove_admin_user_id_from_stripe_subscription.mjs --region=us --verbose
 *
 *   # Commit changes for UK region
 *   node scripts/stripe/remove_admin_user_id_from_stripe_subscription.mjs --region=uk --commit
 *
 *   # Test with limited subscriptions
 *   node scripts/stripe/remove_admin_user_id_from_stripe_subscription.mjs --region=us --limit=10 --verbose
 */

import minimist from 'minimist'
import { z } from '../../app/src/infrastructure/Validation.js'
import { scriptRunner } from '../lib/ScriptRunner.mjs'
import { getRegionClient } from '../../modules/subscriptions/app/src/StripeClient.mjs'

const paramsSchema = z.object({
  region: z.enum(['us', 'uk']),
  commit: z.boolean().default(false),
  verbose: z.boolean().default(false),
  limit: z.number().int().min(1).optional(),
})

let processedCount = 0
let updatedCount = 0
let errorCount = 0

/**
 * Sleep function to respect Stripe rate limits (100 requests per second)
 * We'll be conservative and sleep for 50ms between requests to stay well under the limit
 */
async function rateLimitSleep() {
  return new Promise(resolve => setTimeout(resolve, 50))
}

/**
 * Process a single subscription and remove adminUserId metadata if present
 */
async function processSubscription(
  subscription,
  stripeClient,
  commit,
  verbose
) {
  try {
    processedCount++

    // Check if subscription has adminUserId metadata
    const adminUserId = subscription.metadata?.adminUserId

    if (verbose) {
      console.info(
        `Processing subscription ${subscription.id} - adminUserId: ${adminUserId || 'none'}`
      )
    }

    if (!adminUserId) {
      // No adminUserId to remove
      return
    }

    if (commit) {
      // Create a new metadata object that will remove adminUserId
      const updatedMetadata = { ...subscription.metadata }
      updatedMetadata.adminUserId = ''

      // Update subscription metadata using Stripe API directly
      await stripeClient.stripe.subscriptions.update(subscription.id, {
        metadata: updatedMetadata,
      })

      console.info(
        `Removed adminUserId metadata from subscription ${subscription.id}`
      )
    } else {
      console.info(
        `DRY RUN: Would remove adminUserId metadata from subscription ${subscription.id}`
      )
    }

    updatedCount++
  } catch (error) {
    errorCount++
    console.log(error)
  }

  // Respect rate limits
  await rateLimitSleep()
}

/**
 * Main script function
 */
async function main(trackProgress) {
  const parseResult = paramsSchema.safeParse(
    minimist(process.argv.slice(2), {
      boolean: ['commit', 'verbose'],
      string: ['region'],
      number: ['limit'],
    })
  )

  if (!parseResult.success) {
    throw new Error(`Invalid parameters: ${parseResult.error.message}`)
  }

  const { region, commit, verbose, limit } = parseResult.data

  const mode = commit ? 'COMMIT MODE' : 'DRY RUN MODE'
  await trackProgress(
    `Starting script in ${mode} for Stripe ${region.toUpperCase()} region`
  )

  if (limit) {
    await trackProgress(`Processing limited to ${limit} subscriptions`)
  }

  // Get Stripe client for the specified region
  const stripeClient = getRegionClient(region)

  // Reset counters
  processedCount = 0
  updatedCount = 0
  errorCount = 0

  await trackProgress('Starting to iterate through Stripe subscriptions...')

  const listParams = {
    limit: 100, // Stripe's maximum limit per request
  }

  let hasMore = true
  let startingAfter = null
  let totalProcessed = 0

  while (hasMore) {
    const params = { ...listParams }
    if (startingAfter) {
      params.starting_after = startingAfter
    }

    // Get batch of subscriptions
    const subscriptions = await stripeClient.stripe.subscriptions.list(params)

    await trackProgress(
      `Retrieved ${subscriptions.data.length} subscriptions (total processed so far: ${totalProcessed})`
    )

    // Process each subscription in the batch
    for (const subscription of subscriptions.data) {
      await processSubscription(subscription, stripeClient, commit, verbose)

      totalProcessed++

      // Check if we've hit the limit
      if (limit && totalProcessed >= limit) {
        await trackProgress(`Reached limit of ${limit} subscriptions, stopping`)
        hasMore = false
        break
      }

      // Progress update every 50 subscriptions
      if (totalProcessed % 50 === 0) {
        await trackProgress(
          `Progress: ${totalProcessed} processed, ${updatedCount} subscriptions updated, ${errorCount} errors`
        )
      }
    }

    // Check if there are more subscriptions to process
    hasMore = hasMore && subscriptions.has_more
    if (hasMore && subscriptions.data.length > 0) {
      startingAfter = subscriptions.data[subscriptions.data.length - 1].id
    }

    // Rate limit between batch requests
    await rateLimitSleep()
  }

  // Final summary
  await trackProgress('FINAL SUMMARY:')
  await trackProgress(`  Total subscriptions processed: ${processedCount}`)
  await trackProgress(
    `  Subscriptions ${commit ? 'updated' : 'would be updated'}: ${updatedCount}`
  )
  await trackProgress(`  Errors encountered: ${errorCount}`)

  if (!commit && updatedCount > 0) {
    await trackProgress('')
    await trackProgress(
      'To actually perform the updates, run the script with --commit flag'
    )
  }

  if (errorCount > 0) {
    await trackProgress(
      'Some errors were encountered. Check the logs above for details.'
    )
  }

  await trackProgress(`Script completed successfully in ${mode}`)
}

// Execute the script using the runner
try {
  await scriptRunner(main)
  process.exit(0)
} catch (error) {
  console.error('Script failed:', error.message)
  process.exit(1)
}
@@ -16,6 +16,9 @@ describe('RestoreManager', function () {
   beforeEach(async function (ctx) {
     tk.freeze(Date.now()) // freeze the time for these tests
 
+    ctx.fsPath = '/tmp/path/on/disk'
+    ctx.blobStream = 'blob-stream'
+
     vi.doMock('../../../../app/src/Features/Errors/Errors.js', () => ({
       default: Errors,
     }))

@@ -71,6 +74,7 @@ describe('RestoreManager', function () {
           },
           timestamp: new Date().toISOString(),
         }),
+        requestBlob: sinon.stub().resolves({ stream: ctx.blobStream }),
       },
     }),
   }))

@@ -91,7 +95,12 @@ describe('RestoreManager', function () {
     }))
 
     vi.doMock('../../../../app/src/infrastructure/FileWriter', () => ({
-      default: (ctx.FileWriter = { promises: {} }),
+      default: (ctx.FileWriter = {
+        promises: {
+          writeStreamToDisk: sinon.stub().resolves(ctx.fsPath),
+          writeContentToDisk: sinon.stub().resolves(ctx.fsPath),
+        },
+      }),
     }))
 
     vi.doMock(

@@ -167,6 +176,7 @@ describe('RestoreManager', function () {
           getMetadata: sinon
             .stub()
             .returns(snapshotData?.files?.[pathname]?.metadata),
+          getHash: sinon.stub().returns((ctx.hash = 'somehash')),
         }),
         getFilePathnames: sinon
           .stub()

@@ -380,9 +390,6 @@ describe('RestoreManager', function () {
         overleaf: { history: { rangesSupportEnabled: true } },
         rootDoc_id: 'root-doc-id',
       })
-      ctx.RestoreManager.promises._writeFileVersionToDisk = sinon
-        .stub()
-        .resolves((ctx.fsPath = '/tmp/path/on/disk'))
       ctx.RestoreManager.promises._findOrCreateFolder = sinon
         .stub()
         .resolves((ctx.folder_id = 'mock-folder-id'))

@@ -487,9 +494,6 @@ describe('RestoreManager', function () {
           metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-2' },
         },
       ]
-      ctx.FileSystemImportManager.promises.importFile = sinon
-        .stub()
-        .resolves({ type: 'doc', lines: ['foo', 'bar', 'baz'] })
       ctx.getDocUpdaterCompatibleRanges.returns({
         changes: ctx.tracked_changes,
         comments: ctx.comments,

@@ -933,9 +937,6 @@ describe('RestoreManager', function () {
     describe('when reverting a linked file', function () {
       beforeEach(async function (ctx) {
         ctx.pathname = 'foo.png'
-        ctx.FileSystemImportManager.promises.importFile = sinon
-          .stub()
-          .resolves({ type: 'file' })
         ctx.result = await ctx.RestoreManager.promises.revertFile(
           ctx.user_id,
           ctx.project_id,

@@ -979,9 +980,6 @@ describe('RestoreManager', function () {
     describe('when reverting a linked document with provider', function () {
       beforeEach(async function (ctx) {
         ctx.pathname = 'linkedFile.bib'
-        ctx.FileSystemImportManager.promises.importFile = sinon
-          .stub()
-          .resolves({ type: 'doc', lines: ['foo', 'bar', 'baz'] })
         ctx.result = await ctx.RestoreManager.promises.revertFile(
           ctx.user_id,
           ctx.project_id,

@@ -1025,9 +1023,6 @@ describe('RestoreManager', function () {
     describe('when reverting a linked document with { main: true }', function () {
       beforeEach(async function (ctx) {
         ctx.pathname = 'withMainTrue.tex'
-        ctx.FileSystemImportManager.promises.importFile = sinon
-          .stub()
-          .resolves({ type: 'doc', lines: ['foo', 'bar', 'baz'] })
         ctx.result = await ctx.RestoreManager.promises.revertFile(
           ctx.user_id,
           ctx.project_id,

@@ -1065,9 +1060,6 @@ describe('RestoreManager', function () {
     describe('when reverting a binary file', function () {
       beforeEach(async function (ctx) {
         ctx.pathname = 'foo.png'
-        ctx.FileSystemImportManager.promises.importFile = sinon
-          .stub()
-          .resolves({ type: 'file' })
         ctx.EditorController.promises.upsertFile = sinon
           .stub()
           .resolves({ _id: 'mock-file-id', type: 'file' })
@@ -45,9 +45,7 @@ describe('TpdsUpdateHandler', function () {
       },
     }
     ctx.FileTypeManager = {
-      promises: {
-        shouldIgnore: sinon.stub().resolves(false),
-      },
+      shouldIgnore: sinon.stub().returns(false),
     }
     ctx.Modules = {
       promises: {

@@ -225,7 +223,7 @@ describe('TpdsUpdateHandler', function () {
   describe('update to a file that should be ignored', async function () {
     setupMatchingProjects(['active1'])
     beforeEach(function (ctx) {
-      ctx.FileTypeManager.promises.shouldIgnore.resolves(true)
+      ctx.FileTypeManager.shouldIgnore.returns(true)
     })
     receiveUpdate()
     expectProjectNotCreated()

@@ -492,16 +490,13 @@ function receiveFileDelete() {
 
 function receiveFileDeleteById() {
   beforeEach(async function (ctx) {
-    await new Promise(resolve => {
-      ctx.TpdsUpdateHandler.deleteUpdate(
-        ctx.userId,
-        ctx.projectId,
-        '', // projectName
-        ctx.path,
-        ctx.source,
-        resolve
-      )
-    })
+    await ctx.TpdsUpdateHandler.promises.deleteUpdate(
+      ctx.userId,
+      ctx.projectId,
+      '', // projectName
+      ctx.path,
+      ctx.source
+    )
   })
 }
@@ -3,58 +3,59 @@ const { expect } = require('chai')
 const SandboxedModule = require('sandboxed-module')
 const isUtf8 = require('utf-8-validate')
 const Settings = require('@overleaf/settings')
-const modulePath = '../../../../app/src/Features/Uploads/FileTypeManager.js'
+
+const MODULE_PATH = '../../../../app/src/Features/Uploads/FileTypeManager.js'
 
 describe('FileTypeManager', function () {
+  const fileContents = 'Ich bin eine kleine Teekanne, kurz und kräftig.'
+
   beforeEach(function () {
     this.isUtf8 = sinon.spy(isUtf8)
     this.stats = {
       isDirectory: sinon.stub().returns(false),
       size: 100,
     }
-    const fileContents = 'Ich bin eine kleine Teekanne, kurz und kräftig.'
     this.fs = {
-      stat: sinon.stub().yields(null, this.stats),
+      stat: sinon.stub().resolves(this.stats),
       readFile: sinon.stub(),
     }
     this.fs.readFile
       .withArgs('utf8.tex')
-      .yields(null, Buffer.from(fileContents, 'utf-8'))
+      .resolves(Buffer.from(fileContents, 'utf-8'))
     this.fs.readFile
       .withArgs('utf16.tex')
-      .yields(null, Buffer.from(`\uFEFF${fileContents}`, 'utf-16le'))
+      .resolves(Buffer.from(`\uFEFF${fileContents}`, 'utf-16le'))
     this.fs.readFile
       .withArgs('latin1.tex')
-      .yields(null, Buffer.from(fileContents, 'latin1'))
+      .resolves(Buffer.from(fileContents, 'latin1'))
     this.fs.readFile
       .withArgs('latin1-null.tex')
-      .yields(null, Buffer.from(`${fileContents}\x00${fileContents}`, 'utf-8'))
+      .resolves(Buffer.from(`${fileContents}\x00${fileContents}`, 'utf-8'))
     this.fs.readFile
       .withArgs('utf8-null.tex')
-      .yields(null, Buffer.from(`${fileContents}\x00${fileContents}`, 'utf-8'))
+      .resolves(Buffer.from(`${fileContents}\x00${fileContents}`, 'utf-8'))
     this.fs.readFile
       .withArgs('utf8-non-bmp.tex')
-      .yields(null, Buffer.from(`${fileContents}😈`))
+      .resolves(Buffer.from(`${fileContents}😈`))
     this.fs.readFile
       .withArgs('utf8-control-chars.tex')
-      .yields(null, Buffer.from(`${fileContents}\x0c${fileContents}`))
+      .resolves(Buffer.from(`${fileContents}\x0c${fileContents}`))
     this.fs.readFile
       .withArgs('text-short.tex')
-      .yields(null, Buffer.from('a'.repeat(0.5 * 1024 * 1024), 'utf-8'))
+      .resolves(Buffer.from('a'.repeat(0.5 * 1024 * 1024), 'utf-8'))
     this.fs.readFile
       .withArgs('text-smaller.tex')
-      .yields(null, Buffer.from('a'.repeat(2 * 1024 * 1024 - 1), 'utf-8'))
+      .resolves(Buffer.from('a'.repeat(2 * 1024 * 1024 - 1), 'utf-8'))
     this.fs.readFile
      .withArgs('text-exact.tex')
-      .yields(null, Buffer.from('a'.repeat(2 * 1024 * 1024), 'utf-8'))
+      .resolves(Buffer.from('a'.repeat(2 * 1024 * 1024), 'utf-8'))
     this.fs.readFile
       .withArgs('text-long.tex')
-      .yields(null, Buffer.from('a'.repeat(3 * 1024 * 1024), 'utf-8'))
-    this.callback = sinon.stub()
-    this.DocumentHelper = { getEncodingFromTexContent: sinon.stub() }
-    this.FileTypeManager = SandboxedModule.require(modulePath, {
+      .resolves(Buffer.from('a'.repeat(3 * 1024 * 1024), 'utf-8'))
+
+    this.FileTypeManager = SandboxedModule.require(MODULE_PATH, {
       requires: {
-        fs: this.fs,
+        'fs/promises': this.fs,
         'utf-8-validate': this.isUtf8,
         '@overleaf/settings': Settings,
       },

@@ -88,6 +89,32 @@ describe('FileTypeManager', function () {
     })
   })
 
+  describe('isEditable', function () {
+    it('classifies simple UTF-8 as editable', function () {
+      expect(this.FileTypeManager.isEditable(fileContents)).to.be.true
+    })
+
+    it('classifies text with non-BMP characters as binary', function () {
+      expect(this.FileTypeManager.isEditable(`${fileContents}😈`)).to.be.false
+    })
+
+    it('classifies a .tex file as editable', function () {
+      expect(
+        this.FileTypeManager.isEditable(fileContents, {
+          filename: 'some/file.tex',
+        })
+      ).to.be.true
+    })
+
+    it('classifies a .exe file as binary', function () {
+      expect(
+        this.FileTypeManager.isEditable(fileContents, {
+          filename: 'command.exe',
+        })
+      ).to.be.false
+    })
+  })
+
   describe('getType', function () {
     describe('when the file extension is text', function () {
       const TEXT_FILENAMES = [

@@ -335,76 +362,58 @@ describe('FileTypeManager', function () {
 
   describe('shouldIgnore', function () {
     it('should ignore tex auxiliary files', async function () {
-      const ignore =
-        await this.FileTypeManager.promises.shouldIgnore('file.aux')
+      const ignore = this.FileTypeManager.shouldIgnore('file.aux')
       ignore.should.equal(true)
     })
 
     it('should ignore dotfiles', async function () {
-      const ignore =
-        await this.FileTypeManager.promises.shouldIgnore('path/.git')
-
+      const ignore = this.FileTypeManager.shouldIgnore('path/.git')
       ignore.should.equal(true)
     })
 
     it('should ignore .git directories and contained files', async function () {
-      const ignore =
-        await this.FileTypeManager.promises.shouldIgnore('path/.git/info')
-
+      const ignore = await this.FileTypeManager.shouldIgnore('path/.git/info')
       ignore.should.equal(true)
     })
 
     it('should not ignore .latexmkrc dotfile', async function () {
-      const ignore =
-        await this.FileTypeManager.promises.shouldIgnore('path/.latexmkrc')
-
+      const ignore = this.FileTypeManager.shouldIgnore('path/.latexmkrc')
       ignore.should.equal(false)
     })
 
     it('should ignore __MACOSX', async function () {
-      const ignore =
-        await this.FileTypeManager.promises.shouldIgnore('path/__MACOSX')
-
+      const ignore = this.FileTypeManager.shouldIgnore('path/__MACOSX')
       ignore.should.equal(true)
     })
 
     it('should ignore synctex files', async function () {
-      const ignore =
-        await this.FileTypeManager.promises.shouldIgnore('file.synctex')
-
+      const ignore = this.FileTypeManager.shouldIgnore('file.synctex')
      ignore.should.equal(true)
    })
 
     it('should ignore synctex(busy) files', async function () {
-      const ignore =
-        await this.FileTypeManager.promises.shouldIgnore('file.synctex(busy)')
-
+      const ignore = this.FileTypeManager.shouldIgnore('file.synctex(busy)')
       ignore.should.equal(true)
     })
 
     it('should not ignore .tex files', async function () {
-      const ignore =
-        await this.FileTypeManager.promises.shouldIgnore('file.tex')
-
+      const ignore = this.FileTypeManager.shouldIgnore('file.tex')
       ignore.should.equal(false)
     })
 
     it('should ignore the case of the extension', async function () {
-      const ignore =
-        await this.FileTypeManager.promises.shouldIgnore('file.AUX')
-
+      const ignore = this.FileTypeManager.shouldIgnore('file.AUX')
       ignore.should.equal(true)
     })
 
     it('should not ignore files with an ignored extension as full name', async function () {
-      const ignore = await this.FileTypeManager.promises.shouldIgnore('dvi')
+      const ignore = this.FileTypeManager.shouldIgnore('dvi')
       ignore.should.equal(false)
     })
 
     it('should not ignore directories with an ignored extension as full name', async function () {
       this.stats.isDirectory.returns(true)
-      const ignore = await this.FileTypeManager.promises.shouldIgnore('dvi')
-
+      const ignore = this.FileTypeManager.shouldIgnore('dvi')
       ignore.should.equal(false)
     })
   })
@@ -26,6 +26,7 @@ export type AdminCapability =
   | 'view-split-test'
   | 'view-user-additional-info'
   | 'create-stripe-account'
+  | 'update-stripe-customer-segment'
 
 export type AdminRole =
   | 'engagement'
@@ -18,4 +18,5 @@ export type SubscriptionMetadata = Stripe.Metadata & {
   itm_campaign?: string
   itm_content?: string
   itm_referrer?: string
+  checkoutSource?: 'hosted-checkout' | 'elements-checkout'
 }