Mirror of https://github.com/overleaf/overleaf.git (synced 2025-12-05 01:10:29 +00:00)
[filestore] remove user files endpoints (#28125)
* [filestore] remove user files endpoints
* [web] remove user files integration for filestore

GitOrigin-RevId: 565fa68a659c07420ee6141d0f276b4e4d2972e0
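For reference, the user-files HTTP surface removed from the filestore service by this commit (taken directly from the services/filestore app.js hunk below; no routes beyond those shown in the diff are implied) was:

HEAD   /project/:project_id/file/:file_id
GET    /project/:project_id/file/:file_id
POST   /project/:project_id/file/:file_id
PUT    /project/:project_id/file/:file_id   (copy from a source project/file)
DELETE /project/:project_id/file/:file_id
DELETE /project/:project_id
GET    /project/:project_id/size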
@@ -69,7 +69,6 @@ services:
      - filestore-public-files:/overleaf/services/filestore/public_files
      - filestore-template-files:/overleaf/services/filestore/template_files
      - filestore-uploads:/overleaf/services/filestore/uploads
-     - filestore-user-files:/overleaf/services/filestore/user_files

  history-v1:
    build:
@@ -443,7 +443,6 @@ switch (process.env.OVERLEAF_FILESTORE_BACKEND) {
    settings.filestore = {
      backend: 's3',
      stores: {
-       user_files: process.env.OVERLEAF_FILESTORE_USER_FILES_BUCKET_NAME,
        template_files:
          process.env.OVERLEAF_FILESTORE_TEMPLATE_FILES_BUCKET_NAME,
        project_blobs: process.env.OVERLEAF_HISTORY_PROJECT_BLOBS_BUCKET,
@@ -468,7 +467,6 @@ switch (process.env.OVERLEAF_FILESTORE_BACKEND) {
    settings.filestore = {
      backend: 'fs',
      stores: {
-       user_files: Path.join(DATA_DIR, 'user_files'),
        template_files: Path.join(DATA_DIR, 'template_files'),

        // NOTE: The below paths are hard-coded in server-ce/config/production.json, so hard code them here as well.
@@ -4,9 +4,6 @@ set -e
mkdir -p /var/lib/overleaf/data
chown www-data:www-data /var/lib/overleaf/data

-mkdir -p /var/lib/overleaf/data/user_files
-chown www-data:www-data /var/lib/overleaf/data/user_files
-
mkdir -p /var/lib/overleaf/data/compiles
chown www-data:www-data /var/lib/overleaf/data/compiles
services/filestore/.gitignore
@@ -1,3 +1,2 @@
uploads/*
-user_files/*
template_files/*
@@ -24,8 +24,8 @@ RUN cd /overleaf && npm ci --quiet
COPY services/filestore/ /overleaf/services/filestore/

FROM app
-RUN mkdir -p uploads user_files template_files \
-	&& chown node:node uploads user_files template_files
+RUN mkdir -p uploads template_files \
+	&& chown node:node uploads template_files
USER node

CMD ["node", "--expose-gc", "app.js"]
@@ -27,7 +27,7 @@ clean:
	-docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	-$(DOCKER_COMPOSE_TEST_UNIT) down --remove-orphans --rmi local --timeout 0 --volumes
	-$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --remove-orphans --rmi local --timeout 0 --volumes
-	-git clean -dfX uploads user_files template_files
+	-git clean -dfX uploads template_files

HERE=$(shell pwd)
MONOREPO=$(shell cd ../../ && pwd)
@@ -9,11 +9,9 @@ logger.initialize(process.env.METRICS_APP_NAME || 'filestore')

const settings = require('@overleaf/settings')
const express = require('express')
-const bodyParser = require('body-parser')

const fileController = require('./app/js/FileController')
const keyBuilder = require('./app/js/KeyBuilder')
-const healthCheckController = require('./app/js/HealthCheckController')

const RequestLogger = require('./app/js/RequestLogger')
@@ -50,46 +48,6 @@ app.use((req, res, next) => {

Metrics.injectMetricsRoute(app)

-if (settings.filestore.stores.user_files) {
-  app.head(
-    '/project/:project_id/file/:file_id',
-    keyBuilder.userFileKeyMiddleware,
-    fileController.getFileHead
-  )
-  app.get(
-    '/project/:project_id/file/:file_id',
-    keyBuilder.userFileKeyMiddleware,
-    fileController.getFile
-  )
-  app.post(
-    '/project/:project_id/file/:file_id',
-    keyBuilder.userFileKeyMiddleware,
-    fileController.insertFile
-  )
-  app.put(
-    '/project/:project_id/file/:file_id',
-    keyBuilder.userFileKeyMiddleware,
-    bodyParser.json(),
-    fileController.copyFile
-  )
-  app.delete(
-    '/project/:project_id/file/:file_id',
-    keyBuilder.userFileKeyMiddleware,
-    fileController.deleteFile
-  )
-  app.delete(
-    '/project/:project_id',
-    keyBuilder.userProjectKeyMiddleware,
-    fileController.deleteProject
-  )
-
-  app.get(
-    '/project/:project_id/size',
-    keyBuilder.userProjectKeyMiddleware,
-    fileController.directorySize
-  )
-}
-
if (settings.filestore.stores.template_files) {
  app.head(
    '/template/:template_id/v/:version/:format',
@@ -138,7 +96,9 @@ app.get('/status', function (req, res) {
  }
})

-app.get('/health_check', healthCheckController.check)
+app.get('/health_check', (req, res) => {
+  res.sendStatus(200)
+})

app.use(RequestLogger.errorHandler)
@@ -10,10 +10,6 @@ module.exports = {
  getFile,
  getFileHead,
  insertFile,
-  copyFile,
-  deleteFile,
-  deleteProject,
-  directorySize,
}

function getFile(req, res, next) {
@@ -125,85 +121,6 @@ function insertFile(req, res, next) {
  })
}

-function copyFile(req, res, next) {
-  metrics.inc('copyFile')
-  const { key, bucket } = req
-  const oldProjectId = req.body.source.project_id
-  const oldFileId = req.body.source.file_id
-
-  req.requestLogger.addFields({
-    key,
-    bucket,
-    oldProject_id: oldProjectId,
-    oldFile_id: oldFileId,
-  })
-  req.requestLogger.setMessage('copying file')
-
-  FileHandler.copyObject(bucket, `${oldProjectId}/${oldFileId}`, key, err => {
-    if (err) {
-      if (err instanceof Errors.NotFoundError) {
-        res.sendStatus(404)
-      } else {
-        next(err)
-      }
-    } else {
-      res.sendStatus(200)
-    }
-  })
-}
-
-function deleteFile(req, res, next) {
-  metrics.inc('deleteFile')
-  const { key, bucket } = req
-
-  req.requestLogger.addFields({ key, bucket })
-  req.requestLogger.setMessage('deleting file')
-
-  FileHandler.deleteFile(bucket, key, function (err) {
-    if (err) {
-      next(err)
-    } else {
-      res.sendStatus(204)
-    }
-  })
-}
-
-function deleteProject(req, res, next) {
-  metrics.inc('deleteProject')
-  const { key, bucket } = req
-
-  req.requestLogger.setMessage('deleting project')
-  req.requestLogger.addFields({ key, bucket })
-
-  FileHandler.deleteProject(bucket, key, function (err) {
-    if (err) {
-      if (err instanceof Errors.InvalidParametersError) {
-        return res.sendStatus(400)
-      }
-      next(err)
-    } else {
-      res.sendStatus(204)
-    }
-  })
-}
-
-function directorySize(req, res, next) {
-  metrics.inc('projectSize')
-  const { project_id: projectId, bucket } = req
-
-  req.requestLogger.setMessage('getting project size')
-  req.requestLogger.addFields({ projectId, bucket })
-
-  FileHandler.getDirectorySize(bucket, projectId, function (err, size) {
-    if (err) {
-      return next(err)
-    }
-
-    res.json({ 'total bytes': size })
-    req.requestLogger.addFields({ size })
-  })
-}
-
function _getRange(header) {
  const parsed = parseRange(maxSizeInBytes, header)
  if (parsed === -1 || parsed === -2 || parsed.type !== 'bytes') {
@@ -10,23 +10,15 @@ const { ConversionError, InvalidParametersError } = require('./Errors')
const metrics = require('@overleaf/metrics')

module.exports = {
-  copyObject: callbackify(copyObject),
  insertFile: callbackify(insertFile),
-  deleteFile: callbackify(deleteFile),
-  deleteProject: callbackify(deleteProject),
  getFile: callbackify(getFile),
  getRedirectUrl: callbackify(getRedirectUrl),
  getFileSize: callbackify(getFileSize),
-  getDirectorySize: callbackify(getDirectorySize),
  promises: {
-    copyObject,
    getFile,
    getRedirectUrl,
    insertFile,
-    deleteFile,
-    deleteProject,
    getFileSize,
-    getDirectorySize,
  },
}
@@ -36,13 +28,11 @@ if (process.env.NODE_ENV === 'test') {
  }
}

-async function copyObject(bucket, sourceKey, destinationKey) {
-  await PersistorManager.copyObject(bucket, sourceKey, destinationKey)
-}
-
async function insertFile(bucket, key, stream) {
  const convertedKey = KeyBuilder.getConvertedFolderKey(key)
-  if (!convertedKey.match(/^[0-9a-f]{24}\/([0-9a-f]{24}|v\/[0-9]+\/[a-z]+)/i)) {
+  if (
+    !convertedKey.match(/^[0-9a-f]{24}\/([0-9a-f]{24}|v\/[0-9]+\/[a-z0-9]+)/i)
+  ) {
    throw new InvalidParametersError('key does not match validation regex', {
      bucket,
      key,
@@ -52,35 +42,6 @@ async function insertFile(bucket, key, stream) {
  await PersistorManager.sendStream(bucket, key, stream)
}

-async function deleteFile(bucket, key) {
-  const convertedKey = KeyBuilder.getConvertedFolderKey(key)
-  if (!convertedKey.match(/^[0-9a-f]{24}\/([0-9a-f]{24}|v\/[0-9]+\/[a-z]+)/i)) {
-    throw new InvalidParametersError('key does not match validation regex', {
-      bucket,
-      key,
-      convertedKey,
-    })
-  }
-  const jobs = [PersistorManager.deleteObject(bucket, key)]
-  if (
-    Settings.enableConversions &&
-    bucket === Settings.filestore.stores.template_files
-  ) {
-    jobs.push(PersistorManager.deleteDirectory(bucket, convertedKey))
-  }
-  await Promise.all(jobs)
-}
-
-async function deleteProject(bucket, key) {
-  if (!key.match(/^[0-9a-f]{24}\//i)) {
-    throw new InvalidParametersError('key does not match validation regex', {
-      bucket,
-      key,
-    })
-  }
-  await PersistorManager.deleteDirectory(bucket, key)
-}
-
async function getFile(bucket, key, opts) {
  opts = opts || {}
  if (!opts.format && !opts.style) {
@@ -129,10 +90,6 @@ async function getFileSize(bucket, key) {
  return await PersistorManager.getObjectSize(bucket, key)
}

-async function getDirectorySize(bucket, projectId) {
-  return await PersistorManager.directorySize(bucket, projectId)
-}
-
async function _getConvertedFile(bucket, key, opts) {
  const convertedKey = KeyBuilder.addCachingToKey(key, opts)
  const exists = await PersistorManager.checkIfObjectExists(
@@ -1,67 +0,0 @@
-const fs = require('node:fs')
-const path = require('node:path')
-const Settings = require('@overleaf/settings')
-const { WritableBuffer } = require('@overleaf/stream-utils')
-const { promisify } = require('node:util')
-const Stream = require('node:stream')
-
-const pipeline = promisify(Stream.pipeline)
-const fsCopy = promisify(fs.copyFile)
-const fsUnlink = promisify(fs.unlink)
-
-const { HealthCheckError } = require('./Errors')
-const FileConverter = require('./FileConverter').promises
-const FileHandler = require('./FileHandler').promises
-
-async function checkCanGetFiles() {
-  if (!Settings.health_check) {
-    return
-  }
-
-  const projectId = Settings.health_check.project_id
-  const fileId = Settings.health_check.file_id
-  const key = `${projectId}/${fileId}`
-  const bucket = Settings.filestore.stores.user_files
-
-  const buffer = new WritableBuffer({ initialSize: 100 })
-
-  const sourceStream = await FileHandler.getFile(bucket, key, {})
-  try {
-    await pipeline(sourceStream, buffer)
-  } catch (err) {
-    throw new HealthCheckError('failed to get health-check file', {}, err)
-  }
-
-  if (!buffer.size()) {
-    throw new HealthCheckError('no bytes written to download stream')
-  }
-}
-
-async function checkFileConvert() {
-  if (!Settings.enableConversions) {
-    return
-  }
-
-  const imgPath = path.join(Settings.path.uploadFolder, '/tiny.pdf')
-
-  let resultPath
-  try {
-    await fsCopy('./tiny.pdf', imgPath)
-    resultPath = await FileConverter.thumbnail(imgPath)
-  } finally {
-    if (resultPath) {
-      await fsUnlink(resultPath)
-    }
-    await fsUnlink(imgPath)
-  }
-}
-
-module.exports = {
-  check(req, res, next) {
-    Promise.all([checkCanGetFiles(), checkFileConvert()])
-      .then(() => res.sendStatus(200))
-      .catch(err => {
-        next(err)
-      })
-  },
-}
@@ -4,8 +4,6 @@ const projectKey = require('./project_key')
module.exports = {
  getConvertedFolderKey,
  addCachingToKey,
-  userFileKeyMiddleware,
-  userProjectKeyMiddleware,
  bucketFileKeyMiddleware,
  globalBlobFileKeyMiddleware,
  projectBlobFileKeyMiddleware,
@@ -32,21 +30,6 @@ function addCachingToKey(key, opts) {
  return key
}

-function userFileKeyMiddleware(req, res, next) {
-  const { project_id: projectId, file_id: fileId } = req.params
-  req.key = `${projectId}/${fileId}`
-  req.bucket = settings.filestore.stores.user_files
-  next()
-}
-
-function userProjectKeyMiddleware(req, res, next) {
-  const { project_id: projectId } = req.params
-  req.project_id = projectId
-  req.key = `${projectId}/`
-  req.bucket = settings.filestore.stores.user_files
-  next()
-}
-
function bucketFileKeyMiddleware(req, res, next) {
  req.bucket = req.params.bucket
  req.key = req.params[0]
@@ -1,5 +1,5 @@
filestore
---data-dirs=uploads,user_files,template_files
+--data-dirs=uploads,template_files
--dependencies=s3,gcs
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_STORAGE_CLASS=REDUCED_REDUNDANCY,AWS_S3_USER_FILES_BUCKET_NAME=fake-user-files,AWS_S3_USER_FILES_DEK_BUCKET_NAME=fake-user-files-dek,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake-template-files,GCS_USER_FILES_BUCKET_NAME=fake-gcs-user-files,GCS_TEMPLATE_FILES_BUCKET_NAME=fake-gcs-template-files
@@ -13,13 +13,10 @@ if (process.env.AWS_SECRET && !process.env.AWS_SECRET_ACCESS_KEY) {
if (process.env.BACKEND == null) {
  if (process.env.AWS_ACCESS_KEY_ID || process.env.S3_BUCKET_CREDENTIALS) {
    process.env.BACKEND = 's3'
-    process.env.USER_FILES_BUCKET_NAME =
-      process.env.AWS_S3_USER_FILES_BUCKET_NAME
    process.env.TEMPLATE_FILES_BUCKET_NAME =
      process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME
  } else {
    process.env.BACKEND = 'fs'
-    process.env.USER_FILES_BUCKET_NAME = Path.join(__dirname, '../user_files')
    process.env.TEMPLATE_FILES_BUCKET_NAME = Path.join(
      __dirname,
      '../template_files'
@@ -71,7 +68,6 @@ const settings = {
  // which will be picked up automatically.

  stores: {
-    user_files: process.env.USER_FILES_BUCKET_NAME,
    template_files: process.env.TEMPLATE_FILES_BUCKET_NAME,

    // allow signed links to be generated for these buckets
@@ -107,14 +103,4 @@ const settings = {
    parseInt(process.env.GRACEFUL_SHUTDOWN_DELAY_SECONDS ?? '30', 10) * 1000,
}

-// Filestore health check
-// ----------------------
-// Project and file details to check in persistor when calling /health_check
-if (process.env.HEALTH_CHECK_PROJECT_ID && process.env.HEALTH_CHECK_FILE_ID) {
-  settings.health_check = {
-    project_id: process.env.HEALTH_CHECK_PROJECT_ID,
-    file_id: process.env.HEALTH_CHECK_FILE_ID,
-  }
-}
-
module.exports = settings
@@ -103,16 +103,18 @@ describe('Filestore', function () {
    let previousEgress,
      previousIngress,
      metricPrefix,
      projectId,
-     otherProjectId
+     templateId,
+     otherProjectId,
+     templateUrl,
+     fileId,
+     fileKey,
+     fileUrl

    const dataEncryptionKeySize =
      backendSettings.backend === 's3SSEC' ? 32 : 0

    const BUCKET_NAMES = [
      process.env.GCS_USER_FILES_BUCKET_NAME,
      process.env.GCS_TEMPLATE_FILES_BUCKET_NAME,
      `${process.env.GCS_USER_FILES_BUCKET_NAME}-deleted`,
      `${process.env.GCS_TEMPLATE_FILES_BUCKET_NAME}-deleted`,
    ]
@@ -156,8 +158,12 @@ describe('Filestore', function () {
        `${metricPrefix}_egress`
      )
    }
    projectId = new ObjectId().toString()
+   templateId = new ObjectId().toString()
    otherProjectId = new ObjectId().toString()
+   templateUrl = `${filestoreUrl}/template/${templateId}/v/0`
+   fileId = new ObjectId().toString()
+   fileUrl = `${templateUrl}/${fileId}`
+   fileKey = `${templateId}/v/0/${fileId}`
  })

  it('should send a 200 for the status endpoint', async function () {
@@ -169,14 +175,12 @@ describe('Filestore', function () {
  })

  describe('with a file on the server', function () {
-   let fileId, fileUrl, constantFileContent
+   let constantFileContent

    const localFileReadPath =
      '/tmp/filestore_acceptance_tests_file_read.txt'

    beforeEach('upload file', async function () {
-     fileId = new ObjectId().toString()
-     fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
      constantFileContent = [
        'hello world',
        `line 2 goes here ${Math.random()}`,
@@ -224,10 +228,6 @@ describe('Filestore', function () {
    })

    it('should send a 200 for the health-check endpoint using the file', async function () {
-     Settings.health_check = {
-       project_id: projectId,
-       file_id: fileId,
-     }
      const response = await fetch(`${filestoreUrl}/health_check`)
      expect(response.status).to.equal(200)
      const body = await response.text()
@@ -254,8 +254,10 @@ describe('Filestore', function () {
    })

    it('should be able to delete the file', async function () {
-     const response = await fetch(fileUrl, { method: 'DELETE' })
-     expect(response.status).to.equal(204)
+     await app.persistor.deleteObject(
+       Settings.filestore.stores.template_files,
+       fileKey
+     )
      const response2 = await fetch(fileUrl)
      expect(response2.status).to.equal(404)
    })
@@ -263,23 +265,18 @@ describe('Filestore', function () {
    it('should be able to copy files', async function () {
      const newProjectID = new ObjectId().toString()
      const newFileId = new ObjectId().toString()
-     const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}`
-     let response = await fetch(newFileUrl, {
-       method: 'PUT',
-       body: JSON.stringify({
-         source: {
-           project_id: projectId,
-           file_id: fileId,
-         },
-       }),
-       headers: {
-         'Content-Type': 'application/json',
-       },
-     })
-     expect(response.status).to.equal(200)
-     response = await fetch(fileUrl, { method: 'DELETE' })
-     expect(response.status).to.equal(204)
-     response = await fetch(newFileUrl)
+     const newFileUrl = `${filestoreUrl}/template/${newProjectID}/v/0/${newFileId}`
+     const newFileKey = `${newProjectID}/v/0/${newFileId}`
+     await app.persistor.copyObject(
+       Settings.filestore.stores.template_files,
+       fileKey,
+       newFileKey
+     )
+     await app.persistor.deleteObject(
+       Settings.filestore.stores.template_files,
+       fileKey
+     )
+     const response = await fetch(newFileUrl)
      const body = await response.text()
      expect(body).to.equal(constantFileContent)
    })
@@ -299,8 +296,8 @@ describe('Filestore', function () {
    it('should refuse to handle IfNoneMatch', async function () {
      await expect(
        app.persistor.sendStream(
-         Settings.filestore.stores.user_files,
-         `${projectId}/${fileId}`,
+         Settings.filestore.stores.template_files,
+         fileKey,
          fs.createReadStream(localFileReadPath),
          { ifNoneMatch: '*' }
        )
@@ -310,8 +307,8 @@ describe('Filestore', function () {
    it('should reject sendStream on the same key with IfNoneMatch', async function () {
      await expect(
        app.persistor.sendStream(
-         Settings.filestore.stores.user_files,
-         `${projectId}/${fileId}`,
+         Settings.filestore.stores.template_files,
+         fileKey,
          fs.createReadStream(localFileReadPath),
          { ifNoneMatch: '*' }
        )
@@ -319,8 +316,8 @@ describe('Filestore', function () {
    })
    it('should allow sendStream on a different key with IfNoneMatch', async function () {
      await app.persistor.sendStream(
-       Settings.filestore.stores.user_files,
-       `${projectId}/${fileId}-other`,
+       Settings.filestore.stores.template_files,
+       `${templateId}/v/0/${fileId}-other`,
        fs.createReadStream(localFileReadPath),
        { ifNoneMatch: '*' }
      )
@@ -368,7 +365,7 @@ describe('Filestore', function () {
  })

  describe('with multiple files', function () {
-   let fileIds, fileUrls, otherFileUrls, projectUrl, otherProjectUrl
+   let fileIds, fileUrls, otherFileUrls, otherProjectUrl
    const localFileReadPaths = [
      '/tmp/filestore_acceptance_tests_file_read_1.txt',
      '/tmp/filestore_acceptance_tests_file_read_2.txt',
@@ -401,18 +398,17 @@ describe('Filestore', function () {
    })

    beforeEach('upload two files', async function () {
-     projectUrl = `${filestoreUrl}/project/${projectId}`
-     otherProjectUrl = `${filestoreUrl}/project/${otherProjectId}`
+     otherProjectUrl = `${filestoreUrl}/template/${otherProjectId}/v/0`
      fileIds = [
        new ObjectId().toString(),
        new ObjectId().toString(),
        new ObjectId().toString(),
      ]
      fileUrls = [
-       `${projectUrl}/file/${fileIds[0]}`,
-       `${projectUrl}/file/${fileIds[1]}`,
+       `${templateUrl}/${fileIds[0]}`,
+       `${templateUrl}/${fileIds[1]}`,
      ]
-     otherFileUrls = [`${otherProjectUrl}/file/${fileIds[2]}`]
+     otherFileUrls = [`${otherProjectUrl}/${fileIds[2]}`]

      await Promise.all([
        fetch(fileUrls[0], {
@@ -431,11 +427,12 @@ describe('Filestore', function () {
    })

    it('should get the directory size', async function () {
-     const response = await fetch(
-       `${filestoreUrl}/project/${projectId}/size`
-     )
-     const body = await response.text()
-     expect(parseInt(JSON.parse(body)['total bytes'])).to.equal(
+     expect(
+       await app.persistor.directorySize(
+         Settings.filestore.stores.template_files,
+         templateId
+       )
+     ).to.equal(
        constantFileContents[0].length + constantFileContents[1].length
      )
    })
@@ -448,12 +445,14 @@ describe('Filestore', function () {
      }
    })

-   it('should be able to delete the project', async function () {
-     let response = await fetch(projectUrl, { method: 'DELETE' })
-     expect(response.status).to.equal(204)
+   it('should be able to delete a folder', async function () {
+     await app.persistor.deleteDirectory(
+       Settings.filestore.stores.template_files,
+       templateId + '/'
+     )

      for (const index in fileUrls) {
-       response = await fetch(fileUrls[index])
+       const response = await fetch(fileUrls[index])
        expect(response.status).to.equal(404)
      }
    })
@@ -464,23 +463,13 @@ describe('Filestore', function () {
        expect(response.status).to.equal(200)
      }
    })

-   it('should not delete a partial project id', async function () {
-     const response = await fetch(`${filestoreUrl}/project/5`, {
-       method: 'DELETE',
-     })
-     expect(response.status).to.equal(400)
-   })
  })

  describe('with a large file', function () {
    this.timeout(1000 * 20)
-   let fileId, fileUrl, largeFileContent, error
+   let largeFileContent

    beforeEach('upload large file', async function () {
-     fileId = new ObjectId().toString()
-     fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
-
      largeFileContent = '_wombat_'.repeat(1024 * 1024) // 8 megabytes
      largeFileContent += Math.random()
@@ -495,10 +484,6 @@ describe('Filestore', function () {
      expect(body).to.equal(largeFileContent)
    })

-   it('should not throw an error', function () {
-     expect(error).not.to.exist
-   })
-
    it('should not leak a socket', async function () {
      const response = await fetch(fileUrl)
      await response.text()
@@ -557,21 +542,23 @@ describe('Filestore', function () {

  if (backendSettings.backend === 'gcs') {
    describe('when deleting a file in GCS', function () {
-     let fileId, fileUrl, content, error, dateBefore, dateAfter
+     let content, error, dateBefore, dateAfter

      beforeEach('upload and delete file', async function () {
-       fileId = new ObjectId()
-       fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
-
        content = '_wombat_' + Math.random()

        const readStream = streamifier.createReadStream(content)
-       let res = await fetch(fileUrl, { method: 'POST', body: readStream })
+       const res = await fetch(fileUrl, {
+         method: 'POST',
+         body: readStream,
+       })
        if (!res.ok) throw new Error(res.statusText)
        dateBefore = new Date()
-       res = await fetch(fileUrl, { method: 'DELETE' })
+       await app.persistor.deleteObject(
+         Settings.filestore.stores.template_files,
+         fileKey
+       )
        dateAfter = new Date()
-       if (!res.ok) throw new Error(res.statusText)
      })

      it('should not throw an error', function () {
@@ -582,12 +569,12 @@ describe('Filestore', function () {
        let date = dateBefore
        const keys = []
        while (date <= dateAfter) {
-         keys.push(`${projectId}/${fileId}-${date.toISOString()}`)
+         keys.push(`${templateId}/v/0/${fileId}-${date.toISOString()}`)
          date = new Date(date.getTime() + 1)
        }
        await TestHelper.expectPersistorToHaveSomeFile(
          app.persistor,
-         `${Settings.filestore.stores.user_files}-deleted`,
+         `${Settings.filestore.stores.template_files}-deleted`,
          keys,
          content
        )
@@ -596,8 +583,8 @@ describe('Filestore', function () {
      it('should remove the file from the original bucket', async function () {
        await TestHelper.expectPersistorNotToHaveFile(
          app.persistor,
-         Settings.filestore.stores.user_files,
-         `${projectId}/${fileId}`
+         Settings.filestore.stores.template_files,
+         fileKey
        )
      })
    })
@@ -605,20 +592,11 @@ describe('Filestore', function () {

  if (backendSettings.fallback) {
    describe('with a fallback', function () {
-     let constantFileContent,
-       fileId,
-       fileKey,
-       fileUrl,
-       bucket,
-       fallbackBucket
+     let constantFileContent, bucket, fallbackBucket

      beforeEach('prepare fallback', function () {
        constantFileContent = `This is yet more file content ${Math.random()}`
-       fileId = new ObjectId().toString()
-       fileKey = `${projectId}/${fileId}`
-       fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`

-       bucket = Settings.filestore.stores.user_files
+       bucket = Settings.filestore.stores.template_files
        fallbackBucket = Settings.filestore.fallback.buckets[bucket]
      })
@@ -701,34 +679,23 @@ describe('Filestore', function () {
      })

      describe('when copying a file', function () {
-       let newFileId, newFileUrl, newFileKey, opts
+       let newFileKey

        beforeEach('prepare to copy file', function () {
          const newProjectID = new ObjectId().toString()
-         newFileId = new ObjectId().toString()
-         newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}`
-         newFileKey = `${newProjectID}/${newFileId}`
-
-         opts = {
-           method: 'put',
-           body: JSON.stringify({
-             source: {
-               project_id: projectId,
-               file_id: fileId,
-             },
-           }),
-           headers: {
-             'Content-Type': 'application/json',
-           },
-         }
+         const newFileId = new ObjectId().toString()
+         newFileKey = `${newProjectID}/v/0/${newFileId}`
        })

        describe('when copyOnMiss is false', function () {
          beforeEach('copy with copyOnMiss=false', async function () {
            app.persistor.settings.copyOnMiss = false

-           const response = await fetch(newFileUrl, opts)
-           expect(response.status).to.equal(200)
+           await app.persistor.copyObject(
+             Settings.filestore.stores.template_files,
+             fileKey,
+             newFileKey
+           )
          })

          it('should leave the old file in the old bucket', async function () {
@@ -773,8 +740,11 @@ describe('Filestore', function () {
          beforeEach('copy with copyOnMiss=false', async function () {
            app.persistor.settings.copyOnMiss = true

-           const response = await fetch(newFileUrl, opts)
-           expect(response.status).to.equal(200)
+           await app.persistor.copyObject(
+             Settings.filestore.stores.template_files,
+             fileKey,
+             newFileKey
+           )
          })

          it('should leave the old file in the old bucket', async function () {
@@ -842,7 +812,7 @@ describe('Filestore', function () {
        await TestHelper.expectPersistorNotToHaveFile(
          app.persistor.fallbackPersistor,
          fallbackBucket,
-         `${projectId}/${fileId}`
+         fileKey
        )
      })
    })
@@ -859,8 +829,10 @@ describe('Filestore', function () {
      })

      it('should delete the file', async function () {
-       const response1 = await fetch(fileUrl, { method: 'DELETE' })
-       expect(response1.status).to.equal(204)
+       await app.persistor.deleteObject(
+         Settings.filestore.stores.template_files,
+         fileKey
+       )
        const response2 = await fetch(fileUrl)
        expect(response2.status).to.equal(404)
      })
@@ -877,8 +849,10 @@ describe('Filestore', function () {
      })

      it('should delete the file', async function () {
-       const response1 = await fetch(fileUrl, { method: 'DELETE' })
-       expect(response1.status).to.equal(204)
+       await app.persistor.deleteObject(
+         Settings.filestore.stores.template_files,
+         fileKey
+       )
        const response2 = await fetch(fileUrl)
        expect(response2.status).to.equal(404)
      })
@@ -904,19 +878,23 @@ describe('Filestore', function () {
      )

      it('should delete the files', async function () {
-       const response1 = await fetch(fileUrl, { method: 'DELETE' })
-       expect(response1.status).to.equal(204)
+       await app.persistor.deleteObject(
+         Settings.filestore.stores.template_files,
+         fileKey
+       )
        const response2 = await fetch(fileUrl)
        expect(response2.status).to.equal(404)
      })
    })

    describe('when the file does not exist', function () {
-     it('should return return 204', async function () {
+     it('should return success', async function () {
        // S3 doesn't give us a 404 when the object doesn't exist, so to stay
-       // consistent we merrily return 204 ourselves here as well
-       const response = await fetch(fileUrl, { method: 'DELETE' })
-       expect(response.status).to.equal(204)
+       // consistent we merrily return success ourselves here as well
+       await app.persistor.deleteObject(
+         Settings.filestore.stores.template_files,
+         fileKey
+       )
      })
    })
  })
@@ -924,15 +902,13 @@ describe('Filestore', function () {
  }

  describe('with a pdf file', function () {
-   let fileId, fileUrl, localFileSize
+   let localFileSize
    const localFileReadPath = Path.resolve(
      __dirname,
      '../../fixtures/test.pdf'
    )

    beforeEach('upload test.pdf', async function () {
-     fileId = new ObjectId().toString()
-     fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
      const stat = await fsStat(localFileReadPath)
      localFileSize = stat.size
      const readStream = fs.createReadStream(localFileReadPath)
@@ -1032,18 +1008,18 @@ describe('Filestore', function () {
    beforeEach('prepare ids', function () {
      fileId1 = new ObjectId().toString()
      fileId2 = new ObjectId().toString()
-     fileKey1 = `${projectId}/${fileId1}`
-     fileKey2 = `${projectId}/${fileId2}`
-     fileKeyOtherProject = `${new ObjectId().toString()}/${new ObjectId().toString()}`
-     fileUrl1 = `${filestoreUrl}/project/${projectId}/file/${fileId1}`
-     fileUrl2 = `${filestoreUrl}/project/${projectId}/file/${fileId2}`
+     fileKey1 = `${templateId}/v/0/${fileId1}`
+     fileKey2 = `${templateId}/v/0/${fileId2}`
+     fileKeyOtherProject = `${new ObjectId().toString()}/v/0/${new ObjectId().toString()}`
+     fileUrl1 = `${templateUrl}/${fileId1}`
+     fileUrl2 = `${templateUrl}/${fileId2}`
    })

    beforeEach('ensure DEK is missing', async function () {
      // Cannot use test helper expectPersistorNotToHaveFile here, we need to use the KEK.
      await expect(
        app.persistor.getDataEncryptionKeySize(
-         backendSettings.stores.user_files,
+         backendSettings.stores.template_files,
          fileKey1
        )
      ).to.rejectedWith(NotFoundError)
@@ -1065,12 +1041,12 @@ describe('Filestore', function () {

    it('should create a DEK when asked explicitly', async function () {
      await app.persistor.generateDataEncryptionKey(
-       backendSettings.stores.user_files,
+       backendSettings.stores.template_files,
        fileKey1
      )
      expect(
        await app.persistor.getDataEncryptionKeySize(
-         backendSettings.stores.user_files,
+         backendSettings.stores.template_files,
          fileKey1
        )
      ).to.equal(32)
@@ -1080,7 +1056,7 @@ describe('Filestore', function () {
      await createRandomContent(fileUrl1)
      expect(
        await app.persistor.getDataEncryptionKeySize(
-         backendSettings.stores.user_files,
+         backendSettings.stores.template_files,
          fileKey1
        )
      ).to.equal(32)
@@ -1095,7 +1071,7 @@ describe('Filestore', function () {
      // Cannot use test helper expectPersistorNotToHaveFile here, we need to use the KEK.
      await expect(
        app.persistor.getDataEncryptionKeySize(
-         backendSettings.stores.user_files,
+         backendSettings.stores.template_files,
          fileKey1
        )
      ).to.rejectedWith(NotFoundError)
@@ -1106,7 +1082,7 @@ describe('Filestore', function () {

      await expect(
        app.persistor.generateDataEncryptionKey(
-         backendSettings.stores.user_files,
+         backendSettings.stores.template_files,
          fileKey1
        )
      ).to.rejectedWith(AlreadyWrittenError)
@@ -1167,7 +1143,7 @@ describe('Filestore', function () {
    ) {
      const content = Math.random().toString()
      await writer.sendStream(
-       Settings.filestore.stores.user_files,
+       Settings.filestore.stores.template_files,
        fileKey,
        Stream.Readable.from([content])
      )
@@ -1175,7 +1151,7 @@ describe('Filestore', function () {
      for (const persistor of readersSuccess) {
        await TestHelper.expectPersistorToHaveFile(
          persistor,
-         backendSettings.stores.user_files,
+         backendSettings.stores.template_files,
          fileKey,
          content
        )
@@ -1185,7 +1161,7 @@ describe('Filestore', function () {
      await expect(
        TestHelper.expectPersistorToHaveFile(
          persistor,
-         backendSettings.stores.user_files,
+         backendSettings.stores.template_files,
          fileKey,
          content
        )
@@ -1277,7 +1253,7 @@ describe('Filestore', function () {
      const { Contents: dekEntries } = await s3Client
        .listObjectsV2({
          Bucket: process.env.AWS_S3_USER_FILES_DEK_BUCKET_NAME,
-         Prefix: `${projectId}/`,
+         Prefix: `${templateId}/`,
        })
        .promise()
      expect(dekEntries).to.have.length(dekBucketKeys.length)
@@ -1286,8 +1262,8 @@ describe('Filestore', function () {

      const { Contents: userFilesEntries } = await s3Client
        .listObjectsV2({
-         Bucket: backendSettings.stores.user_files,
-         Prefix: `${projectId}/`,
+         Bucket: backendSettings.stores.template_files,
+         Prefix: `${templateId}/`,
        })
        .promise()
      expect(userFilesEntries).to.have.length(userFilesBucketKeys.length)
@@ -1299,7 +1275,7 @@ describe('Filestore', function () {

    it('should use a custom bucket for DEKs', async function () {
      await checkDEKStorage({
-       dekBucketKeys: [`${projectId}/dek`],
+       dekBucketKeys: [`${templateId}/dek`],
        userFilesBucketKeys: [fileKey1],
      })
    })
@@ -1313,26 +1289,26 @@ describe('Filestore', function () {
    it('should refuse to delete top-level prefix', async function () {
      await expect(
        app.persistor.deleteDirectory(
-         Settings.filestore.stores.user_files,
-         projectId.slice(0, 3)
+         Settings.filestore.stores.template_files,
+         templateId.slice(0, 3)
        )
      ).to.be.rejectedWith('not a project-folder')
      expect(
        await app.persistor.checkIfObjectExists(
-         Settings.filestore.stores.user_files,
+         Settings.filestore.stores.template_files,
          fileKey1
        )
      ).to.equal(true)
      await checkGET1()
      expect(
        await app.persistor.checkIfObjectExists(
-         Settings.filestore.stores.user_files,
+         Settings.filestore.stores.template_files,
          fileKey2
        )
      ).to.equal(true)
      expect(
        await app.persistor.getDataEncryptionKeySize(
-         Settings.filestore.stores.user_files,
+         Settings.filestore.stores.template_files,
          fileKey2
        )
      ).to.equal(32)
@@ -1340,24 +1316,24 @@ describe('Filestore', function () {
    })
    it('should delete sub-folder and keep DEK', async function () {
      await app.persistor.deleteDirectory(
-       Settings.filestore.stores.user_files,
+       Settings.filestore.stores.template_files,
        fileKey1 // not really a sub-folder, but it will do for this test.
      )
      expect(
        await app.persistor.checkIfObjectExists(
-         Settings.filestore.stores.user_files,
+         Settings.filestore.stores.template_files,
          fileKey1
        )
      ).to.equal(false)
      expect(
        await app.persistor.checkIfObjectExists(
-         Settings.filestore.stores.user_files,
+         Settings.filestore.stores.template_files,
          fileKey2
        )
      ).to.equal(true)
      expect(
        await app.persistor.getDataEncryptionKeySize(
-         Settings.filestore.stores.user_files,
+         Settings.filestore.stores.template_files,
          fileKey2
        )
      ).to.equal(32)
@@ -1365,24 +1341,24 @@ describe('Filestore', function () {
    })
    it('should delete project folder and DEK', async function () {
      await app.persistor.deleteDirectory(
-       Settings.filestore.stores.user_files,
-       `${projectId}/`
+       Settings.filestore.stores.template_files,
+       `${templateId}/`
      )
      expect(
        await app.persistor.checkIfObjectExists(
-         Settings.filestore.stores.user_files,
+         Settings.filestore.stores.template_files,
          fileKey1
        )
      ).to.equal(false)
      expect(
        await app.persistor.checkIfObjectExists(
-         Settings.filestore.stores.user_files,
+         Settings.filestore.stores.template_files,
          fileKey2
        )
      ).to.equal(false)
      await expect(
        app.persistor.getDataEncryptionKeySize(
-         Settings.filestore.stores.user_files,
+         Settings.filestore.stores.template_files,
          fileKey2
        )
      ).to.rejectedWith(NotFoundError)
@@ -1393,8 +1369,6 @@ describe('Filestore', function () {
  describe('getObjectSize', function () {
    it('should return a number', async function () {
      const buf = Buffer.from('hello')
-     const fileId = new ObjectId().toString()
-     const fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
      const res = await fetch(fileUrl, {
        method: 'POST',
        body: Stream.Readable.from([buf]),
@@ -1402,8 +1376,8 @@ describe('Filestore', function () {
      if (!res.ok) throw new Error(res.statusText)
      expect(
        await app.persistor.getObjectSize(
-         Settings.filestore.stores.user_files,
-         `${projectId}/${fileId}`
+         Settings.filestore.stores.template_files,
+         fileKey
        )
      ).to.equal(buf.byteLength)
    })
@@ -1413,14 +1387,12 @@ describe('Filestore', function () {
    it('should return false when the object does not exist', async function () {
      expect(
        await app.persistor.checkIfObjectExists(
-         Settings.filestore.stores.user_files,
-         `${projectId}/${new ObjectId().toString()}`
+         Settings.filestore.stores.template_files,
+         fileKey
        )
      ).to.equal(false)
    })
    it('should return true when the object exists', async function () {
-     const fileId = new ObjectId().toString()
-     const fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
      const res = await fetch(fileUrl, {
        method: 'POST',
        body: Stream.Readable.from(['hello']),
@@ -1428,8 +1400,8 @@ describe('Filestore', function () {
      if (!res.ok) throw new Error(res.statusText)
      expect(
        await app.persistor.checkIfObjectExists(
-         Settings.filestore.stores.user_files,
-         `${projectId}/${fileId}`
+         Settings.filestore.stores.template_files,
+         fileKey
        )
      ).to.equal(true)
    })
@@ -1438,31 +1410,29 @@ describe('Filestore', function () {
  if (backendSettings.backend === 's3SSEC') {
    describe('storageClass', function () {
      it('should use the default storage class for dek', async function () {
-       const key = `${projectId}/${new ObjectId()}`
        const dekBucket = process.env.AWS_S3_USER_FILES_DEK_BUCKET_NAME
        await app.persistor.sendStream(
          dekBucket,
-         key,
+         fileKey,
          Stream.Readable.from(['hello'])
        )
        expect(
-         await app.persistor.getObjectStorageClass(dekBucket, key)
+         await app.persistor.getObjectStorageClass(dekBucket, fileKey)
        ).to.equal(undefined)
      })

      it('should use the custom storage class for user files', async function () {
-       const key = `${projectId}/${new ObjectId()}`
        await app.persistor.sendStream(
-         Settings.filestore.stores.user_files,
-         key,
+         Settings.filestore.stores.template_files,
+         fileKey,
          Stream.Readable.from(['hello'])
        )
        const sc = AWS_S3_USER_FILES_STORAGE_CLASS
        expect(sc).to.exist
        expect(
          await app.persistor.getObjectStorageClass(
-           Settings.filestore.stores.user_files,
-           key
+           Settings.filestore.stores.template_files,
+           fileKey
          )
        ).to.equal(sc)
      })
@@ -1470,10 +1440,6 @@ describe('Filestore', function () {
  }

  describe('autoGunzip', function () {
-   let key
-   beforeEach('new key', function () {
-     key = `${projectId}/${new ObjectId().toString()}`
-   })
    this.timeout(60 * 1000)
    const body = Buffer.alloc(10 * 1024 * 1024, 'hello')
    const gzippedBody = gzipSync(body)
@@ -1486,7 +1452,7 @@ describe('Filestore', function () {
    */
    async function checkBodyIsTheSame(key, wantBody, autoGunzip) {
      const s = await app.persistor.getObjectStream(
-       Settings.filestore.stores.user_files,
+       Settings.filestore.stores.template_files,
        key,
        { autoGunzip }
      )
@@ -1499,8 +1465,8 @@ describe('Filestore', function () {
    it('should refuse to handle autoGunzip', async function () {
      await expect(
        app.persistor.getObjectStream(
-         Settings.filestore.stores.user_files,
-         key,
+         Settings.filestore.stores.template_files,
+         fileKey,
          { autoGunzip: true }
        )
      ).to.be.rejectedWith(NotImplementedError)
@@ -1508,54 +1474,54 @@ describe('Filestore', function () {
  } else {
    it('should return the raw body with gzip', async function () {
      await app.persistor.sendStream(
-       Settings.filestore.stores.user_files,
-       key,
+       Settings.filestore.stores.template_files,
+       fileKey,
        Stream.Readable.from([gzippedBody]),
        { contentEncoding: 'gzip' }
      )
      expect(
        await app.persistor.getObjectSize(
-         Settings.filestore.stores.user_files,
-         key
+         Settings.filestore.stores.template_files,
+         fileKey
        )
      ).to.equal(gzippedBody.byteLength)
      // raw body with autoGunzip=true
-     await checkBodyIsTheSame(key, body, true)
+     await checkBodyIsTheSame(fileKey, body, true)
      // gzip body without autoGunzip=false
-     await checkBodyIsTheSame(key, gzippedBody, false)
+     await checkBodyIsTheSame(fileKey, gzippedBody, false)
    })
    it('should return the raw body without gzip compression', async function () {
      await app.persistor.sendStream(
-       Settings.filestore.stores.user_files,
-       key,
+       Settings.filestore.stores.template_files,
+       fileKey,
        Stream.Readable.from([body])
      )
      expect(
        await app.persistor.getObjectSize(
-         Settings.filestore.stores.user_files,
-         key
+         Settings.filestore.stores.template_files,
+         fileKey
        )
      ).to.equal(body.byteLength)
      // raw body with both autoGunzip options
-     await checkBodyIsTheSame(key, body, true)
-     await checkBodyIsTheSame(key, body, false)
+     await checkBodyIsTheSame(fileKey, body, true)
+     await checkBodyIsTheSame(fileKey, body, false)
    })

    it('should return the gzip body without gzip header', async function () {
      await app.persistor.sendStream(
-       Settings.filestore.stores.user_files,
-       key,
+       Settings.filestore.stores.template_files,
+       fileKey,
        Stream.Readable.from([gzippedBody])
      )
      expect(
        await app.persistor.getObjectSize(
-         Settings.filestore.stores.user_files,
-         key
+         Settings.filestore.stores.template_files,
+         fileKey
        )
      ).to.equal(gzippedBody.byteLength)
      // gzip body with both autoGunzip options
-     await checkBodyIsTheSame(key, gzippedBody, true)
-     await checkBodyIsTheSame(key, gzippedBody, false)
+     await checkBodyIsTheSame(fileKey, gzippedBody, true)
+     await checkBodyIsTheSame(fileKey, gzippedBody, false)
    })
  }
})
@@ -49,7 +49,7 @@ function s3SSECConfig() {
      return S3SSECKeys
    },
    storageClass: {
-     [process.env.AWS_S3_USER_FILES_BUCKET_NAME]:
+     [process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME]:
        AWS_S3_USER_FILES_STORAGE_CLASS,
    },
  }
@@ -63,7 +63,6 @@ function s3ConfigDefaultProviderCredentials() {

function s3Stores() {
  return {
-   user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME,
    template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME,
  }
}
@@ -82,21 +81,18 @@ function gcsConfig() {

function gcsStores() {
  return {
-   user_files: process.env.GCS_USER_FILES_BUCKET_NAME,
    template_files: process.env.GCS_TEMPLATE_FILES_BUCKET_NAME,
  }
}

function fsStores() {
  return {
-   user_files: Path.resolve(__dirname, '../../../user_files'),
    template_files: Path.resolve(__dirname, '../../../template_files'),
  }
}

function fallbackStores(primaryConfig, fallbackConfig) {
  return {
-   [primaryConfig.user_files]: fallbackConfig.user_files,
    [primaryConfig.template_files]: fallbackConfig.template_files,
  }
}
@@ -10,7 +10,7 @@ describe('FileController', function () {
  const settings = {
    s3: {
      buckets: {
-       user_files: 'user_files',
+       template_files: 'template_files',
      },
    },
  }
@@ -20,19 +20,15 @@ describe('FileController', function () {
  }
  const projectId = 'projectId'
  const fileId = 'file_id'
- const bucket = 'user_files'
+ const bucket = 'template_files'
  const key = `${projectId}/${fileId}`
  const error = new Error('incorrect utensil')

  beforeEach(function () {
    FileHandler = {
-     copyObject: sinon.stub().yields(),
      getFile: sinon.stub().yields(null, fileStream),
      getFileSize: sinon.stub().yields(null, fileSize),
-     deleteFile: sinon.stub().yields(),
-     deleteProject: sinon.stub().yields(),
      insertFile: sinon.stub().yields(),
-     getDirectorySize: sinon.stub().yields(null, fileSize),
      getRedirectUrl: sinon.stub().yields(null, null),
    }
@@ -235,102 +231,4 @@ describe('FileController', function () {
      FileController.insertFile(req, res, next)
    })
  })

- describe('copyFile', function () {
-   const oldFileId = 'oldFileId'
-   const oldProjectId = 'oldProjectid'
-   const oldKey = `${oldProjectId}/${oldFileId}`
-
-   beforeEach(function () {
-     req.body = {
-       source: {
-         project_id: oldProjectId,
-         file_id: oldFileId,
-       },
-     }
-   })
-
-   it('should send bucket name and both keys to FileHandler', function (done) {
-     res.sendStatus = code => {
-       code.should.equal(200)
-       expect(FileHandler.copyObject).to.have.been.calledWith(
-         bucket,
-         oldKey,
-         key
-       )
-       done()
-     }
-     FileController.copyFile(req, res, next)
-   })
-
-   it('should send a 404 if the original file was not found', function (done) {
-     FileHandler.copyObject.yields(
-       new Errors.NotFoundError({ message: 'not found', info: {} })
-     )
-     res.sendStatus = code => {
-       code.should.equal(404)
-       done()
-     }
-     FileController.copyFile(req, res, next)
-   })
-
-   it('should send an error if there was an error', function (done) {
-     FileHandler.copyObject.yields(error)
-     FileController.copyFile(req, res, err => {
-       expect(err).to.equal(error)
-       done()
-     })
-   })
- })
-
- describe('delete file', function () {
-   it('should tell the file handler', function (done) {
-     res.sendStatus = code => {
-       code.should.equal(204)
-       expect(FileHandler.deleteFile).to.have.been.calledWith(bucket, key)
-       done()
-     }
-     FileController.deleteFile(req, res, next)
-   })
-
-   it('should send a 500 if there was an error', function () {
-     FileHandler.deleteFile.yields(error)
-     FileController.deleteFile(req, res, next)
-     expect(next).to.have.been.calledWith(error)
-   })
- })
-
- describe('delete project', function () {
-   it('should tell the file handler', function (done) {
-     res.sendStatus = code => {
-       code.should.equal(204)
-       expect(FileHandler.deleteProject).to.have.been.calledWith(bucket, key)
-       done()
-     }
-     FileController.deleteProject(req, res, next)
-   })
-
-   it('should send a 500 if there was an error', function () {
-     FileHandler.deleteProject.yields(error)
-     FileController.deleteProject(req, res, next)
-     expect(next).to.have.been.calledWith(error)
-   })
- })
-
- describe('directorySize', function () {
-   it('should return total directory size bytes', function (done) {
-     FileController.directorySize(req, {
-       json: result => {
-         expect(result['total bytes']).to.equal(fileSize)
-         done()
-       },
-     })
-   })
-
-   it('should send a 500 if there was an error', function () {
-     FileHandler.getDirectorySize.yields(error)
-     FileController.directorySize(req, res, next)
-     expect(next).to.have.been.calledWith(error)
-   })
- })
})
@@ -22,7 +22,6 @@ describe('FileHandler', function () {
  const bucket = 'my_bucket'
  const key = `${new ObjectId()}/${new ObjectId()}`
  const convertedFolderKey = `${new ObjectId()}/${new ObjectId()}`
- const projectKey = `${new ObjectId()}/`
  const sourceStream = 'sourceStream'
  const convertedKey = 'convertedKey'
  const redirectUrl = 'https://wombat.potato/giraffe'
@@ -41,7 +40,6 @@ describe('FileHandler', function () {
      sendStream: sinon.stub().resolves(),
      insertFile: sinon.stub().resolves(),
      sendFile: sinon.stub().resolves(),
-     directorySize: sinon.stub().resolves(),
    }
    LocalFileWriter = {
      // the callback style is used for detached cleanup calls
@@ -69,7 +67,7 @@ describe('FileHandler', function () {
  }
  Settings = {
    filestore: {
-     stores: { template_files: 'template_files', user_files: 'user_files' },
+     stores: { template_files: 'template_files' },
    },
  }
  fs = {
@@ -139,98 +137,6 @@ describe('FileHandler', function () {
    })
  })

- describe('deleteFile', function () {
-   it('should tell the filestore manager to delete the file', function (done) {
-     FileHandler.deleteFile(bucket, key, err => {
-       expect(err).not.to.exist
-       expect(PersistorManager.deleteObject).to.have.been.calledWith(
-         bucket,
-         key
-       )
-       done()
-     })
-   })
-
-   it('should not tell the filestore manager to delete the cached folder', function (done) {
-     FileHandler.deleteFile(bucket, key, err => {
-       expect(err).not.to.exist
-       expect(PersistorManager.deleteDirectory).not.to.have.been.called
-       done()
-     })
-   })
-
-   it('should accept templates-api key format', function (done) {
-     KeyBuilder.getConvertedFolderKey.returns(
-       '5ecba29f1a294e007d0bccb4/v/0/pdf'
-     )
-     FileHandler.deleteFile(bucket, key, err => {
-       expect(err).not.to.exist
-       done()
-     })
-   })
-
-   it('should throw an error when the key is in the wrong format', function (done) {
-     KeyBuilder.getConvertedFolderKey.returns('wombat')
-     FileHandler.deleteFile(bucket, key, err => {
-       expect(err).to.exist
-       done()
-     })
-   })
-
-   describe('when conversions are enabled', function () {
-     beforeEach(function () {
-       Settings.enableConversions = true
-     })
-
-     it('should delete the convertedKey folder for template files', function (done) {
-       FileHandler.deleteFile(
-         Settings.filestore.stores.template_files,
-         key,
-         err => {
-           expect(err).not.to.exist
-           expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
-             Settings.filestore.stores.template_files,
-             convertedFolderKey
-           )
-           done()
-         }
-       )
-     })
-
-     it('should not delete the convertedKey folder for user files', function (done) {
-       FileHandler.deleteFile(
-         Settings.filestore.stores.user_files,
-         key,
-         err => {
-           expect(err).not.to.exist
-           expect(PersistorManager.deleteDirectory).to.not.have.been.called
-           done()
-         }
-       )
-     })
-   })
- })
-
- describe('deleteProject', function () {
-   it('should tell the filestore manager to delete the folder', function (done) {
-     FileHandler.deleteProject(bucket, projectKey, err => {
-       expect(err).not.to.exist
-       expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
-         bucket,
-         projectKey
-       )
-       done()
-     })
-   })
-
-   it('should throw an error when the key is in the wrong format', function (done) {
-     FileHandler.deleteProject(bucket, 'wombat', err => {
-       expect(err).to.exist
-       done()
-     })
-   })
- })
-
  describe('getFile', function () {
    it('should return the source stream no format or style are defined', function (done) {
      FileHandler.getFile(bucket, key, null, (err, stream) => {
@@ -389,17 +295,4 @@ describe('FileHandler', function () {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('getDirectorySize', function () {
|
||||
it('should call the filestore manager to get directory size', function (done) {
|
||||
FileHandler.getDirectorySize(bucket, key, err => {
|
||||
expect(err).not.to.exist
|
||||
expect(PersistorManager.directorySize).to.have.been.calledWith(
|
||||
bucket,
|
||||
key
|
||||
)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -13,7 +13,6 @@ import en from '../../../services/web/locales/en.json'
|
||||
function resetMeta() {
|
||||
window.metaAttributesCache = new Map()
|
||||
window.metaAttributesCache.set('ol-i18n', { currentLangCode: 'en' })
|
||||
window.metaAttributesCache.set('ol-projectHistoryBlobsEnabled', true)
|
||||
window.metaAttributesCache.set('ol-capabilities', ['chat'])
|
||||
window.metaAttributesCache.set('ol-compileSettings', {
|
||||
compileTimeout: 20,
|
||||
|
||||
@@ -18,7 +18,6 @@ import mongoose from './app/src/infrastructure/Mongoose.js'
|
||||
import { triggerGracefulShutdown } from './app/src/infrastructure/GracefulShutdown.js'
|
||||
import FileWriter from './app/src/infrastructure/FileWriter.js'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import Features from './app/src/infrastructure/Features.js'
|
||||
|
||||
metrics.gauge(
|
||||
'web_startup',
|
||||
@@ -56,9 +55,6 @@ if (Settings.catchErrors) {
|
||||
// Create ./data/dumpFolder if needed
|
||||
FileWriter.ensureDumpFolderExists()
|
||||
|
||||
// Validate combination of feature flags.
|
||||
Features.validateSettings()
|
||||
|
||||
// handle SIGTERM for graceful shutdown in kubernetes
|
||||
process.on('SIGTERM', function (signal) {
|
||||
triggerGracefulShutdown(Server.server, signal)
|
||||
|
||||
@@ -15,7 +15,6 @@ const { Cookie } = require('tough-cookie')
|
||||
const ClsiCookieManager = require('./ClsiCookieManager')(
|
||||
Settings.apis.clsi?.backendGroupName
|
||||
)
|
||||
const Features = require('../../infrastructure/Features')
|
||||
const NewBackendCloudClsiCookieManager = require('./ClsiCookieManager')(
|
||||
Settings.apis.clsi_new?.backendGroupName
|
||||
)
|
||||
@@ -750,18 +749,9 @@ function _finaliseRequest(projectId, options, project, docs, files) {
|
||||
for (let path in files) {
|
||||
const file = files[path]
|
||||
path = path.replace(/^\//, '') // Remove leading /
|
||||
|
||||
const filestoreURL = `${Settings.apis.filestore.url}/project/${project._id}/file/${file._id}`
|
||||
let url = filestoreURL
|
||||
let fallbackURL
|
||||
if (file.hash && Features.hasFeature('project-history-blobs')) {
|
||||
url = getFilestoreBlobURL(historyId, file.hash)
|
||||
fallbackURL = filestoreURL
|
||||
}
|
||||
resources.push({
|
||||
path,
|
||||
url,
|
||||
fallbackURL,
|
||||
url: getFilestoreBlobURL(historyId, file.hash),
|
||||
modified: file.created?.getTime(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -8,8 +8,6 @@ const metrics = require('@overleaf/metrics')
|
||||
const { promisify, callbackify } = require('util')
|
||||
const { promisifyMultiResult } = require('@overleaf/promise-utils')
|
||||
const ProjectGetter = require('../Project/ProjectGetter')
|
||||
const FileStoreHandler = require('../FileStore/FileStoreHandler')
|
||||
const Features = require('../../infrastructure/Features')
|
||||
const Modules = require('../../infrastructure/Modules')
|
||||
|
||||
function getProjectLastUpdatedAt(projectId, callback) {
|
||||
@@ -361,25 +359,19 @@ function resyncProjectHistory(
|
||||
doc: doc.doc._id,
|
||||
path: doc.path,
|
||||
}))
|
||||
const hasFilestore = Features.hasFeature('filestore')
|
||||
if (!hasFilestore) {
|
||||
// Files without a hash likely do not have a blob. Abort.
|
||||
for (const { file } of files) {
|
||||
if (!file.hash) {
|
||||
return callback(
|
||||
new OError('found file with missing hash', { projectId, file })
|
||||
)
|
||||
}
|
||||
// Files without a hash likely do not have a blob. Abort.
|
||||
for (const { file } of files) {
|
||||
if (!file.hash) {
|
||||
return callback(
|
||||
new OError('found file with missing hash', { projectId, file })
|
||||
)
|
||||
}
|
||||
}
|
||||
files = files.map(file => ({
|
||||
file: file.file._id,
|
||||
path: file.path,
|
||||
url: hasFilestore
|
||||
? FileStoreHandler._buildUrl(projectId, file.file._id)
|
||||
: undefined,
|
||||
_hash: file.file.hash,
|
||||
createdBlob: !hasFilestore,
|
||||
createdBlob: true,
|
||||
metadata: buildFileMetadataForHistory(file.file),
|
||||
}))
|
||||
|
||||
@@ -480,15 +472,12 @@ function updateProjectStructure(
|
||||
changes.newDocs,
|
||||
historyRangesSupport
|
||||
)
|
||||
const hasFilestore = Features.hasFeature('filestore')
|
||||
if (!hasFilestore) {
|
||||
for (const newEntity of changes.newFiles || []) {
|
||||
if (!newEntity.file.hash) {
|
||||
// Files without a hash likely do not have a blob. Abort.
|
||||
return callback(
|
||||
new OError('found file with missing hash', { newEntity })
|
||||
)
|
||||
}
|
||||
for (const newEntity of changes.newFiles || []) {
|
||||
if (!newEntity.file.hash) {
|
||||
// Files without a hash likely do not have a blob. Abort.
|
||||
return callback(
|
||||
new OError('found file with missing hash', { newEntity })
|
||||
)
|
||||
}
|
||||
}
|
||||
const {
|
||||
@@ -623,8 +612,6 @@ function _getUpdates(
|
||||
})
|
||||
}
|
||||
}
|
||||
const hasFilestore = Features.hasFeature('filestore')
|
||||
|
||||
for (const id in newEntitiesHash) {
|
||||
const newEntity = newEntitiesHash[id]
|
||||
const oldEntity = oldEntitiesHash[id]
|
||||
@@ -638,10 +625,9 @@ function _getUpdates(
|
||||
docLines: newEntity.docLines,
|
||||
ranges: newEntity.ranges,
|
||||
historyRangesSupport,
|
||||
url: newEntity.file != null && hasFilestore ? newEntity.url : undefined,
|
||||
hash: newEntity.file != null ? newEntity.file.hash : undefined,
|
||||
hash: newEntity.file?.hash,
|
||||
metadata: buildFileMetadataForHistory(newEntity.file),
|
||||
createdBlob: (newEntity.createdBlob || !hasFilestore) ?? false,
|
||||
createdBlob: true,
|
||||
})
|
||||
} else if (newEntity.path !== oldEntity.path) {
|
||||
// entity renamed
|
||||
|
||||
@@ -4,8 +4,6 @@ import logger from '@overleaf/logger'
|
||||
import ProjectEntityHandler from '../Project/ProjectEntityHandler.js'
|
||||
import ProjectGetter from '../Project/ProjectGetter.js'
|
||||
import HistoryManager from '../History/HistoryManager.js'
|
||||
import FileStoreHandler from '../FileStore/FileStoreHandler.js'
|
||||
import Features from '../../infrastructure/Features.js'
|
||||
let ProjectZipStreamManager
|
||||
|
||||
export default ProjectZipStreamManager = {
|
||||
@@ -111,22 +109,17 @@ export default ProjectZipStreamManager = {
|
||||
},
|
||||
|
||||
getFileStream: (projectId, file, callback) => {
|
||||
if (Features.hasFeature('project-history-blobs')) {
|
||||
HistoryManager.requestBlobWithFallback(
|
||||
projectId,
|
||||
file.hash,
|
||||
file._id,
|
||||
(error, result) => {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
const { stream } = result
|
||||
callback(null, stream)
|
||||
HistoryManager.requestBlobWithProjectId(
|
||||
projectId,
|
||||
file.hash,
|
||||
(error, result) => {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
)
|
||||
} else {
|
||||
FileStoreHandler.getFileStream(projectId, file._id, {}, callback)
|
||||
}
|
||||
const { stream } = result
|
||||
callback(null, stream)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
addAllFilesToArchive(projectId, archive, callback) {
|
||||
|
||||
@@ -4,11 +4,9 @@ import { pipeline } from 'node:stream/promises'
|
||||
import logger from '@overleaf/logger'
|
||||
import { expressify } from '@overleaf/promise-utils'
|
||||
import Metrics from '@overleaf/metrics'
|
||||
import FileStoreHandler from './FileStoreHandler.js'
|
||||
import ProjectLocator from '../Project/ProjectLocator.js'
|
||||
import HistoryManager from '../History/HistoryManager.js'
|
||||
import Errors from '../Errors/Errors.js'
|
||||
import Features from '../../infrastructure/Features.js'
|
||||
import { preparePlainTextResponse } from '../../infrastructure/Response.js'
|
||||
|
||||
async function getFile(req, res) {
|
||||
@@ -55,25 +53,15 @@ async function getFile(req, res) {
|
||||
status: Boolean(file?.hash),
|
||||
})
|
||||
|
||||
let source, stream, contentLength
|
||||
let stream, contentLength
|
||||
try {
|
||||
if (Features.hasFeature('project-history-blobs') && file?.hash) {
|
||||
// Get the file from history
|
||||
;({ source, stream, contentLength } =
|
||||
await HistoryManager.promises.requestBlobWithFallback(
|
||||
projectId,
|
||||
file.hash,
|
||||
fileId
|
||||
))
|
||||
} else {
|
||||
// The file-hash is missing. Fall back to filestore.
|
||||
stream = await FileStoreHandler.promises.getFileStream(
|
||||
// Get the file from history
|
||||
;({ stream, contentLength } =
|
||||
await HistoryManager.promises.requestBlobWithProjectId(
|
||||
projectId,
|
||||
fileId,
|
||||
queryString
|
||||
)
|
||||
source = 'filestore'
|
||||
}
|
||||
file.hash,
|
||||
'GET'
|
||||
))
|
||||
} catch (err) {
|
||||
if (err instanceof Errors.NotFoundError) {
|
||||
return res.status(404).end()
|
||||
@@ -97,7 +85,6 @@ async function getFile(req, res) {
|
||||
// allow the browser to cache these immutable files
|
||||
// note: both "private" and "max-age" appear to be required for caching
|
||||
res.setHeader('Cache-Control', 'private, max-age=3600')
|
||||
res.appendHeader('X-Served-By', source)
|
||||
try {
|
||||
await pipeline(stream, res)
|
||||
} catch (err) {
|
||||
@@ -150,20 +137,14 @@ async function getFileHead(req, res) {
|
||||
status: Boolean(file?.hash),
|
||||
})
|
||||
|
||||
let fileSize, source
|
||||
let fileSize
|
||||
try {
|
||||
if (Features.hasFeature('project-history-blobs') && file?.hash) {
|
||||
;({ source, contentLength: fileSize } =
|
||||
await HistoryManager.promises.requestBlobWithFallback(
|
||||
projectId,
|
||||
file.hash,
|
||||
fileId,
|
||||
'HEAD'
|
||||
))
|
||||
} else {
|
||||
fileSize = await FileStoreHandler.promises.getFileSize(projectId, fileId)
|
||||
source = 'filestore'
|
||||
}
|
||||
;({ contentLength: fileSize } =
|
||||
await HistoryManager.promises.requestBlobWithProjectId(
|
||||
projectId,
|
||||
file.hash,
|
||||
'HEAD'
|
||||
))
|
||||
} catch (err) {
|
||||
if (err instanceof Errors.NotFoundError) {
|
||||
return res.status(404).end()
|
||||
@@ -174,7 +155,6 @@ async function getFileHead(req, res) {
|
||||
}
|
||||
|
||||
res.setHeader('Content-Length', fileSize)
|
||||
res.appendHeader('X-Served-By', source)
|
||||
res.status(200).end()
|
||||
}
|
||||
|
||||
|
||||
@@ -1,20 +1,12 @@
|
||||
const _ = require('lodash')
|
||||
const logger = require('@overleaf/logger')
|
||||
const fs = require('fs')
|
||||
const request = require('request')
|
||||
const settings = require('@overleaf/settings')
|
||||
const Async = require('async')
|
||||
const FileHashManager = require('./FileHashManager')
|
||||
const HistoryManager = require('../History/HistoryManager')
|
||||
const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler')
|
||||
const { File } = require('../../models/File')
|
||||
const Errors = require('../Errors/Errors')
|
||||
const OError = require('@overleaf/o-error')
|
||||
const { promisifyAll } = require('@overleaf/promise-utils')
|
||||
const Features = require('../../infrastructure/Features')
|
||||
|
||||
const ONE_MIN_IN_MS = 60 * 1000
|
||||
const FIVE_MINS_IN_MS = ONE_MIN_IN_MS * 5
|
||||
|
||||
const FileStoreHandler = {
|
||||
RETRY_ATTEMPTS: 3,
|
||||
@@ -40,27 +32,14 @@ const FileStoreHandler = {
|
||||
},
|
||||
|
||||
_uploadToHistory(historyId, hash, size, fsPath, callback) {
|
||||
if (Features.hasFeature('project-history-blobs')) {
|
||||
Async.retry(
|
||||
FileStoreHandler.RETRY_ATTEMPTS,
|
||||
cb =>
|
||||
HistoryManager.uploadBlobFromDisk(historyId, hash, size, fsPath, cb),
|
||||
error => {
|
||||
if (error) return callback(error, false)
|
||||
callback(null, true)
|
||||
}
|
||||
)
|
||||
} else {
|
||||
callback(null, false)
|
||||
}
|
||||
},
|
||||
|
||||
_uploadToFileStore(projectId, fileArgs, fsPath, callback) {
|
||||
Async.retry(
|
||||
FileStoreHandler.RETRY_ATTEMPTS,
|
||||
(cb, results) =>
|
||||
FileStoreHandler._doUploadFileFromDisk(projectId, fileArgs, fsPath, cb),
|
||||
callback
|
||||
cb =>
|
||||
HistoryManager.uploadBlobFromDisk(historyId, hash, size, fsPath, cb),
|
||||
error => {
|
||||
if (error) return callback(error, false)
|
||||
callback(null, true)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
@@ -99,274 +78,23 @@ const FileStoreHandler = {
|
||||
hash,
|
||||
stat.size,
|
||||
fsPath,
|
||||
function (err, createdBlob) {
|
||||
function (err) {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
fileArgs = { ...fileArgs, hash }
|
||||
FileStoreHandler._uploadToFileStore(
|
||||
projectId,
|
||||
fileArgs,
|
||||
fsPath,
|
||||
function (err, result) {
|
||||
if (err) {
|
||||
OError.tag(err, 'Error uploading file, retries failed', {
|
||||
projectId,
|
||||
fileArgs,
|
||||
})
|
||||
return callback(err)
|
||||
}
|
||||
callback(err, result.url, result.fileRef, createdBlob)
|
||||
}
|
||||
)
|
||||
callback(err, new File(fileArgs), true)
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
_doUploadFileFromDisk(projectId, fileArgs, fsPath, callback) {
|
||||
const callbackOnce = _.once(callback)
|
||||
|
||||
const fileRef = new File(fileArgs)
|
||||
const fileId = fileRef._id
|
||||
const url = FileStoreHandler._buildUrl(projectId, fileId)
|
||||
|
||||
if (!Features.hasFeature('filestore')) {
|
||||
return callbackOnce(null, { url, fileRef })
|
||||
}
|
||||
|
||||
const readStream = fs.createReadStream(fsPath)
|
||||
readStream.on('error', function (err) {
|
||||
logger.warn(
|
||||
{ err, projectId, fileId, fsPath },
|
||||
'something went wrong on the read stream of uploadFileFromDisk'
|
||||
)
|
||||
callbackOnce(err)
|
||||
})
|
||||
readStream.on('open', function () {
|
||||
const opts = {
|
||||
method: 'post',
|
||||
uri: url,
|
||||
timeout: FIVE_MINS_IN_MS,
|
||||
headers: {
|
||||
'X-File-Hash-From-Web': fileArgs.hash,
|
||||
}, // send the hash to the filestore as a custom header so it can be checked
|
||||
}
|
||||
const writeStream = request(opts)
|
||||
writeStream.on('error', function (err) {
|
||||
logger.warn(
|
||||
{ err, projectId, fileId, fsPath },
|
||||
'something went wrong on the write stream of uploadFileFromDisk'
|
||||
)
|
||||
callbackOnce(err)
|
||||
})
|
||||
writeStream.on('response', function (response) {
|
||||
if (![200, 201].includes(response.statusCode)) {
|
||||
const err = new OError(
|
||||
`non-ok response from filestore for upload: ${response.statusCode}`,
|
||||
{ statusCode: response.statusCode }
|
||||
)
|
||||
return callbackOnce(err)
|
||||
}
|
||||
callbackOnce(null, { url, fileRef })
|
||||
}) // have to pass back an object because async.retry only accepts a single result argument
|
||||
readStream.pipe(writeStream)
|
||||
})
|
||||
},
|
||||
|
||||
getFileStreamNew(project, file, query, callback) {
|
||||
const projectId = project._id
|
||||
const historyId = project.overleaf?.history?.id
|
||||
const fileId = file._id
|
||||
const hash = file.hash
|
||||
if (historyId && hash && Features.hasFeature('project-history-blobs')) {
|
||||
// new behaviour - request from history
|
||||
const range = _extractRange(query?.range)
|
||||
HistoryManager.requestBlobWithFallback(
|
||||
projectId,
|
||||
hash,
|
||||
fileId,
|
||||
'GET',
|
||||
range,
|
||||
function (err, result) {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
}
|
||||
const { stream } = result
|
||||
callback(null, stream)
|
||||
}
|
||||
)
|
||||
} else {
|
||||
// original behaviour
|
||||
FileStoreHandler.getFileStream(projectId, fileId, query, callback)
|
||||
}
|
||||
},
|
||||
|
||||
getFileStream(projectId, fileId, query, callback) {
|
||||
if (!Features.hasFeature('filestore')) {
|
||||
return callback(
|
||||
new Errors.NotFoundError('filestore is disabled, file not found')
|
||||
)
|
||||
}
|
||||
|
||||
let queryString = '?from=getFileStream'
|
||||
if (query != null && query.format != null) {
|
||||
queryString += `&format=${query.format}`
|
||||
}
|
||||
const opts = {
|
||||
method: 'get',
|
||||
uri: `${this._buildUrl(projectId, fileId)}${queryString}`,
|
||||
timeout: FIVE_MINS_IN_MS,
|
||||
headers: {},
|
||||
}
|
||||
if (query != null && query.range != null) {
|
||||
const rangeText = query.range
|
||||
if (rangeText && rangeText.match != null && rangeText.match(/\d+-\d+/)) {
|
||||
opts.headers.range = `bytes=${query.range}`
|
||||
}
|
||||
}
|
||||
const readStream = request(opts)
|
||||
readStream.on('error', err =>
|
||||
logger.err(
|
||||
{ err, projectId, fileId, query, opts },
|
||||
'error in file stream'
|
||||
)
|
||||
)
|
||||
callback(null, readStream)
|
||||
},
|
||||
|
||||
getFileSize(projectId, fileId, callback) {
|
||||
const url = this._buildUrl(projectId, fileId)
|
||||
request.head(`${url}?from=getFileSize`, (err, res) => {
|
||||
if (err) {
|
||||
OError.tag(err, 'failed to get file size from filestore', {
|
||||
projectId,
|
||||
fileId,
|
||||
})
|
||||
return callback(err)
|
||||
}
|
||||
if (res.statusCode === 404) {
|
||||
return callback(new Errors.NotFoundError('file not found in filestore'))
|
||||
}
|
||||
if (res.statusCode !== 200) {
|
||||
logger.warn(
|
||||
{ projectId, fileId, statusCode: res.statusCode },
|
||||
'filestore returned non-200 response'
|
||||
)
|
||||
return callback(new Error('filestore returned non-200 response'))
|
||||
}
|
||||
const fileSize = res.headers['content-length']
|
||||
callback(null, fileSize)
|
||||
})
|
||||
},
|
||||
|
||||
deleteFile(projectId, fileId, callback) {
|
||||
logger.debug({ projectId, fileId }, 'telling file store to delete file')
|
||||
const opts = {
|
||||
method: 'delete',
|
||||
uri: this._buildUrl(projectId, fileId),
|
||||
timeout: FIVE_MINS_IN_MS,
|
||||
}
|
||||
request(opts, function (err, response) {
|
||||
if (err) {
|
||||
logger.warn(
|
||||
{ err, projectId, fileId },
|
||||
'something went wrong deleting file from filestore'
|
||||
)
|
||||
}
|
||||
callback(err)
|
||||
})
|
||||
},
|
||||
|
||||
deleteProject(projectId, callback) {
|
||||
if (!Features.hasFeature('filestore')) {
|
||||
return callback() // if filestore is not in use, we don't need to delete anything
|
||||
}
|
||||
request(
|
||||
{
|
||||
method: 'delete',
|
||||
uri: this._buildUrl(projectId),
|
||||
timeout: FIVE_MINS_IN_MS,
|
||||
},
|
||||
err => {
|
||||
if (err) {
|
||||
return callback(
|
||||
OError.tag(
|
||||
err,
|
||||
'something went wrong deleting a project in filestore',
|
||||
{ projectId }
|
||||
)
|
||||
)
|
||||
}
|
||||
callback()
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
copyFile(oldProjectId, oldFileId, newProjectId, newFileId, callback) {
|
||||
logger.debug(
|
||||
{ oldProjectId, oldFileId, newProjectId, newFileId },
|
||||
'telling filestore to copy a file'
|
||||
)
|
||||
const opts = {
|
||||
method: 'put',
|
||||
json: {
|
||||
source: {
|
||||
project_id: oldProjectId,
|
||||
file_id: oldFileId,
|
||||
},
|
||||
},
|
||||
uri: this._buildUrl(newProjectId, newFileId),
|
||||
timeout: FIVE_MINS_IN_MS,
|
||||
}
|
||||
request(opts, function (err, response) {
|
||||
if (err) {
|
||||
OError.tag(
|
||||
err,
|
||||
'something went wrong telling filestore api to copy file',
|
||||
{
|
||||
oldProjectId,
|
||||
oldFileId,
|
||||
newProjectId,
|
||||
newFileId,
|
||||
}
|
||||
)
|
||||
callback(err)
|
||||
} else if (response.statusCode >= 200 && response.statusCode < 300) {
|
||||
// successful response
|
||||
callback(null, opts.uri)
|
||||
} else {
|
||||
err = new OError(
|
||||
`non-ok response from filestore for copyFile: ${response.statusCode}`,
|
||||
{
|
||||
uri: opts.uri,
|
||||
statusCode: response.statusCode,
|
||||
}
|
||||
)
|
||||
callback(err)
|
||||
}
|
||||
})
|
||||
},
|
||||
|
||||
_buildUrl(projectId, fileId) {
|
||||
return (
|
||||
`${settings.apis.filestore.url}/project/${projectId}` +
|
||||
(fileId ? `/file/${fileId}` : '')
|
||||
)
|
||||
},
|
||||
}
|
||||
|
||||
function _extractRange(range) {
|
||||
if (typeof range === 'string' && /\d+-\d+/.test(range)) {
|
||||
return `bytes=${range}`
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = FileStoreHandler
|
||||
module.exports.promises = promisifyAll(FileStoreHandler, {
|
||||
multiResult: {
|
||||
uploadFileFromDisk: ['url', 'fileRef', 'createdBlob'],
|
||||
uploadFileFromDiskWithHistoryId: ['url', 'fileRef', 'createdBlob'],
|
||||
uploadFileFromDisk: ['fileRef', 'createdBlob'],
|
||||
uploadFileFromDiskWithHistoryId: ['fileRef', 'createdBlob'],
|
||||
},
|
||||
})
|
||||
|
||||
@@ -52,13 +52,12 @@ async function requestBlob(method, req, res) {
|
||||
}
|
||||
|
||||
const range = req.get('Range')
|
||||
let stream, source, contentLength
|
||||
let stream, contentLength
|
||||
try {
|
||||
;({ stream, source, contentLength } =
|
||||
await HistoryManager.promises.requestBlobWithFallback(
|
||||
;({ stream, contentLength } =
|
||||
await HistoryManager.promises.requestBlobWithProjectId(
|
||||
projectId,
|
||||
hash,
|
||||
req.query.fallback,
|
||||
method,
|
||||
range
|
||||
))
|
||||
@@ -66,7 +65,6 @@ async function requestBlob(method, req, res) {
|
||||
if (err instanceof Errors.NotFoundError) return res.status(404).end()
|
||||
throw err
|
||||
}
|
||||
res.appendHeader('X-Served-By', source)
|
||||
|
||||
if (contentLength) res.setHeader('Content-Length', contentLength) // set on HEAD
|
||||
res.setHeader('Content-Type', 'application/octet-stream')
|
||||
|
||||
@@ -11,9 +11,8 @@ const OError = require('@overleaf/o-error')
|
||||
const UserGetter = require('../User/UserGetter')
|
||||
const ProjectGetter = require('../Project/ProjectGetter')
|
||||
const HistoryBackupDeletionHandler = require('./HistoryBackupDeletionHandler')
|
||||
const { db, ObjectId, waitForDb } = require('../../infrastructure/mongodb')
|
||||
const { db, waitForDb } = require('../../infrastructure/mongodb')
|
||||
const Metrics = require('@overleaf/metrics')
|
||||
const logger = require('@overleaf/logger')
|
||||
const { NotFoundError } = require('../Errors/Errors')
|
||||
|
||||
const HISTORY_V1_URL = settings.apis.v1_history.url
|
||||
@@ -169,22 +168,25 @@ async function copyBlob(sourceHistoryId, targetHistoryId, hash) {
|
||||
)
|
||||
}
|
||||
|
||||
async function requestBlobWithFallback(
|
||||
async function requestBlobWithProjectId(
|
||||
projectId,
|
||||
hash,
|
||||
fileId,
|
||||
method = 'GET',
|
||||
range = ''
|
||||
) {
|
||||
const project = await ProjectGetter.promises.getProject(projectId, {
|
||||
'overleaf.history.id': true,
|
||||
})
|
||||
return requestBlob(project.overleaf.history.id, hash, method, range)
|
||||
}
|
||||
|
||||
async function requestBlob(historyId, hash, method = 'GET', range = '') {
|
||||
// Talk to history-v1 directly to avoid streaming via project-history.
|
||||
let url = new URL(HISTORY_V1_URL)
|
||||
url.pathname += `/projects/${project.overleaf.history.id}/blobs/${hash}`
|
||||
const url = new URL(HISTORY_V1_URL)
|
||||
url.pathname += `/projects/${historyId}/blobs/${hash}`
|
||||
|
||||
const opts = { method, headers: { Range: range } }
|
||||
let stream, response, source
|
||||
let stream, response
|
||||
try {
|
||||
;({ stream, response } = await fetchStreamWithResponse(url, {
|
||||
...opts,
|
||||
@@ -193,38 +195,18 @@ async function requestBlobWithFallback(
|
||||
password: settings.apis.v1_history.pass,
|
||||
},
|
||||
}))
|
||||
source = 'history-v1'
|
||||
} catch (err) {
|
||||
if (err instanceof RequestFailedError && err.response.status === 404) {
|
||||
if (ObjectId.isValid(fileId)) {
|
||||
url = new URL(settings.apis.filestore.url)
|
||||
url.pathname = `/project/${projectId}/file/${fileId}`
|
||||
try {
|
||||
;({ stream, response } = await fetchStreamWithResponse(url, opts))
|
||||
} catch (err) {
|
||||
if (
|
||||
err instanceof RequestFailedError &&
|
||||
err.response.status === 404
|
||||
) {
|
||||
throw new NotFoundError()
|
||||
}
|
||||
throw err
|
||||
}
|
||||
logger.warn({ projectId, hash, fileId }, 'missing history blob')
|
||||
source = 'filestore'
|
||||
} else {
|
||||
throw new NotFoundError()
|
||||
}
|
||||
throw new NotFoundError()
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
Metrics.inc('request_blob', 1, { path: source })
|
||||
Metrics.inc('request_blob', 1, { path: 'history-v1' })
|
||||
return {
|
||||
url,
|
||||
stream,
|
||||
source,
|
||||
contentLength: response.headers.get('Content-Length'),
|
||||
contentLength: parseInt(response.headers.get('Content-Length'), 10),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -417,7 +399,8 @@ module.exports = {
|
||||
getCurrentContent: callbackify(getCurrentContent),
|
||||
uploadBlobFromDisk: callbackify(uploadBlobFromDisk),
|
||||
copyBlob: callbackify(copyBlob),
|
||||
requestBlobWithFallback: callbackify(requestBlobWithFallback),
|
||||
requestBlob: callbackify(requestBlob),
|
||||
requestBlobWithProjectId: callbackify(requestBlobWithProjectId),
|
||||
getLatestHistory: callbackify(getLatestHistory),
|
||||
getChanges: callbackify(getChanges),
|
||||
promises: {
|
||||
@@ -431,7 +414,8 @@ module.exports = {
|
||||
getContentAtVersion,
|
||||
uploadBlobFromDisk,
|
||||
copyBlob,
|
||||
requestBlobWithFallback,
|
||||
requestBlob,
|
||||
requestBlobWithProjectId,
|
||||
getLatestHistory,
|
||||
getChanges,
|
||||
},
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
// Pass settings to enable consistent unit tests from .js and .mjs modules
|
||||
function projectHistoryURLWithFilestoreFallback(
|
||||
Settings,
|
||||
projectId,
|
||||
historyId,
|
||||
fileRef,
|
||||
origin
|
||||
) {
|
||||
const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileRef._id}?from=${origin}`
|
||||
// TODO: When this file is converted to ES modules we will be able to use Features.hasFeature('project-history-blobs'). Currently we can't stub the feature return value in tests.
|
||||
if (fileRef.hash && Settings.filestoreMigrationLevel >= 1) {
|
||||
return {
|
||||
url: `${Settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`,
|
||||
fallbackURL: filestoreURL,
|
||||
}
|
||||
} else {
|
||||
return { url: filestoreURL }
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { projectHistoryURLWithFilestoreFallback }
|
||||
@@ -15,7 +15,6 @@ const AuthorizationManager = require('../Authorization/AuthorizationManager')
|
||||
const ProjectLocator = require('../Project/ProjectLocator')
|
||||
const DocstoreManager = require('../Docstore/DocstoreManager')
|
||||
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
|
||||
const FileStoreHandler = require('../FileStore/FileStoreHandler')
|
||||
const _ = require('lodash')
|
||||
const LinkedFilesHandler = require('./LinkedFilesHandler')
|
||||
const {
|
||||
@@ -25,6 +24,7 @@ const {
|
||||
SourceFileNotFoundError,
|
||||
} = require('./LinkedFilesErrors')
|
||||
const { promisify } = require('@overleaf/promise-utils')
|
||||
const HistoryManager = require('../History/HistoryManager')
|
||||
|
||||
module.exports = ProjectFileAgent = {
|
||||
createLinkedFile(
|
||||
@@ -134,17 +134,17 @@ module.exports = ProjectFileAgent = {
|
||||
}
|
||||
) // Created
|
||||
} else if (type === 'file') {
|
||||
return FileStoreHandler.getFileStreamNew(
|
||||
sourceProject,
|
||||
entity,
|
||||
null,
|
||||
function (err, fileStream) {
|
||||
return HistoryManager.requestBlob(
|
||||
sourceProject.overleaf.history.id,
|
||||
entity.hash,
|
||||
'GET',
|
||||
function (err, result) {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
return LinkedFilesHandler.importFromStream(
|
||||
projectId,
|
||||
fileStream,
|
||||
result.stream,
|
||||
linkedFileData,
|
||||
name,
|
||||
parentFolderId,
|
||||
|
||||
@@ -844,9 +844,6 @@ const _ProjectController = {
|
||||
isInvitedMember
|
||||
),
|
||||
capabilities,
|
||||
projectHistoryBlobsEnabled: Features.hasFeature(
|
||||
'project-history-blobs'
|
||||
),
|
||||
roMirrorOnClientNoLocalStorage:
|
||||
Settings.adminOnlyLogin || project.name.startsWith('Debug: '),
|
||||
languages: Settings.languages,
|
||||
|
||||
@@ -16,7 +16,6 @@ const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter')
|
||||
const DocstoreManager = require('../Docstore/DocstoreManager')
|
||||
const EditorRealTimeController = require('../Editor/EditorRealTimeController')
|
||||
const HistoryManager = require('../History/HistoryManager')
|
||||
const FilestoreHandler = require('../FileStore/FileStoreHandler')
|
||||
const ChatApiHandler = require('../Chat/ChatApiHandler')
|
||||
const { promiseMapWithLimit } = require('@overleaf/promise-utils')
|
||||
const { READ_PREFERENCE_SECONDARY } = require('../../infrastructure/mongodb')
|
||||
@@ -350,7 +349,6 @@ async function expireDeletedProject(projectId) {
|
||||
deletedProject.project._id,
|
||||
historyId
|
||||
),
|
||||
FilestoreHandler.promises.deleteProject(deletedProject.project._id),
|
||||
ChatApiHandler.promises.destroyProject(deletedProject.project._id),
|
||||
ProjectAuditLogEntry.deleteMany({ projectId }),
|
||||
Modules.promises.hooks.fire('projectExpired', deletedProject.project._id),
|
||||
|
||||
@@ -7,7 +7,6 @@ const { Doc } = require('../../models/Doc')
|
||||
const { File } = require('../../models/File')
|
||||
const DocstoreManager = require('../Docstore/DocstoreManager')
|
||||
const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler')
|
||||
const FileStoreHandler = require('../FileStore/FileStoreHandler')
|
||||
const HistoryManager = require('../History/HistoryManager')
|
||||
const ProjectCreationHandler = require('./ProjectCreationHandler')
|
||||
const ProjectDeleter = require('./ProjectDeleter')
|
||||
@@ -20,7 +19,6 @@ const SafePath = require('./SafePath')
|
||||
const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
|
||||
const _ = require('lodash')
|
||||
const TagsHandler = require('../Tags/TagsHandler')
|
||||
const Features = require('../../infrastructure/Features')
|
||||
const ClsiCacheManager = require('../Compile/ClsiCacheManager')
|
||||
|
||||
module.exports = {
|
||||
@@ -225,66 +223,29 @@ async function _copyFiles(sourceEntries, sourceProject, targetProject) {
|
||||
async sourceEntry => {
|
||||
const sourceFile = sourceEntry.file
|
||||
const path = sourceEntry.path
|
||||
const file = new File({ name: SafePath.clean(sourceFile.name) })
|
||||
const file = new File({
|
||||
name: SafePath.clean(sourceFile.name),
|
||||
hash: sourceFile.hash,
|
||||
})
|
||||
if (sourceFile.linkedFileData != null) {
|
||||
file.linkedFileData = sourceFile.linkedFileData
|
||||
file.created = sourceFile.created
|
||||
}
|
||||
if (sourceFile.hash != null) {
|
||||
file.hash = sourceFile.hash
|
||||
}
|
||||
let createdBlob = false
|
||||
const usingFilestore = Features.hasFeature('filestore')
|
||||
if (file.hash != null && Features.hasFeature('project-history-blobs')) {
|
||||
try {
|
||||
await HistoryManager.promises.copyBlob(
|
||||
sourceHistoryId,
|
||||
targetHistoryId,
|
||||
file.hash
|
||||
)
|
||||
createdBlob = true
|
||||
if (!usingFilestore) {
|
||||
return { createdBlob, file, path, url: null }
|
||||
}
|
||||
} catch (err) {
|
||||
if (!usingFilestore) {
|
||||
throw OError.tag(err, 'unexpected error copying blob', {
|
||||
sourceProjectId: sourceProject._id,
|
||||
targetProjectId: targetProject._id,
|
||||
sourceFile,
|
||||
sourceHistoryId,
|
||||
})
|
||||
} else {
|
||||
logger.error(
|
||||
{
|
||||
err,
|
||||
sourceProjectId: sourceProject._id,
|
||||
targetProjectId: targetProject._id,
|
||||
sourceFile,
|
||||
sourceHistoryId,
|
||||
},
|
||||
'unexpected error copying blob'
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
if (createdBlob && Features.hasFeature('project-history-blobs')) {
|
||||
return { createdBlob, file, path, url: null }
|
||||
}
|
||||
if (!usingFilestore) {
|
||||
// Note: This is also checked in app.mjs
|
||||
throw new OError(
|
||||
'bad config: need to enable either filestore or project-history-blobs'
|
||||
try {
|
||||
await HistoryManager.promises.copyBlob(
|
||||
sourceHistoryId,
|
||||
targetHistoryId,
|
||||
file.hash
|
||||
)
|
||||
return { createdBlob: true, file, path }
|
||||
} catch (err) {
|
||||
throw OError.tag(err, 'unexpected error copying blob', {
|
||||
sourceProjectId: sourceProject._id,
|
||||
targetProjectId: targetProject._id,
|
||||
sourceFile,
|
||||
sourceHistoryId,
|
||||
})
|
||||
}
|
||||
const url = await FileStoreHandler.promises.copyFile(
|
||||
sourceProject._id,
|
||||
sourceFile._id,
|
||||
targetProject._id,
|
||||
file._id
|
||||
)
|
||||
|
||||
return { createdBlob, file, path, url }
|
||||
}
|
||||
)
|
||||
return targetEntries
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
let ProjectEditorHandler
|
||||
const _ = require('lodash')
|
||||
const Path = require('path')
|
||||
const Features = require('../../infrastructure/Features')
|
||||
|
||||
module.exports = ProjectEditorHandler = {
|
||||
trackChangesAvailable: false,
|
||||
@@ -98,18 +97,12 @@ module.exports = ProjectEditorHandler = {
|
||||
},
|
||||
|
||||
buildFileModelView(file) {
|
||||
const additionalFileProperties = {}
|
||||
|
||||
if (Features.hasFeature('project-history-blobs')) {
|
||||
additionalFileProperties.hash = file.hash
|
||||
}
|
||||
|
||||
return {
|
||||
_id: file._id,
|
||||
name: file.name,
|
||||
linkedFileData: file.linkedFileData,
|
||||
created: file.created,
|
||||
...additionalFileProperties,
|
||||
hash: file.hash,
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
@@ -333,7 +333,7 @@ const addFile = wrapWithLock({
|
||||
if (!SafePath.isCleanFilename(fileName)) {
|
||||
throw new Errors.InvalidNameError('invalid element name')
|
||||
}
|
||||
const { url, fileRef, createdBlob } =
|
||||
const { fileRef, createdBlob } =
|
||||
await ProjectEntityUpdateHandler._uploadFile(
|
||||
projectId,
|
||||
folderId,
|
||||
@@ -347,7 +347,6 @@ const addFile = wrapWithLock({
|
||||
folderId,
|
||||
userId,
|
||||
fileRef,
|
||||
fileStoreUrl: url,
|
||||
createdBlob,
|
||||
source,
|
||||
}
|
||||
@@ -357,7 +356,6 @@ const addFile = wrapWithLock({
|
||||
folderId,
|
||||
userId,
|
||||
fileRef,
|
||||
fileStoreUrl,
|
||||
createdBlob,
|
||||
source,
|
||||
}) {
|
||||
@@ -374,7 +372,6 @@ const addFile = wrapWithLock({
|
||||
createdBlob,
|
||||
file: fileRef,
|
||||
path: result && result.path && result.path.fileSystem,
|
||||
url: fileStoreUrl,
|
||||
},
|
||||
]
|
||||
await DocumentUpdaterHandler.promises.updateProjectStructure(
|
||||
@@ -548,7 +545,7 @@ const upsertFile = wrapWithLock({
|
||||
name: fileName,
|
||||
linkedFileData,
|
||||
}
|
||||
const { url, fileRef, createdBlob } =
|
||||
const { fileRef, createdBlob } =
|
||||
await FileStoreHandler.promises.uploadFileFromDisk(
|
||||
projectId,
|
||||
fileArgs,
|
||||
@@ -563,7 +560,6 @@ const upsertFile = wrapWithLock({
|
||||
linkedFileData,
|
||||
userId,
|
||||
fileRef,
|
||||
fileStoreUrl: url,
|
||||
createdBlob,
|
||||
source,
|
||||
}
|
||||
@@ -574,7 +570,6 @@ const upsertFile = wrapWithLock({
|
||||
fileName,
|
||||
userId,
|
||||
fileRef,
|
||||
fileStoreUrl,
|
||||
createdBlob,
|
||||
source,
|
||||
}) {
|
||||
@@ -639,7 +634,6 @@ const upsertFile = wrapWithLock({
|
||||
createdBlob,
|
||||
file: fileRef,
|
||||
path: path.fileSystem,
|
||||
url: fileStoreUrl,
|
||||
},
|
||||
],
|
||||
newProject: project,
|
||||
@@ -659,7 +653,6 @@ const upsertFile = wrapWithLock({
|
||||
existingFile._id,
|
||||
userId,
|
||||
fileRef,
|
||||
fileStoreUrl,
|
||||
folderId,
|
||||
source,
|
||||
createdBlob
|
||||
@@ -673,7 +666,6 @@ const upsertFile = wrapWithLock({
|
||||
folderId,
|
||||
userId,
|
||||
fileRef,
|
||||
fileStoreUrl,
|
||||
createdBlob,
|
||||
source,
|
||||
})
|
||||
@@ -733,15 +725,12 @@ const upsertFileWithPath = wrapWithLock({
|
||||
name: fileName,
|
||||
linkedFileData,
|
||||
}
|
||||
const {
|
||||
url: fileStoreUrl,
|
||||
fileRef,
|
||||
createdBlob,
|
||||
} = await FileStoreHandler.promises.uploadFileFromDisk(
|
||||
projectId,
|
||||
fileArgs,
|
||||
fsPath
|
||||
)
|
||||
const { fileRef, createdBlob } =
|
||||
await FileStoreHandler.promises.uploadFileFromDisk(
|
||||
projectId,
|
||||
fileArgs,
|
||||
fsPath
|
||||
)
|
||||
|
||||
return {
|
||||
projectId,
|
||||
@@ -751,7 +740,6 @@ const upsertFileWithPath = wrapWithLock({
|
||||
linkedFileData,
|
||||
userId,
|
||||
fileRef,
|
||||
fileStoreUrl,
|
||||
createdBlob,
|
||||
source,
|
||||
}
|
||||
@@ -764,7 +752,6 @@ const upsertFileWithPath = wrapWithLock({
|
||||
linkedFileData,
|
||||
userId,
|
||||
fileRef,
|
||||
fileStoreUrl,
|
||||
createdBlob,
|
||||
source,
|
||||
}) {
|
||||
@@ -787,7 +774,6 @@ const upsertFileWithPath = wrapWithLock({
|
||||
linkedFileData,
|
||||
userId,
|
||||
fileRef,
|
||||
fileStoreUrl,
|
||||
createdBlob,
|
||||
source,
|
||||
})
|
||||
@@ -1084,15 +1070,12 @@ const convertDocToFile = wrapWithLock({
|
||||
}
|
||||
await DocumentUpdaterHandler.promises.deleteDoc(projectId, docId, false)
|
||||
const fsPath = await FileWriter.promises.writeLinesToDisk(projectId, lines)
|
||||
const {
|
||||
url: fileStoreUrl,
|
||||
fileRef,
|
||||
createdBlob,
|
||||
} = await FileStoreHandler.promises.uploadFileFromDisk(
|
||||
projectId,
|
||||
{ name: doc.name, rev: rev + 1 },
|
||||
fsPath
|
||||
)
|
||||
const { fileRef, createdBlob } =
|
||||
await FileStoreHandler.promises.uploadFileFromDisk(
|
||||
projectId,
|
||||
{ name: doc.name, rev: rev + 1 },
|
||||
fsPath
|
||||
)
|
||||
try {
|
||||
await fs.promises.unlink(fsPath)
|
||||
} catch (err) {
|
||||
@@ -1103,7 +1086,6 @@ const convertDocToFile = wrapWithLock({
|
||||
doc,
|
||||
path: docPath,
|
||||
fileRef,
|
||||
fileStoreUrl,
|
||||
userId,
|
||||
source,
|
||||
createdBlob,
|
||||
@@ -1114,7 +1096,6 @@ const convertDocToFile = wrapWithLock({
|
||||
doc,
|
||||
path,
|
||||
fileRef,
|
||||
fileStoreUrl,
|
||||
userId,
|
||||
source,
|
||||
createdBlob,
|
||||
@@ -1133,7 +1114,7 @@ const convertDocToFile = wrapWithLock({
|
||||
userId,
|
||||
{
|
||||
oldDocs: [{ doc, path }],
|
||||
newFiles: [{ file: fileRef, path, url: fileStoreUrl, createdBlob }],
|
||||
newFiles: [{ file: fileRef, path, createdBlob }],
|
||||
newProject: project,
|
||||
},
|
||||
source
|
||||
@@ -1380,7 +1361,6 @@ const ProjectEntityUpdateHandler = {
|
||||
fileId,
|
||||
userId,
|
||||
newFileRef,
|
||||
fileStoreUrl,
|
||||
folderId,
|
||||
source,
|
||||
createdBlob
|
||||
@@ -1409,7 +1389,6 @@ const ProjectEntityUpdateHandler = {
|
||||
file: updatedFileRef,
|
||||
createdBlob,
|
||||
path: path.fileSystem,
|
||||
url: fileStoreUrl,
|
||||
},
|
||||
]
|
||||
const projectHistoryId = project.overleaf?.history?.id
|
||||
|
||||
@@ -24,7 +24,6 @@ import _ from 'lodash'
|
||||
import Async from 'async'
|
||||
import Errors from '../Errors/Errors.js'
|
||||
import { promisify } from '@overleaf/promise-utils'
|
||||
import HistoryURLHelper from '../History/HistoryURLHelper.js'
|
||||
|
||||
let ReferencesHandler
|
||||
|
||||
@@ -167,21 +166,15 @@ export default ReferencesHandler = {
|
||||
const bibDocUrls = docIds.map(docId =>
|
||||
ReferencesHandler._buildDocUrl(projectId, docId)
|
||||
)
|
||||
const bibFileUrls = fileRefs.map(fileRef =>
|
||||
HistoryURLHelper.projectHistoryURLWithFilestoreFallback(
|
||||
settings,
|
||||
projectId,
|
||||
historyId,
|
||||
fileRef,
|
||||
'bibFileUrls'
|
||||
)
|
||||
)
|
||||
const bibFileUrls = fileRefs.map(fileRef => ({
|
||||
url: `${settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`,
|
||||
}))
|
||||
const sourceURLs = bibDocUrls.concat(bibFileUrls)
|
||||
return request.post(
|
||||
{
|
||||
url: `${settings.apis.references.url}/project/${projectId}/index`,
|
||||
json: {
|
||||
docUrls: sourceURLs.map(item => item.fallbackURL || item.url),
|
||||
docUrls: sourceURLs.map(item => item.url),
|
||||
sourceURLs,
|
||||
fullIndex: isFullIndex,
|
||||
},
|
||||
|
||||
@@ -6,7 +6,6 @@ const metrics = require('@overleaf/metrics')
|
||||
const Path = require('path')
|
||||
const { fetchNothing } = require('@overleaf/fetch-utils')
|
||||
const settings = require('@overleaf/settings')
|
||||
const HistoryURLHelper = require('../History/HistoryURLHelper')
|
||||
|
||||
const CollaboratorsGetter =
|
||||
require('../Collaborators/CollaboratorsGetter').promises
|
||||
@@ -81,24 +80,14 @@ async function addFile(params) {
|
||||
rev,
|
||||
folderId,
|
||||
} = params
|
||||
// Go through project-history to avoid the need for handling history-v1 authentication.
|
||||
const { url, fallbackURL } =
|
||||
HistoryURLHelper.projectHistoryURLWithFilestoreFallback(
|
||||
settings,
|
||||
projectId,
|
||||
historyId,
|
||||
{ _id: fileId, hash },
|
||||
'tpdsAddFile'
|
||||
)
|
||||
|
||||
await addEntity({
|
||||
projectId,
|
||||
path,
|
||||
projectName,
|
||||
rev,
|
||||
folderId,
|
||||
streamOrigin: url,
|
||||
streamFallback: fallbackURL,
|
||||
// Go through project-history to avoid the need for handling history-v1 authentication.
|
||||
streamOrigin: `${settings.apis.project_history.url}/project/${historyId}/blob/${hash}`,
|
||||
entityId: fileId,
|
||||
entityType: 'file',
|
||||
})
|
||||
|
||||
@@ -194,14 +194,14 @@ async function _createFile(project, projectPath, fsPath) {
|
||||
throw new OError('missing history id')
|
||||
}
|
||||
const fileName = Path.basename(projectPath)
|
||||
const { createdBlob, fileRef, url } =
|
||||
const { createdBlob, fileRef } =
|
||||
await FileStoreHandler.promises.uploadFileFromDiskWithHistoryId(
|
||||
projectId,
|
||||
historyId,
|
||||
{ name: fileName },
|
||||
fsPath
|
||||
)
|
||||
return { createdBlob, file: fileRef, path: projectPath, url }
|
||||
return { createdBlob, file: fileRef, path: projectPath }
|
||||
}
|
||||
|
||||
async function _notifyDocumentUpdater(project, userId, changes) {
|
||||
|
||||
@@ -19,7 +19,6 @@ const trackChangesModuleAvailable =
|
||||
* @property {boolean | undefined} enableGithubSync
|
||||
* @property {boolean | undefined} enableGitBridge
|
||||
* @property {boolean | undefined} enableHomepage
|
||||
* @property {number} filestoreMigrationLevel
|
||||
* @property {boolean | undefined} enableSaml
|
||||
* @property {boolean | undefined} ldap
|
||||
* @property {boolean | undefined} oauth
|
||||
@@ -29,14 +28,6 @@ const trackChangesModuleAvailable =
|
||||
*/
|
||||
|
||||
const Features = {
|
||||
validateSettings() {
|
||||
if (![0, 1, 2].includes(Settings.filestoreMigrationLevel)) {
|
||||
throw new Error(
|
||||
`invalid OVERLEAF_FILESTORE_MIGRATION_LEVEL=${Settings.filestoreMigrationLevel}, expected 0, 1 or 2`
|
||||
)
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* @returns {boolean}
|
||||
*/
|
||||
@@ -97,10 +88,6 @@ const Features = {
|
||||
_.get(Settings, ['apis', 'linkedUrlProxy', 'url']) &&
|
||||
Settings.enabledLinkedFileTypes.includes('url')
|
||||
)
|
||||
case 'project-history-blobs':
|
||||
return Settings.filestoreMigrationLevel > 0
|
||||
case 'filestore':
|
||||
return Settings.filestoreMigrationLevel < 2
|
||||
case 'support':
|
||||
return supportModuleAvailable
|
||||
case 'symbol-palette':
|
||||
|
||||
@@ -13,7 +13,6 @@ meta(name="ol-maxDocLength" data-type="number" content=maxDocLength)
|
||||
meta(name="ol-maxReconnectGracefullyIntervalMs" data-type="number" content=maxReconnectGracefullyIntervalMs)
|
||||
meta(name="ol-wikiEnabled" data-type="boolean" content=settings.proxyLearn)
|
||||
meta(name="ol-capabilities" data-type="json" content=capabilities)
|
||||
meta(name="ol-projectHistoryBlobsEnabled" data-type="boolean" content=projectHistoryBlobsEnabled)
|
||||
meta(name="ol-gitBridgePublicBaseUrl" content=gitBridgePublicBaseUrl)
|
||||
meta(name="ol-gitBridgeEnabled" data-type="boolean" content=gitBridgeEnabled)
|
||||
meta(name="ol-compilesUserContentDomain" content=settings.compilesUserContentDomain)
|
||||
|
||||
@@ -441,9 +441,6 @@ module.exports = {
|
||||
','
|
||||
),
|
||||
|
||||
filestoreMigrationLevel:
|
||||
parseInt(process.env.OVERLEAF_FILESTORE_MIGRATION_LEVEL, 10) || 0,
|
||||
|
||||
// i18n
|
||||
// ------
|
||||
//
|
||||
|
||||
@@ -34,7 +34,6 @@ import {
|
||||
import { Folder } from '../../../../../types/folder'
|
||||
import { useReferencesContext } from '@/features/ide-react/context/references-context'
|
||||
import { usePermissionsContext } from '@/features/ide-react/context/permissions-context'
|
||||
import { fileUrl } from '@/features/utils/fileUrl'
|
||||
import { FileTreeEntity } from '@ol-types/file-tree-entity'
|
||||
|
||||
type DroppedFile = File & {
|
||||
@@ -500,7 +499,7 @@ export const FileTreeActionableProvider: FC<React.PropsWithChildren> = ({
|
||||
const selectedEntity = findInTree(fileTreeData, selectedEntityId)
|
||||
|
||||
if (selectedEntity?.type === 'fileRef') {
|
||||
return fileUrl(projectId, selectedEntityId, selectedEntity.entity.hash)
|
||||
return `/project/${projectId}/blob/${selectedEntity.entity.hash}`
|
||||
}
|
||||
|
||||
if (selectedEntity?.type === 'doc') {
|
||||
|
||||
@@ -3,7 +3,6 @@ import { FileTreeEntity } from '../../../../../types/file-tree-entity'
|
||||
import { Doc } from '../../../../../types/doc'
|
||||
import { FileRef } from '../../../../../types/file-ref'
|
||||
import { PreviewPath } from '../../../../../types/preview-path'
|
||||
import { fileUrl } from '../../utils/fileUrl'
|
||||
|
||||
type DocFindResult = {
|
||||
entity: Doc
|
||||
@@ -123,9 +122,9 @@ export function previewByPath(
|
||||
const result = findEntityByPath(folder, path + suffix)
|
||||
|
||||
if (result?.type === 'fileRef') {
|
||||
const { name, _id: id, hash } = result.entity
|
||||
const { name, hash } = result.entity
|
||||
return {
|
||||
url: fileUrl(projectId, id, hash),
|
||||
url: `/project/${projectId}/blob/${hash}`,
|
||||
extension: name.slice(name.lastIndexOf('.') + 1),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ import { useState, type ElementType } from 'react'
|
||||
import { Trans, useTranslation } from 'react-i18next'
|
||||
|
||||
import { formatTime, relativeDate } from '../../utils/format-date'
|
||||
import { fileUrl } from '../../utils/fileUrl'
|
||||
import { useFileTreeData } from '@/shared/context/file-tree-data-context'
|
||||
import { useProjectContext } from '@/shared/context/project-context'
|
||||
|
||||
@@ -83,7 +82,7 @@ export default function FileViewHeader({ file }: FileViewHeaderProps) {
|
||||
<OLButton
|
||||
variant="secondary"
|
||||
download={file.name}
|
||||
href={fileUrl(projectId, file.id, file.hash)}
|
||||
href={`/project/${projectId}/blob/${file.hash}`}
|
||||
>
|
||||
<MaterialIcon type="download" className="align-middle" />{' '}
|
||||
<span>{t('download')}</span>
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { useProjectContext } from '../../../shared/context/project-context'
|
||||
import { BinaryFile } from '@/features/file-view/types/binary-file'
|
||||
import { fileUrl } from '../../utils/fileUrl'
|
||||
|
||||
export default function FileViewImage({
|
||||
file,
|
||||
@@ -14,7 +13,7 @@ export default function FileViewImage({
|
||||
const { projectId } = useProjectContext()
|
||||
return (
|
||||
<img
|
||||
src={fileUrl(projectId, file.id, file.hash)}
|
||||
src={`/project/${projectId}/blob/${file.hash}`}
|
||||
onLoad={onLoad}
|
||||
onError={onError}
|
||||
alt={file.name}
|
||||
|
||||
@@ -3,7 +3,6 @@ import { useProjectContext } from '../../../shared/context/project-context'
|
||||
import { debugConsole } from '@/utils/debugging'
|
||||
import useAbortController from '../../../shared/hooks/use-abort-controller'
|
||||
import { BinaryFile } from '@/features/file-view/types/binary-file'
|
||||
import { fileUrl } from '../../utils/fileUrl'
|
||||
|
||||
const MAX_FILE_SIZE = 2 * 1024 * 1024
|
||||
|
||||
@@ -29,7 +28,7 @@ export default function FileViewText({
|
||||
if (inFlight) {
|
||||
return
|
||||
}
|
||||
const path = fileUrl(projectId, file.id, file.hash)
|
||||
const path = `/project/${projectId}/blob/${file.hash}`
|
||||
const fetchContentLengthTimeout = setTimeout(
|
||||
() => fetchContentLengthController.abort(),
|
||||
10000
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
// Helper function to compute the url for a file in history-v1 or filestore.
|
||||
// This will be obsolete when the migration to history-v1 is complete.
|
||||
|
||||
import getMeta from '@/utils/meta'
|
||||
|
||||
const projectHistoryBlobsEnabled = getMeta('ol-projectHistoryBlobsEnabled')
|
||||
|
||||
export function fileUrl(projectId, id, hash) {
|
||||
if (projectHistoryBlobsEnabled && hash) {
|
||||
return `/project/${projectId}/blob/${hash}?fallback=${id}`
|
||||
} else {
|
||||
return `/project/${projectId}/file/${id}`
|
||||
}
|
||||
}
|
||||
@@ -210,7 +210,6 @@ export interface Meta {
|
||||
'ol-primaryEmail': { email: string; confirmed: boolean }
|
||||
'ol-project': any // TODO
|
||||
'ol-projectEntityCounts': { files: number; docs: number }
|
||||
'ol-projectHistoryBlobsEnabled': boolean
|
||||
'ol-projectName': string
|
||||
'ol-projectOwnerHasPremiumOnPageLoad': boolean
|
||||
'ol-projectSyncSuccessMessage': string
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import '../../../../../test/acceptance/src/helpers/InitApp.mjs'
|
||||
import MockDocstoreApi from '../../../../../test/acceptance/src/mocks/MockDocstoreApi.mjs'
|
||||
import MockDocUpdaterApi from '../../../../../test/acceptance/src/mocks/MockDocUpdaterApi.mjs'
|
||||
import MockFilestoreApi from '../../../../../test/acceptance/src/mocks/MockFilestoreApi.mjs'
|
||||
import MockNotificationsApi from '../../../../../test/acceptance/src/mocks/MockNotificationsApi.mjs'
|
||||
import MockProjectHistoryApi from '../../../../../test/acceptance/src/mocks/MockProjectHistoryApi.mjs'
|
||||
import MockSpellingApi from '../../../../../test/acceptance/src/mocks/MockSpellingApi.mjs'
|
||||
@@ -14,7 +13,6 @@ const mockOpts = {
|
||||
|
||||
MockDocstoreApi.initialize(23016, mockOpts)
|
||||
MockDocUpdaterApi.initialize(23003, mockOpts)
|
||||
MockFilestoreApi.initialize(23009, mockOpts)
|
||||
MockNotificationsApi.initialize(23042, mockOpts)
|
||||
MockProjectHistoryApi.initialize(23054, mockOpts)
|
||||
MockSpellingApi.initialize(23005, mockOpts)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -7,18 +7,15 @@ import Path from 'node:path'
|
||||
import User from '../../../../../test/acceptance/src/helpers/User.mjs'
|
||||
import MockProjectHistoryApiClass from '../../../../../test/acceptance/src/mocks/MockProjectHistoryApi.mjs'
|
||||
import MockDocstoreApiClass from '../../../../../test/acceptance/src/mocks/MockDocstoreApi.mjs'
|
||||
import MockFilestoreApiClass from '../../../../../test/acceptance/src/mocks/MockFilestoreApi.mjs'
|
||||
import MockV1HistoryApiClass from '../../../../../test/acceptance/src/mocks/MockV1HistoryApi.mjs'
|
||||
import Features from '../../../../../app/src/infrastructure/Features.js'
|
||||
|
||||
let MockProjectHistoryApi, MockDocstoreApi, MockFilestoreApi, MockV1HistoryApi
|
||||
let MockProjectHistoryApi, MockDocstoreApi, MockV1HistoryApi
|
||||
|
||||
const __dirname = fileURLToPath(new URL('.', import.meta.url))
|
||||
|
||||
before(function () {
|
||||
MockProjectHistoryApi = MockProjectHistoryApiClass.instance()
|
||||
MockDocstoreApi = MockDocstoreApiClass.instance()
|
||||
MockFilestoreApi = MockFilestoreApiClass.instance()
|
||||
MockV1HistoryApi = MockV1HistoryApiClass.instance()
|
||||
})
|
||||
|
||||
@@ -121,41 +118,23 @@ describe('RestoringFiles', function () {
|
||||
)
|
||||
})
|
||||
|
||||
if (Features.hasFeature('project-history-blobs')) {
|
||||
it('should have created a file in history-v1', function (done) {
|
||||
this.owner.getProject(this.project_id, (error, project) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
let file = _.find(
|
||||
project.rootFolder[0].fileRefs,
|
||||
file => file.name === 'image.png'
|
||||
)
|
||||
file =
|
||||
MockV1HistoryApi.blobs[project.overleaf.history.id.toString()][
|
||||
file.hash
|
||||
]
|
||||
expect(file).to.deep.equal(Buffer.from(this.pngData))
|
||||
done()
|
||||
})
|
||||
it('should have created a file in history-v1', function (done) {
|
||||
this.owner.getProject(this.project_id, (error, project) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
let file = _.find(
|
||||
project.rootFolder[0].fileRefs,
|
||||
file => file.name === 'image.png'
|
||||
)
|
||||
file =
|
||||
MockV1HistoryApi.blobs[project.overleaf.history.id.toString()][
|
||||
file.hash
|
||||
]
|
||||
expect(file).to.deep.equal(Buffer.from(this.pngData))
|
||||
done()
|
||||
})
|
||||
}
|
||||
if (Features.hasFeature('filestore')) {
|
||||
it('should have created a file in filestore', function (done) {
|
||||
this.owner.getProject(this.project_id, (error, project) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
let file = _.find(
|
||||
project.rootFolder[0].fileRefs,
|
||||
file => file.name === 'image.png'
|
||||
)
|
||||
file = MockFilestoreApi.getFile(this.project_id, file._id)
|
||||
expect(file).to.deep.equal(this.pngData)
|
||||
done()
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('restoring to a directory that exists', function () {
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
const Path = require('path')
|
||||
const DocstoreManager = require('../app/src/Features/Docstore/DocstoreManager')
|
||||
const DocumentUpdaterHandler = require('../app/src/Features/DocumentUpdater/DocumentUpdaterHandler')
|
||||
const FileStoreHandler = require('../app/src/Features/FileStore/FileStoreHandler')
|
||||
const ProjectGetter = require('../app/src/Features/Project/ProjectGetter')
|
||||
const ProjectEntityMongoUpdateHandler = require('../app/src/Features/Project/ProjectEntityMongoUpdateHandler')
|
||||
const { waitForDb, db, ObjectId } = require('../app/src/infrastructure/mongodb')
|
||||
const HistoryManager = require('../app/src/Features/History/HistoryManager')
|
||||
const logger = require('@overleaf/logger').logger
|
||||
|
||||
const args = require('minimist')(process.argv.slice(2), {
|
||||
@@ -211,10 +211,12 @@ async function checkProject(projectId) {
|
||||
}
|
||||
for (const { file, path } of fileEntries) {
|
||||
try {
|
||||
const fileSize = await FileStoreHandler.promises.getFileSize(
|
||||
projectId,
|
||||
file._id
|
||||
)
|
||||
const { contentLength: fileSize } =
|
||||
await HistoryManager.promises.requestBlobWithProjectId(
|
||||
projectId,
|
||||
file.hash,
|
||||
'HEAD'
|
||||
)
|
||||
if (pathCounts.get(path) > 1) {
|
||||
logFile(projectId, path, { ...file, fileSize }, 'duplicate path')
|
||||
errors++
|
||||
|
||||
@@ -3,7 +3,7 @@ import { ObjectId, db } from '../app/src/infrastructure/mongodb.js'
|
||||
import ProjectEntityHandler from '../app/src/Features/Project/ProjectEntityHandler.js'
|
||||
import ProjectGetter from '../app/src/Features/Project/ProjectGetter.js'
|
||||
import Errors from '../app/src/Features/Errors/Errors.js'
|
||||
import FileStoreHandler from '../app/src/Features/FileStore/FileStoreHandler.js'
|
||||
import HistoryManager from '../app/src/Features/History/HistoryManager.js'
|
||||
|
||||
// Handles a list of project IDs from stdin, one per line, and outputs the count of files and docs
|
||||
// in the project, along with the aggregated size in bytes for all files and docs.
|
||||
@@ -63,24 +63,16 @@ async function countFilesSize(files, projectId) {
|
||||
return 0
|
||||
}
|
||||
|
||||
const ids = files.map(fileObject => fileObject.file._id)
|
||||
|
||||
let totalFileSize = 0
|
||||
|
||||
for (const fileId of ids) {
|
||||
const contentLength = await FileStoreHandler.promises.getFileSize(
|
||||
projectId,
|
||||
fileId
|
||||
)
|
||||
const size = parseInt(contentLength, 10)
|
||||
|
||||
if (isNaN(size)) {
|
||||
throw new Error(
|
||||
`Unable to fetch file size for fileId=${fileId} and projectId=${projectId}`
|
||||
for (const { file } of files) {
|
||||
const { contentLength } =
|
||||
await HistoryManager.promises.requestBlobWithProjectId(
|
||||
projectId,
|
||||
file.hash,
|
||||
'HEAD'
|
||||
)
|
||||
}
|
||||
|
||||
totalFileSize += size
|
||||
totalFileSize += contentLength
|
||||
}
|
||||
|
||||
return totalFileSize
|
||||
|
||||
@@ -7,10 +7,10 @@ import minimist from 'minimist'
import mongodb from 'mongodb-legacy'
import { db } from '../app/src/infrastructure/mongodb.js'
import Errors from '../app/src/Features/Errors/Errors.js'
import FileStoreHandler from '../app/src/Features/FileStore/FileStoreHandler.js'
import ProjectEntityMongoUpdateHandler from '../app/src/Features/Project/ProjectEntityMongoUpdateHandler.js'
import { iterablePaths } from '../app/src/Features/Project/IterablePath.js'
import { scriptRunner } from './lib/ScriptRunner.mjs'
import HistoryManager from '../app/src/Features/History/HistoryManager.js'

const { ObjectId } = mongodb

@@ -57,22 +57,22 @@ async function getProjects() {

async function processProject(project) {
console.log(`Processing project ${project._id}`)
const { docIds, fileIds } = findRefsInFolder(project.rootFolder[0])
const { docIds, fileRefs } = findRefsInFolder(project.rootFolder[0])
for (const docId of docIds) {
if (!(await docExists(docId))) {
await deleteDoc(project._id, docId)
}
}
for (const fileId of fileIds) {
if (!(await fileExists(project._id, fileId))) {
await deleteFile(project._id, fileId)
for (const fileRef of fileRefs) {
if (!(await fileExists(project._id, fileRef.hash))) {
await deleteFile(project._id, fileRef._id)
}
}
}

function findRefsInFolder(folder) {
let docIds = folder.docs.map(doc => doc._id)
let fileIds = folder.fileRefs.map(file => file._id)
let fileIds = folder.fileRefs.slice()
for (const subfolder of iterablePaths(folder, 'folders')) {
const subrefs = findRefsInFolder(subfolder)
docIds = docIds.concat(subrefs.docIds)
@@ -86,10 +86,14 @@ async function docExists(docId) {
return doc != null
}

async function fileExists(projectId, fileId) {
async function fileExists(projectId, hash) {
try {
// Getting the file size to avoid downloading the whole file
await FileStoreHandler.promises.getFileSize(projectId, fileId)
await HistoryManager.promises.requestBlobWithProjectId(
projectId,
hash,
'HEAD'
)
} catch (err) {
if (err instanceof Errors.NotFoundError) {
return false

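Pieced together from the interleaved before/after lines, the new existence check reads roughly as below. This is a reconstruction for readability, not verbatim from the commit; the trailing return true is assumed from the function's call sites:

// Reconstruction: a HEAD probe by content hash replaces the old
// per-file-id size lookup against filestore.
async function fileExists(projectId, hash) {
  try {
    await HistoryManager.promises.requestBlobWithProjectId(projectId, hash, 'HEAD')
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      return false
    }
    throw err
  }
  return true
}
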
@@ -9,22 +9,16 @@ import settings from '@overleaf/settings'
import { db, ObjectId } from '../../../app/src/infrastructure/mongodb.js'
import Features from '../../../app/src/infrastructure/Features.js'
import MockDocstoreApiClass from './mocks/MockDocstoreApi.mjs'
import MockFilestoreApiClass from './mocks/MockFilestoreApi.mjs'
import MockChatApiClass from './mocks/MockChatApi.mjs'
import MockGitBridgeApiClass from './mocks/MockGitBridgeApi.mjs'
import MockHistoryBackupDeletionApiClass from './mocks/MockHistoryBackupDeletionApi.mjs'

let MockDocstoreApi,
MockFilestoreApi,
MockChatApi,
MockGitBridgeApi,
MockHistoryBackupDeletionApi
let MockDocstoreApi, MockChatApi, MockGitBridgeApi, MockHistoryBackupDeletionApi

let spy

before(function () {
MockDocstoreApi = MockDocstoreApiClass.instance()
MockFilestoreApi = MockFilestoreApiClass.instance()
MockChatApi = MockChatApiClass.instance()
MockGitBridgeApi = MockGitBridgeApiClass.instance()
MockHistoryBackupDeletionApi = MockHistoryBackupDeletionApiClass.instance()
@@ -284,9 +278,6 @@ describe('Deleting a project', function () {
done()
}
)
MockFilestoreApi.files[this.projectId.toString()] = {
dummyFile: 'wombat',
}
MockChatApi.projects[this.projectId.toString()] = ['message']
if (Features.hasFeature('git-bridge')) {
MockGitBridgeApi.projects[this.projectId.toString()] = {
@@ -352,33 +343,6 @@ describe('Deleting a project', function () {
)
})

it('Should destroy the files if filestore is in use', function (done) {
expect(MockFilestoreApi.files[this.projectId.toString()]).to.exist

request.post(
`/internal/project/${this.projectId}/expire-deleted-project`,
{
auth: {
user: settings.apis.web.user,
pass: settings.apis.web.pass,
sendImmediately: true,
},
},
(error, res) => {
expect(error).not.to.exist
expect(res.statusCode).to.equal(200)
if (Features.hasFeature('filestore')) {
expect(MockFilestoreApi.files[this.projectId.toString()]).not.to
.exist
} else {
// don't touch files in filestore if it's not in use
expect(MockFilestoreApi.files[this.projectId.toString()]).to.exist
}
done()
}
)
})

it('Should destroy the chat', function (done) {
expect(MockChatApi.projects[this.projectId.toString()]).to.exist


@@ -4,19 +4,16 @@ import { expect } from 'chai'
import UserHelper from './helpers/User.mjs'
import MockV1HistoryApiClass from './mocks/MockV1HistoryApi.mjs'
import ProjectGetter from '../../../app/src/Features/Project/ProjectGetter.js'
import MockFilestoreApiClass from './mocks/MockFilestoreApi.mjs'
import { fileURLToPath } from 'node:url'
import sinon from 'sinon'
import logger from '@overleaf/logger'
import Metrics from './helpers/metrics.mjs'
import Features from '../../../app/src/infrastructure/Features.js'
const User = UserHelper.promises

let MockV1HistoryApi, MockFilestoreApi
let MockV1HistoryApi

before(function () {
MockV1HistoryApi = MockV1HistoryApiClass.instance()
MockFilestoreApi = MockFilestoreApiClass.instance()
})

const __dirname = fileURLToPath(new URL('.', import.meta.url))
@@ -26,8 +23,8 @@ const fileContent = fs.readFileSync(
)

describe('HistoryTests', function () {
let user, projectId, fileId, fileHash, fileURL, blobURL, blobURLWithFallback
let historySource, filestoreSource
let user, projectId, fileId, fileHash, fileURL, blobURL
let historySource

async function getSourceMetric(source) {
return await Metrics.promises.getMetric(
@@ -50,22 +47,14 @@ describe('HistoryTests', function () {
))
fileURL = `/project/${projectId}/file/${fileId}`
blobURL = `/project/${projectId}/blob/${fileHash}`
blobURLWithFallback = `${blobURL}?fallback=${fileId}`
historySource = await getSourceMetric('history-v1')
filestoreSource = await getSourceMetric('filestore')
})

async function expectHistoryV1Hit() {
expect(await getSourceMetric('history-v1')).to.equal(historySource + 1)
expect(await getSourceMetric('filestore')).to.equal(filestoreSource)
}
async function expectFilestoreHit() {
expect(await getSourceMetric('history-v1')).to.equal(historySource)
expect(await getSourceMetric('filestore')).to.equal(filestoreSource + 1)
}
async function expectNoIncrement() {
expect(await getSourceMetric('history-v1')).to.equal(historySource)
expect(await getSourceMetric('filestore')).to.equal(filestoreSource)
}

describe('/project/:projectId/download/zip', function () {
@@ -77,97 +66,62 @@ describe('HistoryTests', function () {
afterEach(function () {
spy.restore()
})
if (Features.hasFeature('project-history-blobs')) {
it('should work from history-v1', async function () {
const { response, body } = await user.doRequest('GET', downloadZIPURL)
expect(response.statusCode).to.equal(200)
expect(body).to.include('2pixel.png')
await expectHistoryV1Hit()
})
if (Features.hasFeature('filestore')) {
it('should work from filestore', async function () {
MockV1HistoryApi.reset()
const { response, body } = await user.doRequest('GET', downloadZIPURL)
expect(response.statusCode).to.equal(200)
expect(body).to.include('2pixel.png')
await expectFilestoreHit()
})
}
it('should not include when missing in both places', async function () {
MockFilestoreApi.reset()
MockV1HistoryApi.reset()
const { response, body } = await user.doRequest('GET', downloadZIPURL)
expect(response.statusCode).to.equal(200)
expect(
spy.args.find(([, msg]) => msg === 'error adding files to zip stream')
).to.exist
expect(body).to.not.include('2pixel.png')
await expectNoIncrement()
})
}
it('should work from history-v1', async function () {
const { response, body } = await user.doRequest('GET', downloadZIPURL)
expect(response.statusCode).to.equal(200)
expect(body).to.include('2pixel.png')
await expectHistoryV1Hit()
})
it('should not include when missing', async function () {
MockV1HistoryApi.reset()
const { response, body } = await user.doRequest('GET', downloadZIPURL)
expect(response.statusCode).to.equal(200)
expect(
spy.args.find(([, msg]) => msg === 'error adding files to zip stream')
).to.exist
expect(body).to.not.include('2pixel.png')
await expectNoIncrement()
})
})

describe('/project/:projectId/blob/:hash', function () {
describe('HEAD', function () {
if (Features.hasFeature('project-history-blobs')) {
it('should fetch the file size from history-v1', async function () {
const { response } = await user.doRequest('HEAD', blobURL)
expect(response.statusCode).to.equal(200)
expect(response.headers['x-served-by']).to.include('history-v1')
expect(response.headers['content-length']).to.equal('3694')
await expectHistoryV1Hit()
})
}
it('should fetch the file size from history-v1', async function () {
const { response } = await user.doRequest('HEAD', blobURL)
expect(response.statusCode).to.equal(200)
expect(response.headers['content-length']).to.equal('3694')
await expectHistoryV1Hit()
})
it('should return 404 without fallback', async function () {
MockV1HistoryApi.reset()
const { response } = await user.doRequest('HEAD', blobURL)
expect(response.statusCode).to.equal(404)
await expectNoIncrement()
})
if (Features.hasFeature('filestore')) {
it('should fetch the file size from filestore when missing in history-v1', async function () {
MockV1HistoryApi.reset()
const { response } = await user.doRequest('HEAD', blobURLWithFallback)
expect(response.statusCode).to.equal(200)
expect(response.headers['x-served-by']).to.include('filestore')
expect(response.headers['content-length']).to.equal('3694')
await expectFilestoreHit()
})
}
it('should return 404 with both files missing', async function () {
MockFilestoreApi.reset()
MockV1HistoryApi.reset()
const { response } = await user.doRequest('HEAD', blobURLWithFallback)
expect(response.statusCode).to.equal(404)
await expectNoIncrement()
})
})
describe('GET', function () {
if (Features.hasFeature('project-history-blobs')) {
it('should fetch the file from history-v1', async function () {
const { response, body } = await user.doRequest('GET', blobURL)
expect(response.statusCode).to.equal(200)
expect(response.headers['x-served-by']).to.include('history-v1')
expect(body).to.equal(fileContent)
await expectHistoryV1Hit()
it('should fetch the file from history-v1', async function () {
const { response, body } = await user.doRequest('GET', blobURL)
expect(response.statusCode).to.equal(200)
expect(body).to.equal(fileContent)
await expectHistoryV1Hit()
})
it('should set cache headers', async function () {
const { response } = await user.doRequest('GET', blobURL)
expect(response.headers['cache-control']).to.equal(
'private, max-age=86400, stale-while-revalidate=31536000'
)
expect(response.headers.etag).to.equal(fileHash)
})
it('should return a 304 when revalidating', async function () {
const { response, body } = await user.doRequest('GET', {
url: blobURL,
headers: { 'If-None-Match': fileHash },
})
it('should set cache headers', async function () {
const { response } = await user.doRequest('GET', blobURL)
expect(response.headers['cache-control']).to.equal(
'private, max-age=86400, stale-while-revalidate=31536000'
)
expect(response.headers.etag).to.equal(fileHash)
})
it('should return a 304 when revalidating', async function () {
const { response, body } = await user.doRequest('GET', {
url: blobURL,
headers: { 'If-None-Match': fileHash },
})
expect(response.statusCode).to.equal(304)
expect(response.headers.etag).to.equal(fileHash)
expect(body).to.equal('')
})
}
expect(response.statusCode).to.equal(304)
expect(response.headers.etag).to.equal(fileHash)
expect(body).to.equal('')
})
it('should return 404 without fallback', async function () {
MockV1HistoryApi.reset()
const { response } = await user.doRequest('GET', blobURL)
@@ -181,67 +135,31 @@ describe('HistoryTests', function () {
expect(response.headers).not.to.have.property('cache-control')
expect(response.headers).not.to.have.property('etag')
})
if (Features.hasFeature('filestore')) {
it('should fetch the file size from filestore when missing in history-v1', async function () {
MockV1HistoryApi.reset()
const { response, body } = await user.doRequest(
'GET',
blobURLWithFallback
)
expect(response.statusCode).to.equal(200)
expect(response.headers['x-served-by']).to.include('filestore')
expect(body).to.equal(fileContent)
await expectFilestoreHit()
})
}
it('should return 404 with both files missing', async function () {
MockFilestoreApi.reset()
MockV1HistoryApi.reset()
const { response } = await user.doRequest('GET', blobURLWithFallback)
expect(response.statusCode).to.equal(404)
await expectNoIncrement()
})
})
})

// Legacy endpoint that is powered by history-v1 in SaaS
describe('/project/:projectId/file/:fileId', function () {
describe('HEAD', function () {
if (Features.hasFeature('project-history-blobs')) {
it('should fetch the file size from history-v1', async function () {
const { response } = await user.doRequest('HEAD', fileURL)
expect(response.statusCode).to.equal(200)
expect(response.headers['x-served-by']).to.include('history-v1')
expect(response.headers['content-length']).to.equal('3694')
await expectHistoryV1Hit()
})
}
if (Features.hasFeature('filestore')) {
it('should fetch the file size from filestore when missing in history-v1', async function () {
MockV1HistoryApi.reset()
const { response } = await user.doRequest('HEAD', blobURLWithFallback)
expect(response.statusCode).to.equal(200)
expect(response.headers['x-served-by']).to.include('filestore')
expect(response.headers['content-length']).to.equal('3694')
})
}
it('should fetch the file size from history-v1', async function () {
const { response } = await user.doRequest('HEAD', fileURL)
expect(response.statusCode).to.equal(200)
expect(response.headers['content-length']).to.equal('3694')
await expectHistoryV1Hit()
})
it('should return 404 with both files missing', async function () {
MockFilestoreApi.reset()
MockV1HistoryApi.reset()
const { response } = await user.doRequest('HEAD', blobURLWithFallback)
const { response } = await user.doRequest('HEAD', blobURL)
expect(response.statusCode).to.equal(404)
})
})
describe('GET', function () {
if (Features.hasFeature('project-history-blobs')) {
it('should fetch the file from history-v1', async function () {
const { response, body } = await user.doRequest('GET', fileURL)
expect(response.statusCode).to.equal(200)
expect(response.headers['x-served-by']).to.include('history-v1')
expect(body).to.equal(fileContent)
await expectHistoryV1Hit()
})
}
it('should fetch the file from history-v1', async function () {
const { response, body } = await user.doRequest('GET', fileURL)
expect(response.statusCode).to.equal(200)
expect(body).to.equal(fileContent)
await expectHistoryV1Hit()
})
it('should set cache headers', async function () {
const { response } = await user.doRequest('GET', fileURL)
expect(response.headers['cache-control']).to.equal(
@@ -250,7 +168,6 @@ describe('HistoryTests', function () {
})
it('should not set cache headers on 404', async function () {
MockV1HistoryApi.reset()
MockFilestoreApi.reset()
// The legacy filestore downloads are not properly handling 404s, so delete the file from the file-tree to trigger the 404. All the filestore code will be removed soon.
await user.doRequest('DELETE', fileURL)

@@ -259,17 +176,7 @@ describe('HistoryTests', function () {
expect(response.headers).not.to.have.property('cache-control')
expect(response.headers).not.to.have.property('etag')
})
if (Features.hasFeature('filestore')) {
it('should fetch the file size from filestore when missing in history-v1', async function () {
MockV1HistoryApi.reset()
const { response, body } = await user.doRequest('GET', fileURL)
expect(response.statusCode).to.equal(200)
expect(response.headers['x-served-by']).to.include('filestore')
expect(body).to.equal(fileContent)
})
}
it('should return 404 with both files missing', async function () {
MockFilestoreApi.reset()
it('should return 404 when missing', async function () {
MockV1HistoryApi.reset()
const { response } = await user.doRequest('GET', fileURL)
expect(response.statusCode).to.equal(404)

@@ -6,7 +6,6 @@ import MockChatApi from './mocks/MockChatApi.mjs'
import MockClsiApi from './mocks/MockClsiApi.mjs'
import MockDocstoreApi from './mocks/MockDocstoreApi.mjs'
import MockDocUpdaterApi from './mocks/MockDocUpdaterApi.mjs'
import MockFilestoreApi from './mocks/MockFilestoreApi.mjs'
import MockGitBridgeApi from './mocks/MockGitBridgeApi.mjs'
import MockNotificationsApi from './mocks/MockNotificationsApi.mjs'
import MockProjectHistoryApi from './mocks/MockProjectHistoryApi.mjs'
@@ -25,7 +24,6 @@ MockChatApi.initialize(23010, mockOpts)
MockClsiApi.initialize(23013, mockOpts)
MockDocstoreApi.initialize(23016, mockOpts)
MockDocUpdaterApi.initialize(23003, mockOpts)
MockFilestoreApi.initialize(23009, mockOpts)
MockNotificationsApi.initialize(23042, mockOpts)
MockSpellingApi.initialize(23005, mockOpts)
MockHaveIBeenPwnedApi.initialize(1337, mockOpts)

@@ -6,18 +6,15 @@ import _ from 'lodash'
import User from './helpers/User.mjs'
import UserHelper from './helpers/UserHelper.mjs'
import MockDocstoreApiClass from './mocks/MockDocstoreApi.mjs'
import MockFilestoreApiClass from './mocks/MockFilestoreApi.mjs'
import MockV1HistoryApiClass from './mocks/MockV1HistoryApi.mjs'
import { fileURLToPath } from 'node:url'
import Features from '../../../app/src/infrastructure/Features.js'

let MockDocstoreApi, MockFilestoreApi, MockV1HistoryApi
let MockDocstoreApi, MockV1HistoryApi

const __dirname = fileURLToPath(new URL('.', import.meta.url))

before(function () {
MockDocstoreApi = MockDocstoreApiClass.instance()
MockFilestoreApi = MockFilestoreApiClass.instance()
MockV1HistoryApi = MockV1HistoryApiClass.instance()
})

@@ -83,19 +80,11 @@ describe('ProjectDuplicateNames', function () {
expect(Object.keys(docs).length).to.equal(2)
})

if (Features.hasFeature('project-history-blobs')) {
it('should create one file in the history-v1', function () {
const files =
MockV1HistoryApi.blobs[this.project.overleaf.history.id.toString()]
expect(Object.keys(files).length).to.equal(1)
})
}
if (Features.hasFeature('filestore')) {
it('should create one file in the filestore', function () {
const files = MockFilestoreApi.files[this.example_project_id]
expect(Object.keys(files).length).to.equal(1)
})
}
it('should create one file in the history-v1', function () {
const files =
MockV1HistoryApi.blobs[this.project.overleaf.history.id.toString()]
expect(Object.keys(files).length).to.equal(1)
})

describe('for an existing doc', function () {
describe('trying to add a doc with the same name', function () {

@@ -1,81 +0,0 @@
import AbstractMockApi from './AbstractMockApi.mjs'

class MockFilestoreApi extends AbstractMockApi {
reset() {
this.files = {}
}

applyRoutes() {
this.app.post('/project/:projectId/file/:fileId', (req, res) => {
const chunks = []
req.on('data', chunk => chunks.push(chunk))

req.on('end', () => {
const content = Buffer.concat(chunks)
const { projectId, fileId } = req.params
if (!this.files[projectId]) {
this.files[projectId] = {}
}
this.files[projectId][fileId] = content
res.sendStatus(200)
})
})

this.app.head('/project/:projectId/file/:fileId', (req, res) => {
const { projectId, fileId } = req.params
const content = this.files[projectId]?.[fileId]
if (!content) return res.status(404).end()
res.set('Content-Length', content.byteLength)
res.status(200).end()
})

this.app.get('/project/:projectId/file/:fileId', (req, res) => {
const { projectId, fileId } = req.params
const content = this.files[projectId]?.[fileId]
if (!content) return res.status(404).end()
res.status(200).end(content)
})

// handle file copying
this.app.put('/project/:projectId/file/:fileId', (req, res) => {
const { projectId, fileId } = req.params
const { source } = req.body
const content =
this.files[source.project_id] &&
this.files[source.project_id][source.file_id]
if (!content) {
res.sendStatus(500)
} else {
if (!this.files[projectId]) {
this.files[projectId] = {}
}
this.files[projectId][fileId] = content
res.sendStatus(200)
}
})

this.app.delete('/project/:projectId', (req, res) => {
const { projectId } = req.params
delete this.files[projectId]
res.sendStatus(204)
})
}

getFile(projectId, fileId) {
return (
this.files[projectId] &&
this.files[projectId][fileId] &&
this.files[projectId][fileId].toString()
)
}
}

export default MockFilestoreApi

// type hint for the inherited `instance` method
/**
* @function instance
* @memberOf MockFilestoreApi
* @static
* @returns {MockFilestoreApi}
*/
@@ -154,7 +154,7 @@ describe('Path utils', function () {
'test-folder/example.png'
)
expect(preview).to.deep.equal({
url: '/project/test-project-id/blob/42?fallback=test-file-in-folder',
url: '/project/test-project-id/blob/42',
extension: 'png',
})
})

@@ -1,6 +1,5 @@
export function resetMeta() {
window.metaAttributesCache = new Map()
window.metaAttributesCache.set('ol-projectHistoryBlobsEnabled', true)
window.metaAttributesCache.set('ol-i18n', { currentLangCode: 'en' })
window.metaAttributesCache.set('ol-capabilities', [
'chat',

@@ -149,9 +149,6 @@ describe('ClsiManager', function () {
this.ClsiCacheHandler = {
clearCache: sinon.stub().resolves(),
}
this.Features = {
hasFeature: sinon.stub().withArgs('project-history-blobs').returns(true),
}
this.HistoryManager = {
getFilestoreBlobURL: sinon.stub().callsFake((historyId, hash) => {
if (hash === GLOBAL_BLOB_HASH) {
@@ -167,7 +164,6 @@ describe('ClsiManager', function () {
'../../models/Project': {
Project: this.Project,
},
'../../infrastructure/Features': this.Features,
'../Project/ProjectEntityHandler': this.ProjectEntityHandler,
'../Project/ProjectGetter': this.ProjectGetter,
'../DocumentUpdater/DocumentUpdaterHandler':
@@ -1048,20 +1044,15 @@ function _makeResources(project, docs, files) {
})
}
for (const [path, file] of Object.entries(files)) {
let url, fallbackURL
let url
if (file.hash === GLOBAL_BLOB_HASH) {
url = `${FILESTORE_URL}/history/global/hash/${file.hash}`
fallbackURL = `${FILESTORE_URL}/project/${project._id}/file/${file._id}`
} else if (file.hash) {
url = `${FILESTORE_URL}/history/project/${project.overleaf.history.id}/hash/${file.hash}`
fallbackURL = `${FILESTORE_URL}/project/${project._id}/file/${file._id}`
} else {
url = `${FILESTORE_URL}/project/${project._id}/file/${file._id}`
url = `${FILESTORE_URL}/history/project/${project.overleaf.history.id}/hash/${file.hash}`
}
resources.push({
path: path.replace(/^\//, ''),
url,
fallbackURL,
modified: file.created.getTime(),
})
}

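Read as after-state only, the URL selection in _makeResources collapses to two cases. A reconstruction for readability, with names exactly as in the hunk above:

// Global blobs live in a shared store; every other file is addressed by the
// project's history id plus the file's content hash. No fallbackURL remains.
let url
if (file.hash === GLOBAL_BLOB_HASH) {
  url = `${FILESTORE_URL}/history/global/hash/${file.hash}`
} else {
  url = `${FILESTORE_URL}/history/project/${project.overleaf.history.id}/hash/${file.hash}`
}
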
@@ -29,7 +29,6 @@ describe('DocumentUpdaterHandler', function () {
url: 'http://project_history.example.com',
},
},
filestoreMigrationLevel: 0,
moduleImportSequence: [],
}
this.source = 'dropbox'
@@ -55,11 +54,6 @@ describe('DocumentUpdaterHandler', function () {
done() {}
},
},
'../FileStore/FileStoreHandler': {
_buildUrl: sinon.stub().callsFake((projectId, fileId) => {
return `http://filestore/project/${projectId}/file/${fileId}`
}),
},
'../../infrastructure/Modules': {
promises: {
hooks: {
@@ -1157,11 +1151,10 @@ describe('DocumentUpdaterHandler', function () {
pathname: '/foo',
docLines: 'a\nb',
historyRangesSupport: false,
url: undefined,
hash: undefined,
ranges: undefined,
metadata: undefined,
createdBlob: false,
createdBlob: true,
},
]

@@ -1195,7 +1188,6 @@ describe('DocumentUpdaterHandler', function () {
newFiles: [
{
path: '/bar',
url: 'filestore.example.com/file',
file: { _id: this.fileId, hash: '12345' },
},
],
@@ -1207,12 +1199,11 @@ describe('DocumentUpdaterHandler', function () {
type: 'add-file',
id: this.fileId.toString(),
pathname: '/bar',
url: 'filestore.example.com/file',
docLines: undefined,
historyRangesSupport: false,
hash: '12345',
ranges: undefined,
createdBlob: false,
createdBlob: true,
metadata: undefined,
},
]
@@ -1290,7 +1281,6 @@ describe('DocumentUpdaterHandler', function () {
oldFiles: [
{
path: '/foo.doc',
url: 'filestore.example.com/file',
file: { _id: this.fileId },
},
],
@@ -1317,11 +1307,10 @@ describe('DocumentUpdaterHandler', function () {
pathname: '/foo.doc',
docLines: 'hello there',
historyRangesSupport: false,
url: undefined,
hash: undefined,
ranges: undefined,
metadata: undefined,
createdBlob: false,
createdBlob: true,
},
]

@@ -1421,11 +1410,10 @@ describe('DocumentUpdaterHandler', function () {
pathname: '/foo',
docLines: 'foo\nbar',
historyRangesSupport: false,
url: undefined,
hash: undefined,
ranges: this.ranges,
metadata: undefined,
createdBlob: false,
createdBlob: true,
},
]

@@ -1466,11 +1454,10 @@ describe('DocumentUpdaterHandler', function () {
pathname: '/foo',
docLines: 'foo\nbar',
historyRangesSupport: true,
url: undefined,
hash: undefined,
ranges: this.ranges,
metadata: undefined,
createdBlob: false,
createdBlob: true,
},
]

@@ -1498,16 +1485,12 @@ describe('DocumentUpdaterHandler', function () {
})

describe('with filestore disabled', function () {
beforeEach(function () {
this.settings.filestoreMigrationLevel = 2
})
it('should add files without URL and with createdBlob', async function () {
this.fileId = new ObjectId()
this.changes = {
newFiles: [
{
path: '/bar',
url: 'filestore.example.com/file',
file: { _id: this.fileId, hash: '12345' },
},
],
@@ -1521,7 +1504,6 @@ describe('DocumentUpdaterHandler', function () {
pathname: '/bar',
docLines: undefined,
historyRangesSupport: false,
url: undefined,
hash: '12345',
ranges: undefined,
createdBlob: true,
@@ -1556,7 +1538,6 @@ describe('DocumentUpdaterHandler', function () {
newFiles: [
{
path: '/bar',
url: 'filestore.example.com/file',
file: { _id: this.fileId },
},
],
@@ -1671,16 +1652,14 @@ describe('DocumentUpdaterHandler', function () {
file: fileId1,
_hash: '42',
path: '1.png',
url: `http://filestore/project/${projectId}/file/${fileId1}`,
createdBlob: false,
createdBlob: true,
metadata: undefined,
},
{
file: fileId2,
_hash: '1337',
path: '1.bib',
url: `http://filestore/project/${projectId}/file/${fileId2}`,
createdBlob: false,
createdBlob: true,
metadata: {
importedAt: fileCreated2,
provider: 'references-provider',
@@ -1690,8 +1669,7 @@ describe('DocumentUpdaterHandler', function () {
file: fileId3,
_hash: '21',
path: 'bar.txt',
url: `http://filestore/project/${projectId}/file/${fileId3}`,
createdBlob: false,
createdBlob: true,
metadata: {
importedAt: fileCreated3,
provider: 'project_output_file',
@@ -1706,151 +1684,143 @@ describe('DocumentUpdaterHandler', function () {
timeout: 6 * 60 * 1000,
})
})
describe('with filestore disabled', function () {
beforeEach(function () {
this.settings.filestoreMigrationLevel = 2
})
it('should add files without URL', async function () {
const fileId1 = new ObjectId()
const fileId2 = new ObjectId()
const fileId3 = new ObjectId()
const fileCreated2 = new Date()
const fileCreated3 = new Date()
const otherProjectId = new ObjectId().toString()
const files = [
{ file: { _id: fileId1, hash: '42' }, path: '1.png' },
{
file: {
_id: fileId2,
hash: '1337',
created: fileCreated2,
linkedFileData: {
it('should add files without URL', async function () {
const fileId1 = new ObjectId()
const fileId2 = new ObjectId()
const fileId3 = new ObjectId()
const fileCreated2 = new Date()
const fileCreated3 = new Date()
const otherProjectId = new ObjectId().toString()
const files = [
{ file: { _id: fileId1, hash: '42' }, path: '1.png' },
{
file: {
_id: fileId2,
hash: '1337',
created: fileCreated2,
linkedFileData: {
provider: 'references-provider',
},
},
path: '1.bib',
},
{
file: {
_id: fileId3,
hash: '21',
created: fileCreated3,
linkedFileData: {
provider: 'project_output_file',
build_id: '1234-abc',
clsiServerId: 'server-1',
source_project_id: otherProjectId,
source_output_file_path: 'foo/bar.txt',
},
},
path: 'bar.txt',
},
]
const docs = []
this.request.yields(null, { statusCode: 200 })
const projectId = new ObjectId()
const projectHistoryId = 99
await this.handler.promises.resyncProjectHistory(
projectId,
projectHistoryId,
docs,
files,
{}
)
this.request.should.have.been.calledWith({
url: `${this.settings.apis.documentupdater.url}/project/${projectId}/history/resync`,
method: 'POST',
json: {
docs: [],
files: [
{
file: fileId1,
_hash: '42',
path: '1.png',
createdBlob: true,
metadata: undefined,
},
{
file: fileId2,
_hash: '1337',
path: '1.bib',
createdBlob: true,
metadata: {
importedAt: fileCreated2,
provider: 'references-provider',
},
},
path: '1.bib',
},
{
file: {
_id: fileId3,
hash: '21',
created: fileCreated3,
linkedFileData: {
{
file: fileId3,
_hash: '21',
path: 'bar.txt',
createdBlob: true,
metadata: {
importedAt: fileCreated3,
provider: 'project_output_file',
build_id: '1234-abc',
clsiServerId: 'server-1',
source_project_id: otherProjectId,
source_output_file_path: 'foo/bar.txt',
// build_id and clsiServerId are omitted
},
},
path: 'bar.txt',
],
projectHistoryId,
},
timeout: 6 * 60 * 1000,
})
})
it('should flag files with missing hashes', async function () {
const fileId1 = new ObjectId()
const fileId2 = new ObjectId()
const fileId3 = new ObjectId()
const fileCreated2 = new Date()
const fileCreated3 = new Date()
const otherProjectId = new ObjectId().toString()
const files = [
{ file: { _id: fileId1, hash: '42' }, path: '1.png' },
{
file: {
_id: fileId2,
created: fileCreated2,
linkedFileData: {
provider: 'references-provider',
},
},
]
const docs = []
this.request.yields(null, { statusCode: 200 })
const projectId = new ObjectId()
const projectHistoryId = 99
await this.handler.promises.resyncProjectHistory(
path: '1.bib',
},
{
file: {
_id: fileId3,
hash: '21',
created: fileCreated3,
linkedFileData: {
provider: 'project_output_file',
build_id: '1234-abc',
clsiServerId: 'server-1',
source_project_id: otherProjectId,
source_output_file_path: 'foo/bar.txt',
},
},
path: 'bar.txt',
},
]
const docs = []
this.request.yields(null, { statusCode: 200 })
const projectId = new ObjectId()
const projectHistoryId = 99
await expect(
this.handler.promises.resyncProjectHistory(
projectId,
projectHistoryId,
docs,
files,
{}
)
this.request.should.have.been.calledWith({
url: `${this.settings.apis.documentupdater.url}/project/${projectId}/history/resync`,
method: 'POST',
json: {
docs: [],
files: [
{
file: fileId1,
_hash: '42',
path: '1.png',
url: undefined,
createdBlob: true,
metadata: undefined,
},
{
file: fileId2,
_hash: '1337',
path: '1.bib',
url: undefined,
createdBlob: true,
metadata: {
importedAt: fileCreated2,
provider: 'references-provider',
},
},
{
file: fileId3,
_hash: '21',
path: 'bar.txt',
url: undefined,
createdBlob: true,
metadata: {
importedAt: fileCreated3,
provider: 'project_output_file',
source_project_id: otherProjectId,
source_output_file_path: 'foo/bar.txt',
// build_id and clsiServerId are omitted
},
},
],
projectHistoryId,
},
timeout: 6 * 60 * 1000,
})
})
it('should flag files with missing hashes', async function () {
const fileId1 = new ObjectId()
const fileId2 = new ObjectId()
const fileId3 = new ObjectId()
const fileCreated2 = new Date()
const fileCreated3 = new Date()
const otherProjectId = new ObjectId().toString()
const files = [
{ file: { _id: fileId1, hash: '42' }, path: '1.png' },
{
file: {
_id: fileId2,
created: fileCreated2,
linkedFileData: {
provider: 'references-provider',
},
},
path: '1.bib',
},
{
file: {
_id: fileId3,
hash: '21',
created: fileCreated3,
linkedFileData: {
provider: 'project_output_file',
build_id: '1234-abc',
clsiServerId: 'server-1',
source_project_id: otherProjectId,
source_output_file_path: 'foo/bar.txt',
},
},
path: 'bar.txt',
},
]
const docs = []
this.request.yields(null, { statusCode: 200 })
const projectId = new ObjectId()
const projectHistoryId = 99
await expect(
this.handler.promises.resyncProjectHistory(
projectId,
projectHistoryId,
docs,
files,
{}
)
).to.be.rejected
})
).to.be.rejected
})
})


@@ -58,15 +58,6 @@ describe('ProjectZipStreamManager', function () {
})
)

vi.doMock('../../../../app/src/infrastructure/Features', () => ({
default: (ctx.Features = {
hasFeature: sinon
.stub()
.withArgs('project-history-blobs')
.returns(true),
}),
}))

ctx.ProjectZipStreamManager = (await import(modulePath)).default
})

@@ -411,93 +402,55 @@ describe('ProjectZipStreamManager', function () {
},
}
ctx.streams = {
'file-id-1': new EventEmitter(),
'file-id-2': new EventEmitter(),
abc: new EventEmitter(),
def: new EventEmitter(),
}
ctx.ProjectEntityHandler.getAllFiles = sinon
.stub()
.callsArgWith(1, null, ctx.files)
ctx.HistoryManager.requestBlobWithProjectId = (
projectId,
hash,
callback
) => {
return callback(null, { stream: ctx.streams[hash] })
}
sinon.spy(ctx.HistoryManager, 'requestBlobWithProjectId')
ctx.ProjectZipStreamManager.addAllFilesToArchive(
ctx.project_id,
ctx.archive,
ctx.callback
)
for (const hash in ctx.streams) {
const stream = ctx.streams[hash]
stream.emit('end')
}
})
describe('with project-history-blobs feature enabled', function () {
beforeEach(function (ctx) {
ctx.HistoryManager.requestBlobWithFallback = (
projectId,
hash,
fileId,
callback
) => {
return callback(null, { stream: ctx.streams[fileId] })
}
sinon.spy(ctx.HistoryManager, 'requestBlobWithFallback')
ctx.ProjectZipStreamManager.addAllFilesToArchive(
ctx.project_id,
ctx.archive,
ctx.callback
)
for (const path in ctx.streams) {
const stream = ctx.streams[path]
stream.emit('end')
}
})

it('should get the files for the project', function (ctx) {
return ctx.ProjectEntityHandler.getAllFiles
.calledWith(ctx.project_id)
it('should get the files for the project', function (ctx) {
return ctx.ProjectEntityHandler.getAllFiles
.calledWith(ctx.project_id)
.should.equal(true)
})

it('should get a stream for each file', function (ctx) {
for (const path in ctx.files) {
const file = ctx.files[path]

ctx.HistoryManager.requestBlobWithProjectId
.calledWith(ctx.project_id, file.hash)
.should.equal(true)
})

it('should get a stream for each file', function (ctx) {
for (const path in ctx.files) {
const file = ctx.files[path]

ctx.HistoryManager.requestBlobWithFallback
.calledWith(ctx.project_id, file.hash, file._id)
.should.equal(true)
}
})

it('should add each file to the archive', function (ctx) {
for (let path in ctx.files) {
const file = ctx.files[path]
path = path.slice(1) // remove "/"
ctx.archive.append
.calledWith(ctx.streams[file._id], { name: path })
.should.equal(true)
}
})
}
})

describe('with project-history-blobs feature disabled', function () {
beforeEach(function (ctx) {
ctx.FileStoreHandler.getFileStream = (
projectId,
fileId,
query,
callback
) => callback(null, ctx.streams[fileId])

sinon.spy(ctx.FileStoreHandler, 'getFileStream')
ctx.Features.hasFeature.withArgs('project-history-blobs').returns(false)
ctx.ProjectZipStreamManager.addAllFilesToArchive(
ctx.project_id,
ctx.archive,
ctx.callback
)
for (const path in ctx.streams) {
const stream = ctx.streams[path]
stream.emit('end')
}
})

it('should get a stream for each file', function (ctx) {
for (const path in ctx.files) {
const file = ctx.files[path]

ctx.FileStoreHandler.getFileStream
.calledWith(ctx.project_id, file._id)
.should.equal(true)
}
})
it('should add each file to the archive', function (ctx) {
for (let path in ctx.files) {
const file = ctx.files[path]
path = path.slice(1) // remove "/"
ctx.archive.append.should.have.been.calledWith(ctx.streams[file.hash], {
name: path,
})
}
})
})
})

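The rewritten ProjectZipStreamManager test stubs requestBlobWithProjectId with a (projectId, hash, callback) signature and asserts that archive entries are keyed by content hash. A sketch of the assembly loop the test implies, under those stubbed shapes only:

// Sketch only: stream each file's blob from history-v1 and append it to the
// zip under its tree path, with the leading '/' stripped, as the test asserts.
for (const [path, file] of Object.entries(files)) {
  HistoryManager.requestBlobWithProjectId(projectId, file.hash, (err, { stream }) => {
    if (err) return callback(err)
    archive.append(stream, { name: path.slice(1) })
  })
}
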
@@ -8,7 +8,6 @@ const MODULE_PATH =

const expectedFileHeaders = {
'Cache-Control': 'private, max-age=3600',
'X-Served-By': 'filestore',
}

vi.mock('../../../../app/src/Features/Errors/Errors.js', () =>
@@ -17,15 +16,11 @@ vi.mock('../../../../app/src/Features/Errors/Errors.js', () =>

describe('FileStoreController', function () {
beforeEach(async function (ctx) {
ctx.FileStoreHandler = {
promises: {
getFileStream: sinon.stub(),
getFileSize: sinon.stub(),
},
}
ctx.ProjectLocator = { promises: { findElement: sinon.stub() } }
ctx.Stream = { pipeline: sinon.stub().resolves() }
ctx.HistoryManager = {}
ctx.HistoryManager = {
promises: { requestBlobWithProjectId: sinon.stub() },
}

vi.doMock('node:stream/promises', () => ctx.Stream)

@@ -37,13 +32,6 @@ describe('FileStoreController', function () {
default: ctx.ProjectLocator,
}))

vi.doMock(
'../../../../app/src/Features/FileStore/FileStoreHandler',
() => ({
default: ctx.FileStoreHandler,
})
)

vi.doMock('../../../../app/src/Features/History/HistoryManager', () => ({
default: ctx.HistoryManager,
}))
@@ -67,21 +55,24 @@ describe('FileStoreController', function () {
}
ctx.res = new MockResponse()
ctx.next = sinon.stub()
ctx.file = { name: 'myfile.png' }
ctx.hash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
ctx.file = { name: 'myfile.png', hash: ctx.hash }
})

describe('getFile', function () {
beforeEach(function (ctx) {
ctx.FileStoreHandler.promises.getFileStream.resolves(ctx.stream)
ctx.HistoryManager.promises.requestBlobWithProjectId.resolves({
stream: ctx.stream,
})
ctx.ProjectLocator.promises.findElement.resolves({ element: ctx.file })
})

it('should call the file store handler with the project_id file_id and any query string', async function (ctx) {
it('should call the history manager with the project_id hash', async function (ctx) {
await ctx.controller.getFile(ctx.req, ctx.res)
ctx.FileStoreHandler.promises.getFileStream.should.have.been.calledWith(
ctx.HistoryManager.promises.requestBlobWithProjectId.should.have.been.calledWith(
ctx.req.params.Project_id,
ctx.req.params.File_id,
ctx.req.query
ctx.hash,
'GET'
)
})

@@ -217,12 +208,12 @@ describe('FileStoreController', function () {
it('reports the file size', async function (ctx) {
await new Promise(resolve => {
const expectedFileSize = 99393
ctx.FileStoreHandler.promises.getFileSize.rejects(
ctx.HistoryManager.promises.requestBlobWithProjectId.rejects(
new Error('getFileSize: unexpected arguments')
)
ctx.FileStoreHandler.promises.getFileSize
.withArgs(ctx.projectId, ctx.fileId)
.resolves(expectedFileSize)
ctx.HistoryManager.promises.requestBlobWithProjectId
.withArgs(ctx.projectId, ctx.hash)
.resolves({ contentLength: expectedFileSize })

ctx.res.end = () => {
expect(ctx.res.status.lastCall.args).to.deep.equal([200])
@@ -239,7 +230,7 @@ describe('FileStoreController', function () {

it('returns 404 on NotFoundError', async function (ctx) {
await new Promise(resolve => {
ctx.FileStoreHandler.promises.getFileSize.rejects(
ctx.HistoryManager.promises.requestBlobWithProjectId.rejects(
new Errors.NotFoundError()
)


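Taken together, the controller tests imply this flow for getFile. A sketch under the stubbed shapes only: the findElement argument object and the pipeline import are assumptions, not shown in the hunks:

// Sketch: resolve the fileRef in the project tree to get its content hash,
// then stream the blob from history-v1 to the response.
const { pipeline } = require('node:stream/promises')

async function getFile(req, res) {
  const projectId = req.params.Project_id
  const { element: file } = await ProjectLocator.promises.findElement({
    project_id: projectId,
    element_id: req.params.File_id,
    type: 'file',
  })
  const { stream } = await HistoryManager.promises.requestBlobWithProjectId(
    projectId,
    file.hash,
    'GET'
  )
  await pipeline(stream, res)
}
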
@@ -1,8 +1,6 @@
const { assert, expect } = require('chai')
const { expect } = require('chai')
const sinon = require('sinon')
const SandboxedModule = require('sandboxed-module')
const Errors = require('../../../../app/src/Features/Errors/Errors')
const OError = require('@overleaf/o-error')

const MODULE_PATH = '../../../../app/src/Features/FileStore/FileStoreHandler.js'

@@ -133,206 +131,52 @@ describe('FileStoreHandler', function () {
.should.equal(true)
})

describe('when project-history-blobs feature is enabled', function () {
it('should upload the file to the history store as a blob', async function () {
this.fs.createReadStream.returns({
pipe() {},
on(type, cb) {
if (type === 'open') {
cb()
}
},
})
this.Features.hasFeature.withArgs('project-history-blobs').returns(true)
await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
this.HistoryManager.uploadBlobFromDisk
.calledWith(
this.historyId,
this.hashValue,
this.fileSize,
this.fsPath
)
.should.equal(true)
})
})
describe('when project-history-blobs feature is disabled', function () {
it('should not upload the file to the history store as a blob', async function () {
this.fs.createReadStream.returns({
pipe() {},
on(type, cb) {
if (type === 'open') {
cb()
}
},
})
await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
this.HistoryManager.uploadBlobFromDisk.called.should.equal(false)
it('should upload the file to the history store as a blob', async function () {
this.fs.createReadStream.returns({
pipe() {},
on(type, cb) {
if (type === 'open') {
cb()
}
},
})
await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
this.HistoryManager.uploadBlobFromDisk
.calledWith(this.historyId, this.hashValue, this.fileSize, this.fsPath)
.should.equal(true)
})

describe('when filestore feature is enabled', function () {
beforeEach(function () {
this.Features.hasFeature.withArgs('filestore').returns(true)
})
it('should create read stream', function (done) {
this.fs.createReadStream.returns({
pipe() {},
on(type, cb) {
if (type === 'open') {
cb()
}
},
})
this.handler.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath,
() => {
this.fs.createReadStream.calledWith(this.fsPath).should.equal(true)
done()
}
)
})

it('should pipe the read stream to request', function () {
this.request.returns(this.writeStream)
return new Promise((resolve, reject) => {
this.fs.createReadStream.returns({
on(type, cb) {
if (type === 'open') {
cb()
}
},
pipe: o => {
this.writeStream.should.equal(o)
resolve()
},
})
this.handler.promises
.uploadFileFromDisk(this.projectId, this.fileArgs, this.fsPath)
.catch(reject)
})
})

it('should pass the correct options to request', async function () {
const fileUrl = this.getFileUrl(this.projectId, this.fileId)
this.fs.createReadStream.returns({
pipe: sinon.stub(),
on: sinon.stub((type, cb) => {
if (type === 'open') {
cb()
}
}),
})
await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
this.request.args[0][0].method.should.equal('post')
this.request.args[0][0].uri.should.equal(fileUrl)
})

it('should resolve with the url and fileRef', async function () {
const fileUrl = this.getFileUrl(this.projectId, this.fileId)
this.fs.createReadStream.returns({
pipe: sinon.stub(),
on: sinon.stub((type, cb) => {
if (type === 'open') {
cb()
}
}),
})
const { url, fileRef } = await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
expect(url).to.equal(fileUrl)
expect(fileRef._id).to.equal(this.fileId)
expect(fileRef.hash).to.equal(this.hashValue)
})
describe('when upload to filestore fails', function () {
beforeEach(function () {
this.writeStream.on = function (type, fn) {
if (type === 'response') {
fn({ statusCode: 500 })
}
}
})

it('should reject with an error', async function () {
this.fs.createReadStream.callCount = 0
this.fs.createReadStream.returns({
pipe: sinon.stub(),
on: sinon.stub((type, cb) => {
if (type === 'open') {
cb()
}
}),
})
let error

try {
await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
} catch (err) {
error = err
}

expect(error).to.be.instanceOf(Error)

expect(this.fs.createReadStream.callCount).to.equal(
this.handler.RETRY_ATTEMPTS
)
})
})
it('should not open file handle', async function () {
await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
expect(this.fs.createReadStream).to.not.have.been.called
})
describe('when filestore feature is disabled', function () {
beforeEach(function () {
this.Features.hasFeature.withArgs('filestore').returns(false)
})
it('should not open file handle', async function () {
await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
expect(this.fs.createReadStream).to.not.have.been.called
})

it('should not talk to filestore', async function () {
await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
it('should not talk to filestore', async function () {
await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)

expect(this.request).to.not.have.been.called
})
expect(this.request).to.not.have.been.called
})

it('should resolve with the url and fileRef', async function () {
const fileUrl = this.getFileUrl(this.projectId, this.fileId)
const { url, fileRef } = await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
expect(url).to.equal(fileUrl)
expect(fileRef._id).to.equal(this.fileId)
expect(fileRef.hash).to.equal(this.hashValue)
})
it('should resolve with the url and fileRef', async function () {
const { fileRef } = await this.handler.promises.uploadFileFromDisk(
this.projectId,
this.fileArgs,
this.fsPath
)
expect(fileRef._id).to.equal(this.fileId)
expect(fileRef.hash).to.equal(this.hashValue)
})

describe('symlink', function () {
@@ -383,341 +227,4 @@ describe('FileStoreHandler', function () {
})
})
})

describe('deleteFile', function () {
it('should send a delete request to filestore api', function (done) {
const fileUrl = this.getFileUrl(this.projectId, this.fileId)
this.request.callsArgWith(1, null)

this.handler.deleteFile(this.projectId, this.fileId, err => {
assert.equal(err, undefined)
this.request.args[0][0].method.should.equal('delete')
this.request.args[0][0].uri.should.equal(fileUrl)
done()
})
})

it('should reject with the error if there is one', async function () {
const expectedError = 'my error'
this.request.callsArgWith(1, expectedError)
let error

try {
await this.handler.promises.deleteFile(this.projectId, this.fileId)
} catch (err) {
error = err
}

expect(error).to.equal(expectedError)
})
})

describe('deleteProject', function () {
describe('when filestore is enabled', function () {
beforeEach(function () {
this.Features.hasFeature.withArgs('filestore').returns(true)
})
it('should send a delete request to filestore api', async function () {
const projectUrl = this.getProjectUrl(this.projectId)
this.request.callsArgWith(1, null)

await this.handler.promises.deleteProject(this.projectId)
this.request.args[0][0].method.should.equal('delete')
this.request.args[0][0].uri.should.equal(projectUrl)
})

it('should wrap the error if there is one', async function () {
const expectedError = new Error('my error')
this.request.callsArgWith(1, expectedError)
const promise = this.handler.promises.deleteProject(this.projectId)
let error

try {
await promise
} catch (err) {
error = err
}

expect(error).to.exist

expect(OError.getFullStack(error)).to.match(
/something went wrong deleting a project in filestore/
)
expect(OError.getFullStack(error)).to.match(/my error/)
})
})
describe('when filestore is disabled', function () {
beforeEach(function () {
this.Features.hasFeature.withArgs('filestore').returns(false)
})
it('should not send a delete request to filestore api', async function () {
await this.handler.promises.deleteProject(this.projectId)

this.request.called.should.equal(false)
})
})
})

describe('getFileStream', function () {
describe('when filestore is disabled', function () {
beforeEach(function () {
this.Features.hasFeature.withArgs('filestore').returns(false)
})

it('should callback with a NotFoundError', async function () {
let error

try {
await this.handler.promises.getFileStream(
this.projectId,
this.fileId,
{}
)
} catch (err) {
error = err
}

expect(error).to.be.instanceOf(Errors.NotFoundError)
})

it('should not call request', async function () {
let error

try {
await this.handler.promises.getFileStream(
this.projectId,
this.fileId,
{}
)
} catch (err) {
error = err
}

expect(error).to.exist
this.request.called.should.equal(false)
})
})
describe('when filestore is enabled', function () {
beforeEach(function () {
this.query = {}
this.request.returns(this.readStream)
this.Features.hasFeature.withArgs('filestore').returns(true)
})

it('should get the stream with the correct params', async function () {
const fileUrl = this.getFileUrl(this.projectId, this.fileId)
await this.handler.promises.getFileStream(
this.projectId,
this.fileId,
this.query
)
this.request.args[0][0].method.should.equal('get')
this.request.args[0][0].uri.should.equal(
fileUrl + '?from=getFileStream'
)
})

it('should get stream from request', async function () {
const stream = await this.handler.promises.getFileStream(
this.projectId,
this.fileId,
this.query
)

stream.should.equal(this.readStream)
})

it('should add an error handler', async function () {
const stream = await this.handler.promises.getFileStream(
this.projectId,
this.fileId,
this.query
)
stream.on.calledWith('error').should.equal(true)
})

describe('when range is specified in query', function () {
beforeEach(function () {
this.query = { range: '0-10' }
})

it('should add a range header', async function () {
await this.handler.promises.getFileStream(
this.projectId,
this.fileId,
this.query
)

this.request.callCount.should.equal(1)
const { headers } = this.request.firstCall.args[0]
expect(headers).to.have.keys('range')
expect(headers.range).to.equal('bytes=0-10')
})

describe('when range is invalid', function () {
;['0-', '-100', 'one-two', 'nonsense'].forEach(r => {
beforeEach(function () {
this.query = { range: `${r}` }
})

it(`should not add a range header for '${r}'`, async function () {
await this.handler.promises.getFileStream(
this.projectId,
|
||||
this.fileId,
|
||||
this.query
|
||||
)
|
||||
this.request.callCount.should.equal(1)
|
||||
const { headers } = this.request.firstCall.args[0]
|
||||
expect(headers).to.not.have.keys('range')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
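
The range tests above pin down a narrow contract: a range is only forwarded when it names both a start and an end, rewritten as `bytes=start-end`. A minimal sketch of that behaviour (helper name and regex are illustrative assumptions, not the handler's actual code):

    // Hypothetical sketch: forward only a fully specified byte range.
    // '0-10' -> { range: 'bytes=0-10' }; '0-', '-100', 'one-two' and
    // 'nonsense' produce no range header at all.
    function buildRangeHeader(query = {}) {
      const headers = {}
      if (typeof query.range === 'string' && /^\d+-\d+$/.test(query.range)) {
        headers.range = `bytes=${query.range}`
      }
      return headers
    }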

  describe('getFileSize', function () {
    it('returns the file size reported by filestore', async function () {
      const expectedFileSize = 32432
      const fileUrl =
        this.getFileUrl(this.projectId, this.fileId) + '?from=getFileSize'
      this.request.head.yields(
        new Error('request.head() received unexpected arguments')
      )
      this.request.head.withArgs(fileUrl).yields(null, {
        statusCode: 200,
        headers: {
          'content-length': expectedFileSize,
        },
      })

      const fileSize = await this.handler.promises.getFileSize(
        this.projectId,
        this.fileId
      )
      expect(fileSize).to.equal(expectedFileSize)
    })

    it('throws a NotFoundError on a 404 from filestore', async function () {
      this.request.head.yields(null, { statusCode: 404 })

      let error

      try {
        await this.handler.promises.getFileSize(this.projectId, this.fileId)
      } catch (err) {
        error = err
      }

      expect(error).to.be.instanceOf(Errors.NotFoundError)
    })

    it('throws an error on a non-200 from filestore', async function () {
      this.request.head.yields(null, { statusCode: 500 })

      let error

      try {
        await this.handler.promises.getFileSize(this.projectId, this.fileId)
      } catch (err) {
        error = err
      }

      expect(error).to.be.instanceOf(Error)
    })

    it('rethrows errors from filestore', async function () {
      const expectedError = new Error('from filestore')
      this.request.head.yields(expectedError)

      let error

      try {
        await this.handler.promises.getFileSize(this.projectId, this.fileId)
      } catch (err) {
        error = err
      }

      expect(error).to.equal(expectedError)
    })
  })
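
Together these cases spell out the whole getFileSize contract: a HEAD request to the file URL, a 200 mapped to the content-length header, a 404 mapped to a NotFoundError, any other status to a generic error, and transport errors rethrown unchanged. A rough sketch under those assumptions (the function shape is illustrative):

    // Illustrative only: the status mapping the tests above assert.
    function getFileSize(head, fileUrl, callback) {
      head(fileUrl, (err, res) => {
        if (err) return callback(err) // transport errors bubble up as-is
        if (res.statusCode === 404) return callback(new Errors.NotFoundError())
        if (res.statusCode !== 200) {
          return callback(new Error(`filestore returned ${res.statusCode}`))
        }
        callback(null, res.headers['content-length'])
      })
    }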

  describe('copyFile', function () {
    beforeEach(function () {
      this.newProjectId = 'new project'
      this.newFileId = 'new file id'
    })

    it('should post json', async function () {
      const newFileUrl = this.getFileUrl(this.newProjectId, this.newFileId)
      this.request.callsArgWith(1, null, { statusCode: 200 })

      await this.handler.promises.copyFile(
        this.projectId,
        this.fileId,
        this.newProjectId,
        this.newFileId
      )
      this.request.args[0][0].method.should.equal('put')
      this.request.args[0][0].uri.should.equal(newFileUrl)
      this.request.args[0][0].json.source.project_id.should.equal(
        this.projectId
      )
      this.request.args[0][0].json.source.file_id.should.equal(this.fileId)
    })

    it('returns the url', async function () {
      const expectedUrl = this.getFileUrl(this.newProjectId, this.newFileId)
      this.request.callsArgWith(1, null, { statusCode: 200 })
      const url = await this.handler.promises.copyFile(
        this.projectId,
        this.fileId,
        this.newProjectId,
        this.newFileId
      )

      url.should.equal(expectedUrl)
    })

    it('should return the err', async function () {
      const expectedError = new Error('error')
      this.request.callsArgWith(1, expectedError)
      let error

      try {
        await this.handler.promises.copyFile(
          this.projectId,
          this.fileId,
          this.newProjectId,
          this.newFileId
        )
      } catch (err) {
        error = err
      }

      expect(error).to.equal(expectedError)
    })

    it('should return an error for a non-success statusCode', async function () {
      this.request.callsArgWith(1, null, { statusCode: 500 })
      let error

      try {
        await this.handler.promises.copyFile(
          this.projectId,
          this.fileId,
          this.newProjectId,
          this.newFileId
        )
      } catch (err) {
        error = err
      }

      expect(error).to.be.instanceOf(Error)
      expect(error).to.have.property(
        'message',
        'non-ok response from filestore for copyFile: 500'
      )
    })
  })
})
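
For reference, the copyFile contract that these now-deleted tests pinned down: a PUT to the destination file URL with a JSON body naming the source project and file, any non-2xx status surfaced as an error, and the destination URL returned on success. Sketched under those assumptions (names are illustrative):

    // Illustrative shape of the removed filestore copy request.
    function copyFile(request, sourceProjectId, sourceFileId, newFileUrl, callback) {
      request(
        {
          method: 'put',
          uri: newFileUrl,
          json: { source: { project_id: sourceProjectId, file_id: sourceFileId } },
        },
        (err, response) => {
          if (err) return callback(err)
          if (response.statusCode < 200 || response.statusCode >= 300) {
            return callback(
              new Error(`non-ok response from filestore for copyFile: ${response.statusCode}`)
            )
          }
          callback(null, newFileUrl)
        }
      )
    }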

@@ -115,11 +115,6 @@ describe('ProjectDeleter', function () {
    this.ProjectMock = sinon.mock(Project)
    this.DeletedProjectMock = sinon.mock(DeletedProject)
-   this.FileStoreHandler = {
-     promises: {
-       deleteProject: sinon.stub().resolves(),
-     },
-   }
    this.Features = {
      hasFeature: sinon.stub().returns(true),
    }
@@ -143,7 +138,6 @@ describe('ProjectDeleter', function () {
        '../DocumentUpdater/DocumentUpdaterHandler':
          this.DocumentUpdaterHandler,
        '../Tags/TagsHandler': this.TagsHandler,
-       '../FileStore/FileStoreHandler': this.FileStoreHandler,
        '../Chat/ChatApiHandler': this.ChatApiHandler,
        '../Collaborators/CollaboratorsHandler': this.CollaboratorsHandler,
        '../Collaborators/CollaboratorsGetter': this.CollaboratorsGetter,
@@ -480,12 +474,6 @@ describe('ProjectDeleter', function () {
      )
    })

-   it('should destroy the files in filestore', function () {
-     expect(
-       this.FileStoreHandler.promises.deleteProject
-     ).to.have.been.calledWith(this.deletedProjects[0].project._id)
-   })

    it('should destroy the chat threads and messages', function () {
      expect(
        this.ChatApiHandler.promises.destroyProject
@@ -540,11 +528,6 @@ describe('ProjectDeleter', function () {
        .called
    })

-   it('should not destroy the files in filestore', function () {
-     expect(this.FileStoreHandler.promises.deleteProject).to.not.have.been
-       .called
-   })

    it('should not destroy the chat threads and messages', function () {
      expect(this.ChatApiHandler.promises.destroyProject).to.not.have.been
        .called

@@ -14,7 +14,7 @@ describe('ProjectDuplicator', function () {
    this.doc1Lines = ['one']
    this.doc2Lines = ['two']
    this.file0 = { name: 'file0', _id: 'file0', hash: 'abcde' }
-   this.file1 = { name: 'file1', _id: 'file1' }
+   this.file1 = { name: 'file1', _id: 'file1', hash: 'fffff' }
    this.file2 = {
      name: 'file2',
      _id: 'file2',
@@ -105,22 +105,19 @@ describe('ProjectDuplicator', function () {
    ]
    this.fileEntries = [
      {
-       createdBlob: false,
+       createdBlob: true,
        path: this.file0Path,
        file: this.newFile0,
-       url: this.filestoreUrl,
      },
      {
-       createdBlob: false,
+       createdBlob: true,
        path: this.file1Path,
        file: this.newFile1,
-       url: this.filestoreUrl,
      },
      {
        createdBlob: true,
        path: this.file2Path,
        file: this.newFile2,
-       url: null,
      },
    ]

@@ -143,15 +140,10 @@ describe('ProjectDuplicator', function () {
        updateProjectStructure: sinon.stub().resolves(),
      },
    }
-   this.FileStoreHandler = {
-     promises: {
-       copyFile: sinon.stub().resolves(this.filestoreUrl),
-     },
-   }
    this.HistoryManager = {
      promises: {
        copyBlob: sinon.stub().callsFake((historyId, newHistoryId, hash) => {
-         if (hash === 'abcde') {
+         if (hash === '500') {
            return Promise.reject(new Error('copy blob error'))
          }
          return Promise.resolve()
@@ -221,9 +213,6 @@ describe('ProjectDuplicator', function () {
        flushProjectToTpds: sinon.stub().resolves(),
      },
    }
-   this.Features = {
-     hasFeature: sinon.stub().withArgs('project-history-blobs').returns(true),
-   }

    this.ProjectDuplicator = SandboxedModule.require(MODULE_PATH, {
      requires: {
@@ -232,7 +221,6 @@ describe('ProjectDuplicator', function () {
        '../Docstore/DocstoreManager': this.DocstoreManager,
        '../DocumentUpdater/DocumentUpdaterHandler':
          this.DocumentUpdaterHandler,
-       '../FileStore/FileStoreHandler': this.FileStoreHandler,
        './ProjectCreationHandler': this.ProjectCreationHandler,
        './ProjectDeleter': this.ProjectDeleter,
        './ProjectEntityMongoUpdateHandler':
@@ -244,7 +232,6 @@ describe('ProjectDuplicator', function () {
        '../ThirdPartyDataStore/TpdsProjectFlusher': this.TpdsProjectFlusher,
        '../Tags/TagsHandler': this.TagsHandler,
        '../History/HistoryManager': this.HistoryManager,
-       '../../infrastructure/Features': this.Features,
        '../Compile/ClsiCacheManager': {
          prepareClsiCache: sinon.stub().rejects(new Error('ignore this')),
        },
@@ -281,7 +268,7 @@ describe('ProjectDuplicator', function () {
    })

    it('should duplicate the files with hashes by copying the blobs in history v1', function () {
-     for (const file of [this.file0, this.file2]) {
+     for (const file of [this.file0, this.file1, this.file2]) {
        this.HistoryManager.promises.copyBlob.should.have.been.calledWith(
          this.project.overleaf.history.id,
          this.newProject.overleaf.history.id,
@@ -290,46 +277,6 @@ describe('ProjectDuplicator', function () {
      }
    })

-   it('should ignore any errors when copying the blobs in history v1', async function () {
-     await expect(
-       this.HistoryManager.promises.copyBlob(
-         this.project.overleaf.history.id,
-         this.newProject.overleaf.history.id,
-         this.file0.hash
-       )
-     ).to.be.rejectedWith('copy blob error')
-   })

-   it('should not try to copy the blobs for any files without hashes', function () {
-     for (const file of [this.file1]) {
-       this.HistoryManager.promises.copyBlob.should.not.have.been.calledWith(
-         this.project.overleaf.history.id,
-         this.newProject.overleaf.history.id,
-         file.hash
-       )
-     }
-   })

-   it('should copy files to the filestore', function () {
-     for (const file of [this.file0, this.file1]) {
-       this.FileStoreHandler.promises.copyFile.should.have.been.calledWith(
-         this.project._id,
-         file._id,
-         this.newProject._id,
-         this.newFileId
-       )
-     }
-   })

-   it('should not copy files that have been sent to history-v1 to the filestore', function () {
-     this.FileStoreHandler.promises.copyFile.should.not.have.been.calledWith(
-       this.project._id,
-       this.file2._id,
-       this.newProject._id,
-       this.newFileId
-     )
-   })

    it('should create a blank project', function () {
      this.ProjectCreationHandler.promises.createBlankProject.should.have.been.calledWith(
        this.owner._id,
@@ -421,6 +368,19 @@ describe('ProjectDuplicator', function () {
      })
    })

+   describe('when cloning in history-v1 fails', function () {
+     it('should fail the clone operation', async function () {
+       this.file0.hash = '500'
+       await expect(
+         this.ProjectDuplicator.promises.duplicate(
+           this.owner,
+           this.project._id,
+           'name'
+         )
+       ).to.be.rejectedWith('copy blob error')
+     })
+   })

    describe('when there is an error', function () {
      beforeEach(async function () {
        this.ProjectEntityMongoUpdateHandler.promises.createNewFolderStructure.rejects()
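
The updated expectations encode the new duplication model: every file now carries a hash, duplication copies content-addressed blobs between the two projects' histories, and a failed blob copy fails the whole clone instead of being ignored. Roughly (an illustrative reduction, not the real module):

    // Illustrative: duplicate binary files via history-v1 blob copies only;
    // the filestore copyFile step no longer exists. A copyBlob rejection
    // propagates and aborts the duplication.
    async function copyProjectBlobs(historyManager, oldProject, newProject, files) {
      for (const file of files) {
        await historyManager.promises.copyBlob(
          oldProject.overleaf.history.id,
          newProject.overleaf.history.id,
          file.hash
        )
      }
    }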

@@ -26,7 +26,6 @@ describe('ProjectEntityUpdateHandler', function () {
        },
      },
    }
-   this.fileUrl = 'filestore.example.com/file'
    this.user = { _id: new ObjectId() }

    this.DocModel = class Doc {
@@ -152,16 +151,8 @@ describe('ProjectEntityUpdateHandler', function () {
    }
    this.FileStoreHandler = {
      promises: {
-       copyFile: sinon.stub(),
        uploadFileFromDisk: sinon.stub(),
-       deleteFile: sinon.stub(),
      },
-
-     _buildUrl: sinon
-       .stub()
-       .callsFake(
-         (projectId, fileId) => `www.filestore.test/${projectId}/${fileId}`
-       ),
    }
    this.FileWriter = {
      promises: {
@@ -670,7 +661,6 @@ describe('ProjectEntityUpdateHandler', function () {
        linkedFileData: this.linkedFileData,
      }
      this.FileStoreHandler.promises.uploadFileFromDisk.resolves({
-       url: this.fileUrl,
        fileRef: this.newFile,
        createdBlob: true,
      })
@@ -730,7 +720,6 @@ describe('ProjectEntityUpdateHandler', function () {
        {
          file: this.newFile,
          path: this.path,
-         url: this.fileUrl,
          createdBlob: true,
        },
      ]
@@ -1069,7 +1058,6 @@ describe('ProjectEntityUpdateHandler', function () {
    describe('upsertFile', function () {
      beforeEach(function () {
        this.FileStoreHandler.promises.uploadFileFromDisk.resolves({
-         url: this.fileUrl,
          fileRef: this.file,
          createdBlob: true,
        })
@@ -1181,7 +1169,6 @@ describe('ProjectEntityUpdateHandler', function () {
        {
          file: this.newFile,
          path: this.fileSystemPath,
-         url: this.fileUrl,
          createdBlob: true,
        },
      ]
@@ -1218,7 +1205,6 @@ describe('ProjectEntityUpdateHandler', function () {
        element: this.folder,
      })
      this.FileStoreHandler.promises.uploadFileFromDisk.resolves({
-       url: this.fileUrl,
        fileRef: this.newFile,
        createdBlob: true,
      })
@@ -1254,7 +1240,6 @@ describe('ProjectEntityUpdateHandler', function () {
        folderId,
        userId,
        fileRef: this.newFile,
-       fileStoreUrl: this.fileUrl,
        source: this.source,
        createdBlob: true,
      })
@@ -1327,7 +1312,6 @@ describe('ProjectEntityUpdateHandler', function () {
        folder: this.folder,
      })

-     this.newFileUrl = 'new-file-url'
      this.newFile = {
        _id: newFileId,
        name: 'dummy-upload-filename',
@@ -1339,7 +1323,6 @@ describe('ProjectEntityUpdateHandler', function () {
        overleaf: { history: { id: projectHistoryId } },
      }
      this.FileStoreHandler.promises.uploadFileFromDisk.resolves({
-       url: this.newFileUrl,
        fileRef: this.newFile,
        createdBlob: true,
      })
@@ -1380,7 +1363,6 @@ describe('ProjectEntityUpdateHandler', function () {
        {
          file: this.newFile,
          path: this.path,
-         url: this.newFileUrl,
          createdBlob: true,
        },
      ]
@@ -1559,7 +1541,6 @@ describe('ProjectEntityUpdateHandler', function () {
      this.file = { _id: fileId }
      this.isNewFile = true
      this.FileStoreHandler.promises.uploadFileFromDisk.resolves({
-       url: this.fileUrl,
        fileRef: this.newFile,
        createdBlob: true,
      })
@@ -1601,7 +1582,6 @@ describe('ProjectEntityUpdateHandler', function () {
        linkedFileData: this.linkedFileData,
        userId,
        fileRef: this.newFile,
-       fileStoreUrl: this.fileUrl,
        source: this.source,
        createdBlob: true,
      }
@@ -2567,7 +2547,6 @@ describe('ProjectEntityUpdateHandler', function () {
    describe('_cleanUpEntity', function () {
      beforeEach(function () {
        this.entityId = '4eecaffcbffa66588e000009'
-       this.FileStoreHandler.promises.deleteFile.resolves()
        this.ProjectEntityUpdateHandler.promises.unsetRootDoc = sinon
          .stub()
          .resolves()
@@ -2590,12 +2569,6 @@ describe('ProjectEntityUpdateHandler', function () {
        )
      })

-     it('should not delete the file from FileStoreHandler', function () {
-       this.FileStoreHandler.promises.deleteFile
-         .calledWith(projectId, this.entityId)
-         .should.equal(false)
-     })

      it('should not attempt to delete from the document updater', function () {
        this.DocumentUpdaterHandler.promises.deleteDoc.called.should.equal(
          false
@@ -2862,7 +2835,6 @@ describe('ProjectEntityUpdateHandler', function () {
        .resolves({ lines: this.docLines, rev: this.rev })
      this.FileWriter.promises.writeLinesToDisk.resolves(this.tmpFilePath)
      this.FileStoreHandler.promises.uploadFileFromDisk.resolves({
-       url: this.fileStoreUrl,
        fileRef: this.file,
        createdBlob: true,
      })
@@ -2927,7 +2899,6 @@ describe('ProjectEntityUpdateHandler', function () {
        {
          file: this.file,
          path: this.path,
-         url: this.fileStoreUrl,
          createdBlob: true,
        },
      ],

@@ -30,8 +30,8 @@ describe('ReferencesHandler', function () {
      {
        docs: [{ name: 'three.bib', _id: 'ccc' }],
        fileRefs: [
-         { name: 'four.bib', _id: 'fff' },
-         { name: 'five.bib', _id: 'ggg', hash: 'hash' },
+         { name: 'four.bib', _id: 'fff', hash: 'abc' },
+         { name: 'five.bib', _id: 'ggg', hash: 'def' },
        ],
        folders: [],
      },
@@ -50,7 +50,6 @@ describe('ReferencesHandler', function () {
      filestore: { url: 'http://some.url/filestore' },
      project_history: { url: 'http://project-history.local' },
    },
-   filestoreMigrationLevel: 2,
  }),
}))

@@ -98,9 +97,10 @@ describe('ReferencesHandler', function () {
  describe('indexAll', function () {
    beforeEach(function (ctx) {
      sinon.stub(ctx.handler, '_findBibDocIds').returns(['aaa', 'ccc'])
-     sinon
-       .stub(ctx.handler, '_findBibFileRefs')
-       .returns([{ _id: 'fff' }, { _id: 'ggg', hash: 'hash' }])
+     sinon.stub(ctx.handler, '_findBibFileRefs').returns([
+       { _id: 'fff', hash: 'abc' },
+       { _id: 'ggg', hash: 'def' },
+     ])
      sinon.stub(ctx.handler, '_isFullIndex').callsArgWith(1, null, true)
      ctx.request.post.callsArgWith(
        1,
@@ -164,8 +164,8 @@ describe('ReferencesHandler', function () {
      expect(arg.json.docUrls).to.deep.equal([
        `${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/aaa/raw`,
        `${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/ccc/raw`,
-       `${ctx.settings.apis.filestore.url}/project/${ctx.projectId}/file/fff?from=bibFileUrls`,
-       `${ctx.settings.apis.filestore.url}/project/${ctx.projectId}/file/ggg?from=bibFileUrls`,
+       `${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/abc`,
+       `${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/def`,
      ])
      expect(arg.json.sourceURLs.length).to.equal(4)
      expect(arg.json.sourceURLs).to.deep.equal([
@@ -176,11 +176,10 @@ describe('ReferencesHandler', function () {
          url: `${ctx.settings.apis.docstore.url}/project/${ctx.projectId}/doc/ccc/raw`,
        },
        {
-         url: `${ctx.settings.apis.filestore.url}/project/${ctx.projectId}/file/fff?from=bibFileUrls`,
+         url: `${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/abc`,
        },
        {
-         url: `${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/hash`,
-         fallbackURL: `${ctx.settings.apis.filestore.url}/project/${ctx.projectId}/file/ggg?from=bibFileUrls`,
+         url: `${ctx.settings.apis.project_history.url}/project/${ctx.historyId}/blob/def`,
        },
      ])
      expect(arg.json.fullIndex).to.equal(true)
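
With a hash on every .bib fileRef, reference indexing resolves each file straight to a content-addressed blob in project-history; the filestore URL, and its earlier role as a fallback, disappears. The URL shape, as implied by the test strings:

    // Illustrative: the blob URL for a .bib file is derived from its hash.
    function bibFileUrl(settings, historyId, fileRef) {
      return `${settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`
    }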

@@ -39,7 +39,6 @@ describe('SplitTestHandler', function () {
    }
    this.SplitTestCache.get.resolves(this.cachedSplitTests)
    this.Settings = {
-     filestoreMigrationLevel: 0,
      moduleImportSequence: [],
      overleaf: {},
      devToolbar: {

@@ -57,7 +57,6 @@ describe('TpdsUpdateSender', function () {
        url: projectHistoryUrl,
      },
    },
-   filestoreMigrationLevel: true,
  }
  const getUsers = sinon.stub()
  getUsers
@@ -116,59 +115,6 @@ describe('TpdsUpdateSender', function () {
      this.settings.apis.tpdsworker = { url: 'http://tpdsworker' }
    })

-   it('queues a post the file with user and file id', async function () {
-     const fileId = '4545345'
-     const hash = undefined
-     const historyId = 91525
-     const path = '/some/path/here.jpg'
-
-     await this.TpdsUpdateSender.promises.addFile({
-       projectId,
-       historyId,
-       fileId,
-       hash,
-       path,
-       projectName,
-     })
-
-     expect(this.FetchUtils.fetchNothing).to.have.been.calledWithMatch(
-       this.enqueueUrl,
-       {
-         json: {
-           group: userId,
-           method: 'pipeStreamFrom',
-           job: {
-             method: 'post',
-             streamOrigin: `${filestoreUrl}/project/${projectId}/file/${fileId}?from=tpdsAddFile`,
-             uri: `${thirdPartyDataStoreApiUrl}/user/${userId}/entity/${encodeURIComponent(
-               projectName
-             )}${encodeURIComponent(path)}`,
-             headers: {},
-           },
-         },
-       }
-     )
-
-     expect(this.FetchUtils.fetchNothing).to.have.been.calledWithMatch(
-       this.enqueueUrl,
-       {
-         json: {
-           group: collaberatorRef,
-         },
-       }
-     )
-
-     expect(this.FetchUtils.fetchNothing).to.have.been.calledWithMatch(
-       this.enqueueUrl,
-       {
-         json: {
-           group: readOnlyRef,
-           job: {},
-         },
-       }
-     )
-   })

    it('queues a post the file with user and file id and hash', async function () {
      const fileId = '4545345'
      const hash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
@@ -193,7 +139,6 @@ describe('TpdsUpdateSender', function () {
          job: {
            method: 'post',
            streamOrigin: `${projectHistoryUrl}/project/${historyId}/blob/${hash}`,
-           streamFallback: `${filestoreUrl}/project/${projectId}/file/${fileId}?from=tpdsAddFile`,
            uri: `${thirdPartyDataStoreApiUrl}/user/${userId}/entity/${encodeURIComponent(
              projectName
            )}${encodeURIComponent(path)}`,

@@ -61,7 +61,6 @@ describe('ProjectUploadManager', function () {
      {
        file: this.file,
        path: `/${this.file.name}`,
-       url: this.fileStoreUrl,
        createdBlob: true,
      },
    ]
@@ -101,7 +100,6 @@ describe('ProjectUploadManager', function () {
      promises: {
        uploadFileFromDiskWithHistoryId: sinon.stub().resolves({
          fileRef: this.file,
-         url: this.fileStoreUrl,
          createdBlob: true,
        }),
      },

@@ -7,7 +7,6 @@ describe('Features', function () {
    this.Features = SandboxedModule.require(modulePath, {
      requires: {
        '@overleaf/settings': (this.settings = {
-         filestoreMigrationLevel: 0,
          moduleImportSequence: [],
          enabledLinkedFileTypes: [],
        }),