13 Commits

Author SHA1 Message Date
Henry Oswald
c2ad040e08 new release of sharelatex v0.2.0 2015-09-17 10:56:57 +01:00
Henry Oswald
a3bee00bdc Merge branch 'master' into release 2015-09-16 17:35:38 +01:00
Henry Oswald
afb7fd6d16 remove extra logging in migration and change file paths in migration 2 2015-09-16 16:29:26 +01:00
Henry Oswald
6aa2446eea remove process.exit from migrations 2015-09-16 16:26:51 +01:00
Henry Oswald
0980b2e6dc change timeout in migrations-1 to 0ms 2015-09-16 15:58:38 +01:00
Henry Oswald
1e073da1fd bring down migration timeout in delete doc lines to 0ms 2015-09-16 14:44:53 +01:00
Henry Oswald
feafbb5416 renamed first migration to move_doc_lines_to_doc_collection 2015-09-16 14:22:54 +01:00
Henry Oswald
e3b3b85726 add the migrate task into grunt 2015-09-16 13:41:09 +01:00
Henry Oswald
ce10ceb2d9 improved migration script to delete docs 2015-04-16 13:45:16 +01:00
Henry Oswald
dae47efa1c Merge branch 'master' of https://github.com/sharelatex/sharelatex 2015-04-14 15:21:20 +01:00
Henry Oswald
5dd40dc7cb added migration 2 to clean up projects, removing the doc lines 2015-04-14 15:21:07 +01:00
James Allen
5ea12c3de8 Update CHANGELOG.md 2015-03-20 20:32:07 +00:00
James Allen
16ba8acfca Update CHANGELOG.md 2015-03-20 20:31:12 +00:00
4 changed files with 206 additions and 12 deletions

View File

@@ -1,3 +1,13 @@
v0.1.4
------
* Move to a private registration scheme where users must be added by an admin.
* Proxy websockets connection through web to real-time service so no websocketsUrl parameter is needed.
* Use worker aspell processes in spelling to prevent excessive forking.
* Properly clean up after long running ImageMagick conversions in the filestore.
* Allow a configurable app name and email contact address.
* Switch to new PDF viewer with partial page loading for immediate preview of visible page.
v0.1.3 v0.1.3
------ ------

View File

@@ -13,43 +13,43 @@ settings = require("settings-sharelatex")
SERVICES = [{ SERVICES = [{
name: "web" name: "web"
repo: "https://github.com/sharelatex/web-sharelatex.git" repo: "https://github.com/sharelatex/web-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}, { }, {
name: "real-time" name: "real-time"
repo: "https://github.com/sharelatex/real-time-sharelatex.git" repo: "https://github.com/sharelatex/real-time-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}, { }, {
name: "document-updater" name: "document-updater"
repo: "https://github.com/sharelatex/document-updater-sharelatex.git" repo: "https://github.com/sharelatex/document-updater-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}, { }, {
name: "clsi" name: "clsi"
repo: "https://github.com/sharelatex/clsi-sharelatex.git" repo: "https://github.com/sharelatex/clsi-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}, { }, {
name: "filestore" name: "filestore"
repo: "https://github.com/sharelatex/filestore-sharelatex.git" repo: "https://github.com/sharelatex/filestore-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}, { }, {
name: "track-changes" name: "track-changes"
repo: "https://github.com/sharelatex/track-changes-sharelatex.git" repo: "https://github.com/sharelatex/track-changes-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}, { }, {
name: "docstore" name: "docstore"
repo: "https://github.com/sharelatex/docstore-sharelatex.git" repo: "https://github.com/sharelatex/docstore-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}, { }, {
name: "chat" name: "chat"
repo: "https://github.com/sharelatex/chat-sharelatex.git" repo: "https://github.com/sharelatex/chat-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}, { }, {
name: "tags" name: "tags"
repo: "https://github.com/sharelatex/tags-sharelatex.git" repo: "https://github.com/sharelatex/tags-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}, { }, {
name: "spelling" name: "spelling"
repo: "https://github.com/sharelatex/spelling-sharelatex.git" repo: "https://github.com/sharelatex/spelling-sharelatex.git"
version: "v0.1.4" version: "v0.2.0"
}] }]
module.exports = (grunt) -> module.exports = (grunt) ->
@@ -165,7 +165,7 @@ module.exports = (grunt) ->
Helpers.buildUpstartScripts() Helpers.buildUpstartScripts()
#grunt.registerTask 'migrate', "compile migrations and run them", ['coffee:migrate', 'shell:migrate'] grunt.registerTask 'migrate', "compile migrations and run them", ['coffee:migrate', 'shell:migrate']
Helpers = Helpers =

View File

@@ -139,7 +139,7 @@ processNext = (project_id, callback)->
markProjectAsProcessed project_id, (err)-> markProjectAsProcessed project_id, (err)->
setTimeout( setTimeout(
-> callback(err) -> callback(err)
,500) ,0)

View File

@@ -0,0 +1,184 @@
# Migration 2: remove embedded doc lines from the `projects` collection for
# docs that have already been copied into the separate `docs` collection.
Settings = require "settings-sharelatex"
fs = require("fs")
mongojs = require("mongojs")
ObjectId = mongojs.ObjectId
db = mongojs(Settings.mongo.url, ['projects', 'docs'])
_ = require("lodash")
async = require("async")
exec = require("child_process").exec
# On-disk tracking files so an interrupted run can be resumed where it left off.
finished_projects_path = "/tmp/finished-projects-2"
all_projects_path = "/tmp/all-projects-2"
unmigrated_docs_path = "/tmp/unmigrated-2"
# Kick off a self-rescheduling progress check: counts lines in the
# finished-projects file every 30 seconds.
# NOTE(review): the `wc` output is never logged — extra logging was
# deliberately stripped from this migration; confirm before re-adding.
printProgress = ->
  exec "wc #{finished_projects_path}", (error, results) ->
    setTimeout printProgress, 1000 * 30
# Check whether a project id is already recorded in the finished-projects
# file.  Yields (error, hasBeenProcessed).
# NOTE: grep exits non-zero when the id is absent, so `error` here usually
# just means "no match" and callers treat it as non-fatal.
checkIfFileHasBeenProccessed = (project_id, callback)->
  exec "grep #{project_id} #{finished_projects_path}", (error, results) ->
    callback error, _.include(results, project_id)
# Load the cached list of project ids (one per line) from disk.
# NOTE(review): a read error leaves `data` undefined and `data.split`
# throws — the resume file is assumed to exist and be readable here
# (getProjectIds only calls this after an existsSync check).
loadProjectIds = (callback)->
  console.log "loading project ids from #{all_projects_path}"
  fs.readFile all_projects_path, "utf-8", (err, data)->
    ids = data.split("\n")
    console.log "loaded #{ids.length} project ids from #{all_projects_path}"
    callback err, ids
# Query mongo for every project id, cache them (newline separated) to
# all_projects_path for resumability, and yield (err, ids).
# Fix: bail out on a mongo error instead of dereferencing an undefined
# `ids` (previously crashed on `ids.length`).
getAndWriteProjectids = (callback)->
  console.log "finding all project id's - #{new Date().toString()}"
  db.projects.find {}, {_id:1}, (err, ids)->
    return callback(err) if err?
    console.log "total found projects in mongo #{ids.length} - #{new Date().toString()}"
    ids = _.pluck ids, '_id'
    ids = _.filter ids, (id)-> id?
    fileData = ids.join("\n")
    # Fix: surface a failed cache write rather than silently ignoring it.
    fs.writeFile all_projects_path, fileData, (writeErr)->
      callback(writeErr, ids)
# Record a doc that could not be migrated: mark its project as processed,
# then append "<project_id> <doc_id>" to the unmigrated list.
# Fix: propagate a failure from markProjectAsProcessed instead of
# silently appending anyway.
markDocAsUnmigrated = (project_id, doc_id, callback)->
  console.log "#{project_id} #{doc_id} unmigrated"
  markProjectAsProcessed project_id, (err)->
    return callback(err) if err?
    fs.appendFile unmigrated_docs_path, "#{project_id} #{doc_id}\n", callback
# Record every doc in `docs` as unmigrated for this project, one at a time.
markUnmigratedDocs = (project_id, docs, callback)->
  console.log docs.length, project_id, "unmigrated"
  tasks = for doc in docs
    do (doc) ->
      (cb)-> markDocAsUnmigrated project_id, doc._id, cb
  async.series tasks, callback
# Prefer the cached id list on disk (resume support); otherwise query
# mongo and build the cache.
getProjectIds = (callback)->
  if fs.existsSync all_projects_path
    loadProjectIds callback
  else
    getAndWriteProjectids callback
# Append one id per line to the finished-projects tracking file.
markProjectAsProcessed = (project_id, callback)->
  line = "#{project_id}\n"
  fs.appendFile finished_projects_path, line, callback
# Fetch one project — excluding the heavy `lines` field at every folder
# nesting depth (up to 11 levels) — then collect all docs inside it.
# Yields (error, docs, project); yields only undefined when the project
# does not exist.
getAllDocs = (project_id, callback = (error, docs) ->) ->
  excludes = {}
  for depth in [0..12]
    # Array(n).join builds ".folders" repeated (n-1) times, so keys run
    # from "rootFolder.docs.lines" down to the deepest nested folder.
    excludes["rootFolder#{Array(depth).join(".folders")}.docs.lines"] = 0
  db.projects.findOne _id: ObjectId(project_id.toString()), excludes, (error, project) ->
    return callback(error) if error?
    unless project?
      console.log "no such project #{project_id}"
      return callback()
    findAllDocsInProject project, (error, docs) ->
      return callback(error) if error?
      callback null, docs, project
# Thin async wrapper over the synchronous recursive folder walk.
findAllDocsInProject = (project, callback = (error, docs) ->) ->
  docs = _findAllDocsInFolder project.rootFolder[0]
  callback null, docs
# Locate a doc by id inside a project's folder tree.
# Yields (null, doc, mongoPath) on success, (null, null, null) otherwise.
findDocInProject = (project, doc_id, callback = (error, doc, mongoPath) ->) ->
  found = _findDocInFolder project.rootFolder[0], doc_id, "rootFolder.0"
  unless found?
    return callback null, null, null
  callback null, found.doc, found.mongoPath
# Depth-first search for doc_id under `folder`, building the mongo
# dot-path (e.g. "rootFolder.0.folders.1.docs.2") as we descend.
# Returns {doc, mongoPath} or null.
_findDocInFolder = (folder = {}, doc_id, currentPath) ->
  for doc, i in folder.docs or []
    continue unless doc?._id?
    if doc._id.toString() == doc_id.toString()
      return { doc: doc, mongoPath: "#{currentPath}.docs.#{i}" }
  for childFolder, i in folder.folders or []
    found = _findDocInFolder childFolder, doc_id, "#{currentPath}.folders.#{i}"
    return found if found?
  return null
# Recursively gather every doc in this folder and all of its subfolders.
_findAllDocsInFolder = (folder = {}) ->
  childDocs = (folder.folders or []).map (childFolder) -> _findAllDocsInFolder(childFolder)
  (folder.docs or []).concat childDocs...
# Does this doc already have a row in the `docs` collection?
# Docs with no usable _id are reported as existing (true) so they are
# treated as deletable rather than unmigrated.
# NOTE(review): the `.length` check implies _id is expected to be a
# string here, not an ObjectId — confirm against the data.
isDocInDocCollection = (doc, callback)->
  unless doc?._id? and doc._id.length != 0
    return callback(null, true)
  db.docs.find({_id: ObjectId(doc._id+"")}, {_id: 1}).limit 1, (err, foundDocs)->
    callback err, foundDocs.length > 0
# Partition `docs` into those safe to strip from the project (already in
# the docs collection) and those that never made it there.
getWhichDocsCanBeDeleted = (docs, callback = (err, docsToBeDeleted, unmigratedDocs)->)->
  docsToBeDeleted = []
  unmigratedDocs = []
  jobs = for doc in docs
    do (doc) ->
      (cb)->
        isDocInDocCollection doc, (err, exists)->
          target = if exists then docsToBeDeleted else unmigratedDocs
          target.push doc
          cb(err)
  async.series jobs, (err)->
    callback err, docsToBeDeleted, unmigratedDocs
# Unset the embedded `lines` and `rev` fields for one doc inside the
# project document, addressed by its mongo dot-path.
whipeDocLines = (project_id, mongoPath, callback)->
  unsets = {}
  unsets["#{mongoPath}.lines"] = ""
  unsets["#{mongoPath}.rev"] = ""
  db.projects.update _id: ObjectId(project_id+''), { $unset: unsets }, callback
# Wipe the embedded lines of every doc in `docs`, at most 5 updates in
# flight at once.
# Fixes: propagate lookup errors, and skip docs whose path cannot be
# resolved — previously a null mongoPath produced a bogus "null.lines"
# $unset on the project.  Also renames the shadowed inner `doc`.
removeDocLinesFromProject = (docs, project, callback)->
  jobs = _.map docs, (doc)->
    (cb)->
      findDocInProject project, doc._id, (err, foundDoc, mongoPath)->
        return cb(err) if err?
        return cb() if !mongoPath?
        whipeDocLines project._id, mongoPath, cb
  async.parallelLimit jobs, 5, callback
# Migrate one project: fetch its docs, decide which are already in the
# docs collection, record the unmigrated ones, strip embedded lines for
# the rest, then mark the project processed.
# Fixes: the error from markUnmigratedDocs was silently discarded; the
# "could not save docs into mongo" message described the wrong operation;
# "procssed" typo in the skip log.
processNext = (project_id, callback)->
  if !project_id? or project_id.length == 0
    return callback()
  # grep's error is intentionally ignored here: a non-zero exit just
  # means the id was not found in the tracking file.
  checkIfFileHasBeenProccessed project_id, (err, hasBeenProcessed)->
    if hasBeenProcessed
      console.log "#{project_id} already processed, skipping"
      return callback()
    console.log "#{project_id} processing"
    getAllDocs project_id, (err, docs, project)->
      if err?
        console.error err, project_id, "could not get all docs"
        return callback(err)
      getWhichDocsCanBeDeleted docs, (err, docsToBeDeleted, unmigratedDocs)->
        if err?
          console.error err, project_id, "could not check which docs can be deleted"
          return callback(err)
        markUnmigratedDocs project_id, unmigratedDocs, (err)->
          return callback(err) if err?
          removeDocLinesFromProject docsToBeDeleted, project, (err)->
            return callback(err) if err?
            markProjectAsProcessed project_id, (err)->
              # Defer the callback so deep async.series chains do not
              # grow the call stack.
              setTimeout (-> callback(err)), 0
# Migration entry point: load every project id, then migrate each project
# serially, reporting the first failure (or "finished") to `done`.
# Fix: an error from getProjectIds was ignored, so a failed id load
# previously logged "finished" and reported success.
exports.migrate = (client, done = ->)->
  getProjectIds (err, ids)->
    if err?
      console.error err, "could not load project ids"
      return done(err)
    printProgress()
    jobs = _.map ids, (id)->
      return (cb)->
        processNext(id, cb)
    async.series jobs, (err)->
      if err?
        console.error err, "at end of jobs"
      else
        console.log "finished"
      done(err)
# This migration is destructive (embedded doc lines are deleted), so
# there is nothing to restore on rollback — succeed immediately.
exports.rollback = (next)-> next()