/**
 * The MessageHandler handles all Messages that come from Socket.IO and controls the sessions
 */

/*
 * Copyright 2009 Google Inc., 2011 Peter 'Pita' Martischka (Primary Technology Ltd)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
var padManager = require("../db/PadManager");
var Changeset = require("ep_etherpad-lite/static/js/Changeset");
var AttributePool = require("ep_etherpad-lite/static/js/AttributePool");
var AttributeManager = require("ep_etherpad-lite/static/js/AttributeManager");
var authorManager = require("../db/AuthorManager");
var readOnlyManager = require("../db/ReadOnlyManager");
var settings = require('../utils/Settings');
var securityManager = require("../db/SecurityManager");
var plugins = require("ep_etherpad-lite/static/js/pluginfw/plugin_defs.js");
var log4js = require('log4js');
var messageLogger = log4js.getLogger("message");
var accessLogger = log4js.getLogger("access");
var _ = require('underscore');
var hooks = require("ep_etherpad-lite/static/js/pluginfw/hooks.js");
var channels = require("channels");
var stats = require('../stats');
var remoteAddress = require("../utils/RemoteAddress").remoteAddress;
const assert = require('assert').strict;
const nodeify = require("nodeify");
const { RateLimiterMemory } = require('rate-limiter-flexible');
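// In-memory rate limiter consulted in handleMessage(): in production it throttles
// incoming socket.io messages per client IP, using the commitRateLimiting values
// configured in settings.json.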
const rateLimiter = new RateLimiterMemory({
  points: settings.commitRateLimiting.points,
  duration: settings.commitRateLimiting.duration
});
/**
 * An associative array that saves information about a session
 * key = sessionId
 * values = padId, readonlyPadId, readonly, author, rev
 * padId = the real padId of the pad
 * readonlyPadId = the readonly pad id of the pad
 * readonly = whether the client has only read access (true) or read/write access (false)
 * rev = the last revision that was sent to this client
 * author = the author id of this session
 */
var sessioninfos = {};
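// Exported so that other server-side modules and plugins can inspect the state of
// active sessions.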
exports.sessioninfos = sessioninfos;
// Measure total amount of users
stats.gauge('totalUsers', function() {
  return Object.keys(socketio.sockets.sockets).length;
});
/**
 * A changeset queue per pad that is processed by handleUserChanges()
 */
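// Queuing per pad is intended to keep USER_CHANGES for the same pad sequential, so each
// incoming changeset is rebased against the latest revision before the next one is processed.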
var padChannels = new channels.channels(function(data, callback) {
  return nodeify(handleUserChanges(data), callback);
});
/**
 * Saves the Socket class we need to send and receive data from the client
 */
let socketio;

/**
 * This Method is called by server.js to tell the message handler on which socket it should send
 * @param socket_io The Socket
 */
exports.setSocketIO = function(socket_io)
{
  socketio = socket_io;
}
/**
 * Handles the connection of a new user
 * @param client the new client
 */
exports.handleConnect = function(client)
{
  stats.meter('connects').mark();

  // Initialize sessioninfos for this new session
  sessioninfos[client.id] = {};
}
/**
 * Kicks all sessions from a pad
 * @param padID the id of the pad
 */
exports.kickSessionsFromPad = function(padID)
{
  if (typeof socketio.sockets['clients'] !== 'function')
    return;

  // skip if there is nobody on this pad
  if (_getRoomClients(padID).length === 0)
    return;

  // disconnect everyone from this pad
  socketio.sockets.in(padID).json.send({ disconnect: "deleted" });
}
/**
 * Handles the disconnection of a user
 * @param client the client that leaves
 */
exports.handleDisconnect = async function(client)
{
  stats.meter('disconnects').mark();

  // save the padname of this session
  let session = sessioninfos[client.id];

  // if this connection was already established with a handshake, send a disconnect message to the others
  if (session && session.author) {
    // Get the IP address from our persistent object
    let ip = remoteAddress[client.id];

    // Anonymize the IP address if IP logging is disabled
    if (settings.disableIPlogging) {
      ip = 'ANONYMOUS';
    }

    accessLogger.info('[LEAVE] Pad "' + session.padId + '": Author "' + session.author + '" on client ' + client.id + ' with IP "' + ip + '" left the pad');

    // get the author color out of the db
    let color = await authorManager.getAuthorColorId(session.author);

    // prepare the notification for the other users on the pad, that this user left
    let messageToTheOtherUsers = {
      "type": "COLLABROOM",
      "data": {
        type: "USER_LEAVE",
        userInfo: {
          "ip": "127.0.0.1",
          "colorId": color,
          "userAgent": "Anonymous",
          "userId": session.author
        }
      }
    };

    // Go through all users that are still on the pad, and send them the USER_LEAVE message
    client.broadcast.to(session.padId).json.send(messageToTheOtherUsers);

    // Allow plugins to hook into users leaving the pad
    hooks.callAll("userLeave", session);
  }

  // Delete the sessioninfos entry of this session
  delete sessioninfos[client.id];
}
/**
 * Handles a message from a user
 * @param client the client that sent this message
 * @param message the message from the client
 */
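// Messages pass through this pipeline: rate limiting (in production), basic validation,
// the handleMessageSecurity/handleMessage hooks, an access check against the requested
// pad, and finally dispatch to the type-specific handlers in finalHandler().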
exports.handleMessage = async function(client, message)
{
  var env = process.env.NODE_ENV || 'development';

  if (env === 'production') {
    try {
      await rateLimiter.consume(client.handshake.address); // consume 1 point per event from IP
    } catch (e) {
      console.warn("Rate limited: ", client.handshake.address, " to reduce the amount of rate limiting that happens edit the rateLimit values in settings.json");
      stats.meter('rateLimited').mark();
      client.json.send({ disconnect: "rateLimited" });
      return;
    }
  }

  if (message == null) {
    return;
  }

  if (!message.type) {
    return;
  }

  let thisSession = sessioninfos[client.id];

  if (!thisSession) {
    messageLogger.warn("Dropped message from an unknown connection.")
    return;
  }

  async function handleMessageHook() {
    // Allow plugins to bypass the readonly message blocker
    let messages = await hooks.aCallAll("handleMessageSecurity", { client: client, message: message });

    for (let message of messages) {
      if (message === true) {
        thisSession.readonly = false;
        break;
      }
    }

    let dropMessage = false;

    // Call handleMessage hook. If a plugin returns null, the message will be dropped. Note that for all messages
    // handleMessage will be called, even if the client is not authorized
    messages = await hooks.aCallAll("handleMessage", { client: client, message: message });
    for (let message of messages) {
      if (message === null) {
        dropMessage = true;
        break;
      }
    }

    return dropMessage;
  }

  function finalHandler() {
    // Check what type of message we get and delegate to the other methods
    if (message.type === "CLIENT_READY") {
      handleClientReady(client, message);
    } else if (message.type === "CHANGESET_REQ") {
      handleChangesetRequest(client, message);
    } else if (message.type === "COLLABROOM") {
      if (thisSession.readonly) {
        messageLogger.warn("Dropped message, COLLABROOM for readonly pad");
      } else if (message.data.type === "USER_CHANGES") {
        stats.counter('pendingEdits').inc()
        padChannels.emit(message.padId, { client: client, message: message }); // add to pad queue
      } else if (message.data.type === "USERINFO_UPDATE") {
        handleUserInfoUpdate(client, message);
      } else if (message.data.type === "CHAT_MESSAGE") {
        handleChatMessage(client, message);
      } else if (message.data.type === "GET_CHAT_MESSAGES") {
        handleGetChatMessages(client, message);
      } else if (message.data.type === "SAVE_REVISION") {
        handleSaveRevisionMessage(client, message);
      } else if (message.data.type === "CLIENT_MESSAGE" &&
                 message.data.payload != null &&
                 message.data.payload.type === "suggestUserName") {
        handleSuggestUserName(client, message);
      } else {
        messageLogger.warn("Dropped message, unknown COLLABROOM Data Type " + message.data.type);
      }
    } else if (message.type === "SWITCH_TO_PAD") {
      handleSwitchToPad(client, message);
    } else {
      messageLogger.warn("Dropped message, unknown Message Type " + message.type);
    }
  }

  let dropMessage = await handleMessageHook();
  if (dropMessage) return;

  if (message.type === "CLIENT_READY") {
    // client tried to auth for the first time (first msg from the client)
    createSessionInfoAuth(client, message);
  }

  // the session may have been dropped during earlier processing
  if (!sessioninfos[client.id]) {
    messageLogger.warn("Dropping message from a connection that has gone away.")
    return;
  }

  // Simulate using the load testing tool
  if (!sessioninfos[client.id].auth) {
    console.error("Auth was never applied to a session. If you are using the stress-test tool then restart Etherpad and the Stress test tool.")
    return;
  }

  let auth = sessioninfos[client.id].auth;

  // check if pad is requested via readOnly
  let padId = auth.padID;

  if (padId.indexOf("r.") === 0) {
    // Pad is readOnly, first get the real Pad ID
    padId = await readOnlyManager.getPadId(padId);
  }

  let { accessStatus } = await securityManager.checkAccess(padId, auth.sessionID, auth.token, auth.password);

  if (accessStatus !== "grant") {
    // no access, send the client a message that tells him why
    client.json.send({ accessStatus });
    return;
  }

  // access was granted
  finalHandler();
}
/**
 * Handles a save revision message
 * @param client the client that sent this message
 * @param message the message from the client
 */
async function handleSaveRevisionMessage(client, message)
{
  var padId = sessioninfos[client.id].padId;
  var userId = sessioninfos[client.id].author;

  let pad = await padManager.getPad(padId);
  pad.addSavedRevision(pad.head, userId);
}
/**
 * Handles a custom message, different to the function below as it handles
 * objects not strings and you can direct the message to specific sessionID
 *
 * @param msg {Object} the message we're sending
 * @param sessionID {string} the socketIO session to which we're sending this message
 */
exports.handleCustomObjectMessage = function(msg, sessionID) {
  if (msg.data.type === "CUSTOM") {
    if (sessionID) {
      // a sessionID is targeted: directly to this sessionID
      socketio.sockets.socket(sessionID).json.send(msg);
    } else {
      // broadcast to all clients on this pad
      socketio.sockets.in(msg.data.payload.padId).json.send(msg);
    }
  }
}
/**
 * Handles a custom message (sent via HTTP API request)
 *
 * @param padID {Pad} the pad to which we're sending this message
 * @param msgString {String} the message we're sending
 */
exports.handleCustomMessage = function(padID, msgString) {
  let time = Date.now();
  let msg = {
    type: 'COLLABROOM',
    data: {
      type: msgString,
      time: time
    }
  };

  socketio.sockets.in(padID).json.send(msg);
}
/**
 * Handles a Chat Message
 * @param client the client that sent this message
 * @param message the message from the client
 */
function handleChatMessage(client, message)
{
  var time = Date.now();
  var userId = sessioninfos[client.id].author;
  var text = message.data.text;
  var padId = sessioninfos[client.id].padId;

  exports.sendChatMessageToPadClients(time, userId, text, padId);
}

/**
 * Sends a chat message to all clients of this pad
 * @param time the timestamp of the chat message
 * @param userId the author id of the chat message
 * @param text the text of the chat message
 * @param padId the padId to send the chat message to
 */
exports.sendChatMessageToPadClients = async function(time, userId, text, padId)
{
  // get the pad
  let pad = await padManager.getPad(padId);

  // get the author
  let userName = await authorManager.getAuthorName(userId);

  // save the chat message
  pad.appendChatMessage(text, userId, time);

  let msg = {
    type: "COLLABROOM",
    data: { type: "CHAT_MESSAGE", userId, userName, time, text }
  };

  // broadcast the chat message to everyone on the pad
  socketio.sockets.in(padId).json.send(msg);
}
/**
 * Handles the client's request for more chat-messages
 * @param client the client that sent this message
 * @param message the message from the client
 */
async function handleGetChatMessages(client, message)
{
  if (message.data.start == null) {
    messageLogger.warn("Dropped message, GetChatMessages Message has no start!");
    return;
  }

  if (message.data.end == null) {
    messageLogger.warn("Dropped message, GetChatMessages Message has no end!");
    return;
  }

  let start = message.data.start;
  let end = message.data.end;
  let count = end - start;

  if (count < 0 || count > 100) {
    messageLogger.warn("Dropped message, GetChatMessages Message, client requested invalid amount of messages!");
    return;
  }

  let padId = sessioninfos[client.id].padId;
  let pad = await padManager.getPad(padId);

  let chatMessages = await pad.getChatMessages(start, end);

  let infoMsg = {
    type: "COLLABROOM",
    data: {
      type: "CHAT_MESSAGES",
      messages: chatMessages
    }
  };

  // send the messages back to the client
  client.json.send(infoMsg);
}
/**
 * Handles a suggestUserName message, which means a user has suggested a userName for another user
 * @param client the client that sent this message
 * @param message the message from the client
 */
function handleSuggestUserName(client, message)
{
  // check if all ok
  if (message.data.payload.newName == null) {
    messageLogger.warn("Dropped message, suggestUserName Message has no newName!");
    return;
  }

  if (message.data.payload.unnamedId == null) {
    messageLogger.warn("Dropped message, suggestUserName Message has no unnamedId!");
    return;
  }

  var padId = sessioninfos[client.id].padId;
  var roomClients = _getRoomClients(padId);

  // search the author and send him this message
  roomClients.forEach(function(client) {
    var session = sessioninfos[client.id];

    if (session && session.author === message.data.payload.unnamedId) {
      client.json.send(message);
    }
  });
}
/**
 * Handles a USERINFO_UPDATE, which means that a user has changed his color or name. Anyway, we get both pieces of information
 * @param client the client that sent this message
 * @param message the message from the client
 */
function handleUserInfoUpdate(client, message)
{
  // check if all ok
  if (message.data.userInfo == null) {
    messageLogger.warn("Dropped message, USERINFO_UPDATE Message has no userInfo!");
    return;
  }

  if (message.data.userInfo.colorId == null) {
    messageLogger.warn("Dropped message, USERINFO_UPDATE Message has no colorId!");
    return;
  }

  // Check that we have a valid session and author to update.
  var session = sessioninfos[client.id];
  if (!session || !session.author || !session.padId) {
    messageLogger.warn("Dropped message, USERINFO_UPDATE Session not ready." + message.data);
    return;
  }

  // Find out the author name of this session
  var author = session.author;

  // Check colorId is a Hex color
  var isColor = /(^#[0-9A-F]{6}$)|(^#[0-9A-F]{3}$)/i.test(message.data.userInfo.colorId) // for #f00 (Thanks Smamatti)
  if (!isColor) {
    messageLogger.warn("Dropped message, USERINFO_UPDATE Color is malformed." + message.data);
    return;
  }

  // Tell the authorManager about the new attributes
  authorManager.setAuthorColorId(author, message.data.userInfo.colorId);
  authorManager.setAuthorName(author, message.data.userInfo.name);

  var padId = session.padId;

  var infoMsg = {
    type: "COLLABROOM",
    data: {
      // The Client doesn't know about USERINFO_UPDATE, use USER_NEWINFO
      type: "USER_NEWINFO",
      userInfo: {
        userId: author,

        // set a null name when there is no name set, because the client expects it to be null
        name: message.data.userInfo.name || null,

        colorId: message.data.userInfo.colorId,
        userAgent: "Anonymous",
        ip: "127.0.0.1",
      }
    }
  };

  // Send the other clients on the pad the update message
  client.broadcast.to(padId).json.send(infoMsg);
}
/**
 * Handles a USER_CHANGES message, where the client submits its local
 * edits as a changeset.
 *
 * This handler's job is to update the incoming changeset so that it applies
 * to the latest revision, then add it to the pad, broadcast the changes
 * to all other clients, and send a confirmation to the submitting client.
 *
 * This function is based on a similar one in the original Etherpad.
 * See https://github.com/ether/pad/blob/master/etherpad/src/etherpad/collab/collab_server.js in the function applyUserChanges()
 *
 * @param client the client that sent this message
 * @param message the message from the client
 */
async function handleUserChanges(data)
{
  var client = data.client
    , message = data.message

  // This one's no longer pending, as we're gonna process it now
  stats.counter('pendingEdits').dec()

  // Make sure all required fields are present
  if (message.data.baseRev == null) {
    messageLogger.warn("Dropped message, USER_CHANGES Message has no baseRev!");
    return;
  }

  if (message.data.apool == null) {
    messageLogger.warn("Dropped message, USER_CHANGES Message has no apool!");
    return;
  }

  if (message.data.changeset == null) {
    messageLogger.warn("Dropped message, USER_CHANGES Message has no changeset!");
    return;
  }

  // TODO: this might happen with other messages too => find one place to copy the session
  // and always use the copy. atm a message will be ignored if the session is gone even
  // if the session was valid when the message arrived in the first place
  if (!sessioninfos[client.id]) {
    messageLogger.warn("Dropped message, disconnect happened in the mean time");
    return;
  }

  // get all Vars we need
  var baseRev = message.data.baseRev;
  var wireApool = (new AttributePool()).fromJsonable(message.data.apool);
  var changeset = message.data.changeset;

  // The client might disconnect between our callbacks. We should still
  // finish processing the changeset, so keep a reference to the session.
  var thisSession = sessioninfos[client.id];

  // Measure time to process edit
  var stopWatch = stats.timer('edits').start();

  // get the pad
  let pad = await padManager.getPad(thisSession.padId);

  // create the changeset
  try {
    try {
      // Verify that the changeset has valid syntax and is in canonical form
      Changeset.checkRep(changeset);

      // Verify that the attribute indexes used in the changeset are all
      // defined in the accompanying attribute pool.
      Changeset.eachAttribNumber(changeset, function(n) {
        if (!wireApool.getAttrib(n)) {
          throw new Error("Attribute pool is missing attribute " + n + " for changeset " + changeset);
        }
      });

      // Validate all added 'author' attribs to be the same value as the current user
      var iterator = Changeset.opIterator(Changeset.unpack(changeset).ops)
        , op;

      while (iterator.hasNext()) {
        op = iterator.next()

        // + can add text with attribs
        // = can change or add attribs
        // - can have attribs, but they are discarded and don't show up in the attribs - but do show up in the pool

        op.attribs.split('*').forEach(function(attr) {
          if (!attr) return;

          attr = wireApool.getAttrib(attr);
          if (!attr) return;

          // the empty author is used in the clearAuthorship functionality so this should be the only exception
          if ('author' === attr[0] && (attr[1] !== thisSession.author && attr[1] !== '')) {
            throw new Error("Trying to submit changes as another author in changeset " + changeset);
          }
        });
      }

      // ex. adoptChangesetAttribs

      // Afaik, it copies the new attributes from the changeset, to the global Attribute Pool
      changeset = Changeset.moveOpsToNewPool(changeset, wireApool, pad.pool);

    } catch (e) {
      // There is an error in this changeset, so just refuse it
      client.json.send({ disconnect: "badChangeset" });
      stats.meter('failedChangesets').mark();
      throw new Error("Can't apply USER_CHANGES, because " + e.message);
    }

    // ex. applyUserChanges
    let apool = pad.pool;
    let r = baseRev;

    // The client's changeset might not be based on the latest revision,
    // since other clients are sending changes at the same time.
    // Update the changeset so that it can be applied to the latest revision.
    while (r < pad.getHeadRevisionNumber()) {
      r++;

      let c = await pad.getRevisionChangeset(r);

      // At this point, both "c" (from the pad) and "changeset" (from the
      // client) are relative to revision r - 1. The follow function
      // rebases "changeset" so that it is relative to revision r
      // and can be applied after "c".

      try {
        // a changeset can be based on an old revision with the same changes in it
        // prevent eplite from accepting it TODO: better send the client a NEW_CHANGES
        // of that revision
        if (baseRev + 1 === r && c === changeset) {
          client.json.send({ disconnect: "badChangeset" });
          stats.meter('failedChangesets').mark();
          throw new Error("Won't apply USER_CHANGES, because it contains an already accepted changeset");
        }

        changeset = Changeset.follow(c, changeset, false, apool);
      } catch (e) {
        client.json.send({ disconnect: "badChangeset" });
        stats.meter('failedChangesets').mark();
        throw new Error("Can't apply USER_CHANGES, because " + e.message);
      }
    }

    let prevText = pad.text();

    if (Changeset.oldLen(changeset) !== prevText.length) {
      client.json.send({ disconnect: "badChangeset" });
      stats.meter('failedChangesets').mark();
      throw new Error("Can't apply USER_CHANGES " + changeset + " with oldLen " + Changeset.oldLen(changeset) + " to document of length " + prevText.length);
    }

    try {
      pad.appendRevision(changeset, thisSession.author);
    } catch (e) {
      client.json.send({ disconnect: "badChangeset" });
      stats.meter('failedChangesets').mark();
      throw e;
    }

    let correctionChangeset = _correctMarkersInPad(pad.atext, pad.pool);
    if (correctionChangeset) {
      pad.appendRevision(correctionChangeset);
    }

    // Make sure the pad always ends with an empty line.
    if (pad.text().lastIndexOf("\n") !== pad.text().length - 1) {
      var nlChangeset = Changeset.makeSplice(pad.text(), pad.text().length - 1, 0, "\n");
      pad.appendRevision(nlChangeset);
    }

    await exports.updatePadClients(pad);
  } catch (err) {
    console.warn(err.stack || err);
  }

  stopWatch.end();
}
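/**
 * Sends all clients on a pad any new changesets they have not received yet and
 * updates each session's stored revision number and timestamp accordingly.
 */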
exports.updatePadClients = async function(pad)
{
  // skip this if no-one is on this pad
  let roomClients = _getRoomClients(pad.id);

  if (roomClients.length === 0) {
    return;
  }

  // since all clients usually get the same set of changesets, store them in local cache
  // to remove unnecessary roundtrip to the datalayer
  // NB: note below possibly now accommodated via the change to promises/async
  // TODO: in REAL world, if we're working without datalayer cache, all requests to revisions will be fired
  // BEFORE first result will be landed to our cache object. The solution is to replace parallel processing
  // via async.forEach with sequential for() loop. There are no real benefits of running this in parallel,
  // but the benefit of reusing the cached revision object is HUGE
  let revCache = {};

  // go through all sessions on this pad
  for (let client of roomClients) {
    let sid = client.id;

    // send them all new changesets
    while (sessioninfos[sid] && sessioninfos[sid].rev < pad.getHeadRevisionNumber()) {
      let r = sessioninfos[sid].rev + 1;
      let revision = revCache[r];
      if (!revision) {
        revision = await pad.getRevision(r);
        revCache[r] = revision;
      }

      let author = revision.meta.author,
          revChangeset = revision.changeset,
          currentTime = revision.meta.timestamp;

      // next if session has not been deleted
      if (sessioninfos[sid] == null) {
        continue;
      }

      if (author === sessioninfos[sid].author) {
        client.json.send({ "type": "COLLABROOM", "data": { type: "ACCEPT_COMMIT", newRev: r } });
      } else {
        let forWire = Changeset.prepareForWire(revChangeset, pad.pool);
        let wireMsg = {"type": "COLLABROOM",
                       "data": { type: "NEW_CHANGES",
                                 newRev: r,
                                 changeset: forWire.translated,
                                 apool: forWire.pool,
                                 author: author,
                                 currentTime: currentTime,
                                 timeDelta: currentTime - sessioninfos[sid].time
                               }};

        client.json.send(wireMsg);
      }

      if (sessioninfos[sid]) {
        sessioninfos[sid].time = currentTime;
        sessioninfos[sid].rev = r;
      }
    }
  }
}
/**
 * Copied from the original Etherpad source code. Returns a changeset that removes
 * line markers (e.g. bullets) that are not at the start of a line, or null if the
 * pad text contains no such markers.
 */
function _correctMarkersInPad(atext, apool) {
  var text = atext.text;

  // collect char positions of line markers (e.g. bullets) in new atext
  // that aren't at the start of a line
  var badMarkers = [];
  var iter = Changeset.opIterator(atext.attribs);
  var offset = 0;

  while (iter.hasNext()) {
    var op = iter.next();

    var hasMarker = _.find(AttributeManager.lineAttributes, function(attribute) {
      return Changeset.opAttributeValue(op, attribute, apool);
    }) !== undefined;

    if (hasMarker) {
      for (var i = 0; i < op.chars; i++) {
        if (offset > 0 && text.charAt(offset - 1) !== '\n') {
          badMarkers.push(offset);
        }
        offset++;
      }
    } else {
      offset += op.chars;
    }
  }

  if (badMarkers.length === 0) {
    return null;
  }

  // create changeset that removes these bad markers
  offset = 0;

  var builder = Changeset.builder(text.length);

  badMarkers.forEach(function(pos) {
    builder.keepText(text.substring(offset, pos));
    builder.remove(1);
    offset = pos + 1;
  });

  return builder.toString();
}
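/**
 * Handles a SWITCH_TO_PAD message: detaches this author's sessions from the current
 * pad, then re-runs the CLIENT_READY flow so the client joins the new pad.
 */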
function handleSwitchToPad(client, message)
{
  // clear the session and leave the room
  let currentSession = sessioninfos[client.id];
  let padId = currentSession.padId;
  let roomClients = _getRoomClients(padId);

  roomClients.forEach(client => {
    let sinfo = sessioninfos[client.id];

    if (sinfo && sinfo.author === currentSession.author) {
      // fix user's counter, works on page refresh or if user closes browser window and then rejoins
      sessioninfos[client.id] = {};
      client.leave(padId);
    }
  });

  // start up the new pad
  createSessionInfoAuth(client, message);
  handleClientReady(client, message);
}

// Creates/replaces the auth object in the client's session info. Session info for the client must
// already exist.
function createSessionInfoAuth(client, message)
{
  // Remember this information since we won't
  // have the cookie in further socket.io messages.
  // This information will be used to check if
  // the sessionId of this connection is still valid
  // since it could have been deleted by the API.
  sessioninfos[client.id].auth =
  {
    sessionID: message.sessionID,
    padID: message.padId,
    token: message.token,
    password: message.password
  };
}
/**
 * Handles a CLIENT_READY. A CLIENT_READY is the first message from the client to the server. The client sends its token
 * and the pad it wants to enter. The server answers with the initial values (clientVars) of the pad
 * @param client the client that sent this message
 * @param message the message from the client
 */
async function handleClientReady(client, message)
{
  // check if all ok
  if (!message.token) {
    messageLogger.warn("Dropped message, CLIENT_READY Message has no token!");
    return;
  }

  if (!message.padId) {
    messageLogger.warn("Dropped message, CLIENT_READY Message has no padId!");
    return;
  }

  if (!message.protocolVersion) {
    messageLogger.warn("Dropped message, CLIENT_READY Message has no protocolVersion!");
    return;
  }

  if (message.protocolVersion !== 2) {
    messageLogger.warn("Dropped message, CLIENT_READY Message has an unknown protocolVersion '" + message.protocolVersion + "'!");
    return;
  }

  hooks.callAll("clientReady", message);

  // Get ro/rw id:s
  let padIds = await readOnlyManager.getIds(message.padId);

  // FIXME: Allow to override readwrite access with readonly
  const { accessStatus, authorID } = await securityManager.checkAccess(
      padIds.padId, message.sessionID, message.token, message.password);

  // no access, send the client a message that tells him why
  if (accessStatus !== "grant") {
    client.json.send({ accessStatus });
    return;
  }
  // get all authordata of this new user
  assert(authorID);
  let value = await authorManager.getAuthor(authorID);
  let authorColorId = value.colorId;
  let authorName = value.name;

  // load the pad-object from the database
  let pad = await padManager.getPad(padIds.padId);

  // these db requests all need the pad object (timestamp of latest revision, author data)
  let authors = pad.getAllAuthors();

  // get timestamp of latest revision needed for timeslider
  let currentTime = await pad.getRevisionDate(pad.getHeadRevisionNumber());

  // get all author data out of the database (in parallel)
  let historicalAuthorData = {};
  await Promise.all(authors.map(authorId => {
    return authorManager.getAuthor(authorId).then(author => {
      if (!author) {
        messageLogger.error("There is no author for authorId: ", authorId, ". This is possibly related to https://github.com/ether/etherpad-lite/issues/2802");
      } else {
        historicalAuthorData[authorId] = { name: author.name, colorId: author.colorId }; // Filter author attribs (e.g. don't send author's pads to all clients)
      }
    });
  }));

  let thisUserHasEditedThisPad = false;
  if (historicalAuthorData[authorID]) {
    /*
     * This flag is set to true when a user contributes to a specific pad for
     * the first time. It is used for deciding if importing to that pad is
     * allowed or not.
     */
    thisUserHasEditedThisPad = true;
  }
  // glue the clientVars together, send them and tell the other clients that a new one is there

  // Check that the client is still here. It might have disconnected between callbacks.
  if (sessioninfos[client.id] === undefined) {
    return;
  }

  // Check if this author is already on the pad, if yes, kick the other sessions!
  let roomClients = _getRoomClients(pad.id);

  for (let client of roomClients) {
    let sinfo = sessioninfos[client.id];
    if (sinfo && sinfo.author === authorID) {
      // fix user's counter, works on page refresh or if user closes browser window and then rejoins
      sessioninfos[client.id] = {};
      client.leave(padIds.padId);
      client.json.send({ disconnect: "userdup" });
    }
  }

  // Save in sessioninfos that this session belongs to this pad
  sessioninfos[client.id].padId = padIds.padId;
  sessioninfos[client.id].readOnlyPadId = padIds.readOnlyPadId;
  sessioninfos[client.id].readonly = padIds.readonly;

  // Log creation/(re-)entering of a pad
  let ip = remoteAddress[client.id];

  // Anonymize the IP address if IP logging is disabled
  if (settings.disableIPlogging) {
    ip = 'ANONYMOUS';
  }

  if (pad.head > 0) {
    accessLogger.info('[ENTER] Pad "' + padIds.padId + '": Client ' + client.id + ' with IP "' + ip + '" entered the pad');
  } else if (pad.head === 0) {
    accessLogger.info('[CREATE] Pad "' + padIds.padId + '": Client ' + client.id + ' with IP "' + ip + '" created the pad');
  }

  if (message.reconnect) {
    // If this is a reconnect, we don't have to send the client the ClientVars again
    // Join the pad and start receiving updates
    client.join(padIds.padId);

    // Save the revision in sessioninfos, we take the revision from the info the client sent to us
    sessioninfos[client.id].rev = message.client_rev;

    // During the client reconnect, client might miss some revisions from other clients. By using client revision,
    // this below code sends all the revisions missed during the client reconnect
    var revisionsNeeded = [];
    var changesets = {};

    var startNum = message.client_rev + 1;
    var endNum = pad.getHeadRevisionNumber() + 1;

    var headNum = pad.getHeadRevisionNumber();

    if (endNum > headNum + 1) {
      endNum = headNum + 1;
    }

    if (startNum < 0) {
      startNum = 0;
    }

    for (let r = startNum; r < endNum; r++) {
      revisionsNeeded.push(r);
      changesets[r] = {};
    }

    // get changesets, author and timestamp needed for pending revisions (in parallel)
    let promises = [];
    for (let revNum of revisionsNeeded) {
      let cs = changesets[revNum];
      promises.push(pad.getRevisionChangeset(revNum).then(result => cs.changeset = result));
      promises.push(pad.getRevisionAuthor(revNum).then(result => cs.author = result));
      promises.push(pad.getRevisionDate(revNum).then(result => cs.timestamp = result));
    }
    await Promise.all(promises);

    // return pending changesets
    for (let r of revisionsNeeded) {
      let forWire = Changeset.prepareForWire(changesets[r]['changeset'], pad.pool);
      let wireMsg = {"type": "COLLABROOM",
                     "data": { type: "CLIENT_RECONNECT",
                               headRev: pad.getHeadRevisionNumber(),
                               newRev: r,
                               changeset: forWire.translated,
                               apool: forWire.pool,
                               author: changesets[r]['author'],
                               currentTime: changesets[r]['timestamp']
                             }};

      client.json.send(wireMsg);
    }

    if (startNum === endNum) {
      var Msg = {"type": "COLLABROOM",
                 "data": { type: "CLIENT_RECONNECT",
                           noChanges: true,
                           newRev: pad.getHeadRevisionNumber()
                         }};
      client.json.send(Msg);
    }
  } else {
    // This is a normal first connect

    // prepare all values for the wire, there's a chance that this throws, if the pad is corrupted
    try {
      var atext = Changeset.cloneAText(pad.atext);
      var attribsForWire = Changeset.prepareForWire(atext.attribs, pad.pool);
      var apool = attribsForWire.pool.toJsonable();
      atext.attribs = attribsForWire.translated;
    } catch (e) {
      console.error(e.stack || e)
      client.json.send({ disconnect: "corruptPad" }); // pull the brakes

      return;
    }

    // Warning: never ever send padIds.padId to the client. If the
    // client is read only you would open a security hole 1 swedish
    // mile wide...
    var clientVars = {
      "skinName": settings.skinName,
      "skinVariants": settings.skinVariants,
      "randomVersionString": settings.randomVersionString,
      "accountPrivs": {
        "maxRevisions": 100
      },
      "automaticReconnectionTimeout": settings.automaticReconnectionTimeout,
      "initialRevisionList": [],
      "initialOptions": {
        "guestPolicy": "deny"
      },
      "savedRevisions": pad.getSavedRevisions(),
      "collab_client_vars": {
        "initialAttributedText": atext,
        "clientIp": "127.0.0.1",
        "padId": message.padId,
        "historicalAuthorData": historicalAuthorData,
        "apool": apool,
        "rev": pad.getHeadRevisionNumber(),
        "time": currentTime,
      },
      "colorPalette": authorManager.getColorPalette(),
      "clientIp": "127.0.0.1",
      "userIsGuest": true,
      "userColor": authorColorId,
      "padId": message.padId,
      "padOptions": settings.padOptions,
      "padShortcutEnabled": settings.padShortcutEnabled,
      "initialTitle": "Pad: " + message.padId,
      "opts": {},
      // tell the client the number of the latest chat-message, which will be
      // used to request the latest 100 chat-messages later (GET_CHAT_MESSAGES)
      "chatHead": pad.chatHead,
      "numConnectedUsers": roomClients.length,
      "readOnlyId": padIds.readOnlyPadId,
      "readonly": padIds.readonly,
      "serverTimestamp": Date.now(),
      "userId": authorID,
      "abiwordAvailable": settings.abiwordAvailable(),
      "sofficeAvailable": settings.sofficeAvailable(),
      "exportAvailable": settings.exportAvailable(),
      "plugins": {
        "plugins": plugins.plugins,
        "parts": plugins.parts,
      },
      "indentationOnNewLine": settings.indentationOnNewLine,
      "scrollWhenFocusLineIsOutOfViewport": {
        "percentage": {
          "editionAboveViewport": settings.scrollWhenFocusLineIsOutOfViewport.percentage.editionAboveViewport,
          "editionBelowViewport": settings.scrollWhenFocusLineIsOutOfViewport.percentage.editionBelowViewport,
        },
        "duration": settings.scrollWhenFocusLineIsOutOfViewport.duration,
        "scrollWhenCaretIsInTheLastLineOfViewport": settings.scrollWhenFocusLineIsOutOfViewport.scrollWhenCaretIsInTheLastLineOfViewport,
        "percentageToScrollWhenUserPressesArrowUp": settings.scrollWhenFocusLineIsOutOfViewport.percentageToScrollWhenUserPressesArrowUp,
      },
      "initialChangesets": [], // FIXME: REMOVE THIS SHIT
      "thisUserHasEditedThisPad": thisUserHasEditedThisPad,
      "allowAnyoneToImport": settings.allowAnyoneToImport
    }

    // Add a username to the clientVars if one is available
    if (authorName != null) {
      clientVars.userName = authorName;
    }

    // call the clientVars-hook so plugins can modify them before they get sent to the client
    let messages = await hooks.aCallAll('clientVars', { clientVars, pad, socket: client });

    // combine our old object with the new attributes from the hook
    for (let msg of messages) {
      Object.assign(clientVars, msg);
    }

    // Join the pad and start receiving updates
    client.join(padIds.padId);

    // Send the clientVars to the Client
    client.json.send({ type: "CLIENT_VARS", data: clientVars });

    // Save the current revision in sessioninfos, should be the same as in clientVars
    sessioninfos[client.id].rev = pad.getHeadRevisionNumber();

    sessioninfos[client.id].author = authorID;

    // prepare the notification for the other users on the pad, that this user joined
    let messageToTheOtherUsers = {
      "type": "COLLABROOM",
      "data": {
        type: "USER_NEWINFO",
        userInfo: {
          "ip": "127.0.0.1",
          "colorId": authorColorId,
          "userAgent": "Anonymous",
          "userId": authorID,
        }
      }
    };

    // Add the authorname of this new User, if available
    if (authorName != null) {
      messageToTheOtherUsers.data.userInfo.name = authorName;
    }

    // notify all existing users about new user
    client.broadcast.to(padIds.padId).json.send(messageToTheOtherUsers);

    // Get sessions for this pad and update them (in parallel)
    roomClients = _getRoomClients(pad.id);
    await Promise.all(_getRoomClients(pad.id).map(async roomClient => {

      // Jump over, if this session is the connection session
      if (roomClient.id === client.id) {
        return;
      }

      // Since sessioninfos might change while being enumerated, check if the
      // sessionID is still assigned to a valid session
      if (sessioninfos[roomClient.id] === undefined) {
        return;
      }

      // get the authorname & colorId
      let author = sessioninfos[roomClient.id].author;
      let cached = historicalAuthorData[author];

      // reuse previously created cache of author's data
      let authorInfo = await (cached ? Promise.resolve(cached) : authorManager.getAuthor(author));

      // default fallback color to use if authorInfo.colorId is null
      const defaultColor = "#daf0b2";

      if (!authorInfo) {
        console.warn(`handleClientReady(): no authorInfo parameter was received. Default values are going to be used. See issue #3612. This can be caused by a user clicking undo after clearing all authorship colors see #2802`);
        authorInfo = {};
      }

      // For some reason sometimes name isn't set
      // Catch this issue here and use a fixed name.
      if (!authorInfo.name) {
        console.warn(`handleClientReady(): client submitted no author name. Using "Anonymous". See: issue #3612`);
        authorInfo.name = "Anonymous";
      }

      // For some reason sometimes colorId isn't set
      // Catch this issue here and use a fixed color.
      if (!authorInfo.colorId) {
        console.warn(`handleClientReady(): author "${authorInfo.name}" has no property colorId. Using the default color ${defaultColor}. See issue #3612`);
        authorInfo.colorId = defaultColor;
      }

      // Send the new User a Notification about this other user
      let msg = {
        "type": "COLLABROOM",
        "data": {
          type: "USER_NEWINFO",
          userInfo: {
            "ip": "127.0.0.1",
            "colorId": authorInfo.colorId,
            "name": authorInfo.name,
            "userAgent": "Anonymous",
            "userId": author
          }
        }
      };

      client.json.send(msg);
    }));
  }
}

/**
 * Handles a request for a rough changeset, which the timeslider client needs
 */
async function handleChangesetRequest(client, message)
{
  // check if all ok
  if (message.data == null) {
    messageLogger.warn("Dropped message, changeset request has no data!");
    return;
  }

  if (message.padId == null) {
    messageLogger.warn("Dropped message, changeset request has no padId!");
    return;
  }

  if (message.data.granularity == null) {
    messageLogger.warn("Dropped message, changeset request has no granularity!");
    return;
  }

  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isInteger#Polyfill
  if (Math.floor(message.data.granularity) !== message.data.granularity) {
    messageLogger.warn("Dropped message, changeset request granularity is not an integer!");
    return;
  }

  if (message.data.start == null) {
    messageLogger.warn("Dropped message, changeset request has no start!");
    return;
  }

  if (message.data.requestID == null) {
    messageLogger.warn("Dropped message, changeset request has no requestID!");
    return;
  }

  let granularity = message.data.granularity;
  let start = message.data.start;
  let end = start + (100 * granularity);

  let padIds = await readOnlyManager.getIds(message.padId);

  // build the requested rough changesets and send them back
  try {
    let data = await getChangesetInfo(padIds.padId, start, end, granularity);
    data.requestID = message.data.requestID;
    client.json.send({type: "CHANGESET_REQ", data});
  } catch (err) {
    console.error('Error while handling a changeset request for ' + padIds.padId, err.toString(), message.data);
  }
}
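
// Illustrative only: a changeset request message as this handler expects it, based on the
// field checks above. The concrete values (pad name, granularity, requestID) are made up,
// and the top-level "type" is assumed to mirror the CHANGESET_REQ response type sent back.
//
//   {
//     type: "CHANGESET_REQ",
//     padId: "examplePadId",
//     data: {
//       granularity: 100,          // revisions covered by each rough changeset step
//       start: 0,                  // first revision of the requested range
//       requestID: "7dI2wFgXl0"    // opaque id, echoed back on the response
//     }
//   }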

/**
 * Tries to rebuild the getChangesetInfo function of the original Etherpad
 * https://github.com/ether/pad/blob/master/etherpad/src/etherpad/control/pad/pad_changeset_control.js#L144
 */
async function getChangesetInfo(padId, startNum, endNum, granularity)
{
  let pad = await padManager.getPad(padId);
  let head_revision = pad.getHeadRevisionNumber();

  // calculate the last full endnum
  if (endNum > head_revision + 1) {
    endNum = head_revision + 1;
  }
  endNum = Math.floor(endNum / granularity) * granularity;

  let compositesChangesetNeeded = [];
  let revTimesNeeded = [];

  // figure out which composite Changesets and revTimes we need, to load them in bulk
  for (let start = startNum; start < endNum; start += granularity) {
    let end = start + granularity;

    // add the composite Changeset we need
    compositesChangesetNeeded.push({start, end});

    // add the t1 time we need
    revTimesNeeded.push(start === 0 ? 0 : start - 1);

    // add the t2 time we need
    revTimesNeeded.push(end - 1);
  }

  // get all needed db values in parallel - no await here since
  // it would make all the lookups run in series

  // get all needed composite Changesets
  let composedChangesets = {};
  let p1 = Promise.all(compositesChangesetNeeded.map(item => {
    return composePadChangesets(padId, item.start, item.end).then(changeset => {
      composedChangesets[item.start + "/" + item.end] = changeset;
    });
  }));

  // get all needed revision Dates
  let revisionDate = [];
  let p2 = Promise.all(revTimesNeeded.map(revNum => {
    return pad.getRevisionDate(revNum).then(revDate => {
      revisionDate[revNum] = Math.floor(revDate / 1000);
    });
  }));

  // get the lines
  let lines;
  let p3 = getPadLines(padId, startNum - 1).then(_lines => {
    lines = _lines;
  });

  // wait for all of the above to complete
  await Promise.all([p1, p2, p3]);

  // not entirely sure what happens here exactly :/
  let timeDeltas = [];
  let forwardsChangesets = [];
  let backwardsChangesets = [];
  let apool = new AttributePool();

  for (let compositeStart = startNum; compositeStart < endNum; compositeStart += granularity) {
    let compositeEnd = compositeStart + granularity;
    if (compositeEnd > endNum || compositeEnd > head_revision + 1) {
      break;
    }

    let forwards = composedChangesets[compositeStart + "/" + compositeEnd];
    let backwards = Changeset.inverse(forwards, lines.textlines, lines.alines, pad.apool());

    Changeset.mutateAttributionLines(forwards, lines.alines, pad.apool());
    Changeset.mutateTextLines(forwards, lines.textlines);

    let forwards2 = Changeset.moveOpsToNewPool(forwards, pad.apool(), apool);
    let backwards2 = Changeset.moveOpsToNewPool(backwards, pad.apool(), apool);

    let t1 = (compositeStart === 0) ? revisionDate[0] : revisionDate[compositeStart - 1];
    let t2 = revisionDate[compositeEnd - 1];

    timeDeltas.push(t2 - t1);
    forwardsChangesets.push(forwards2);
    backwardsChangesets.push(backwards2);
  }

  return {forwardsChangesets, backwardsChangesets,
          apool: apool.toJsonable(), actualEndNum: endNum,
          timeDeltas, start: startNum, granularity};
}
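
// Illustrative only: rough shape of the object returned for a call like
// getChangesetInfo(padId, 0, 10000, 100); the array contents and numbers are made up.
//
//   {
//     forwardsChangesets: [...],   // one composed changeset per granularity step
//     backwardsChangesets: [...],  // the inverse changesets, for scrubbing backwards
//     apool: {...},                // attribute pool the changesets refer to (apool.toJsonable())
//     actualEndNum: 10000,         // endNum after clamping to head and granularity
//     timeDeltas: [...],           // seconds between the start and end revision of each step
//     start: 0,
//     granularity: 100
//   }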

/**
 * Tries to rebuild the getPadLines function of the original Etherpad
 * https://github.com/ether/pad/blob/master/etherpad/src/etherpad/control/pad/pad_changeset_control.js#L263
 */
async function getPadLines(padId, revNum)
{
  let pad = await padManager.getPad(padId);

  // get the atext
  let atext;

  if (revNum >= 0) {
    atext = await pad.getInternalRevisionAText(revNum);
  } else {
    atext = Changeset.makeAText("\n");
  }

  return {
    textlines: Changeset.splitTextLines(atext.text),
    alines: Changeset.splitAttributionLines(atext.attribs, atext.text)
  };
}
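
// Illustrative only: for a revision whose text is "foo\nbar\n", the result would look roughly
// like this (the attribution strings are invented placeholders, not real pool references):
//
//   {
//     textlines: ["foo\n", "bar\n"],
//     alines: ["|1+4", "|1+4"]
//   }
//
// Passing revNum = -1 (as getChangesetInfo() does when startNum is 0) yields the lines of an
// empty pad, i.e. a single "\n".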

/**
 * Tries to rebuild the composePadChangesets function of the original Etherpad
 * https://github.com/ether/pad/blob/master/etherpad/src/etherpad/control/pad/pad_changeset_control.js#L241
 */
async function composePadChangesets(padId, startNum, endNum)
{
  let pad = await padManager.getPad(padId);

  // fetch all changesets we need
  let headNum = pad.getHeadRevisionNumber();
  endNum = Math.min(endNum, headNum + 1);
  startNum = Math.max(startNum, 0);

  // create an array for all changesets, we will
  // replace the values with the changesets later
  let changesetsNeeded = [];
  for (let r = startNum; r < endNum; r++) {
    changesetsNeeded.push(r);
  }

  // get all changesets
  let changesets = {};
  await Promise.all(changesetsNeeded.map(revNum => {
    return pad.getRevisionChangeset(revNum).then(changeset => changesets[revNum] = changeset);
  }));

  // compose Changesets
  let r;
  try {
    let changeset = changesets[startNum];
    let pool = pad.apool();

    for (r = startNum + 1; r < endNum; r++) {
      let cs = changesets[r];
      changeset = Changeset.compose(changeset, cs, pool);
    }

    return changeset;

  } catch (e) {
    // r-1 indicates the rev that was built, starting with startNum and applying startNum+1, +2, +3, ...
    console.warn("failed to compose cs in pad:", padId, " startrev:", startNum, " current rev:", r);
    throw e;
  }
}
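
// Illustrative only: composePadChangesets(padId, 5, 10) starts from the changeset of
// revision 5 and composes the changesets of revisions 6, 7, 8 and 9 onto it, so the result
// is a single changeset that transforms the pad text at revision 4 into the text at revision 9.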

function _getRoomClients(padID) {
  var roomClients = [];
  var room = socketio.sockets.adapter.rooms[padID];

  if (room) {
    for (var id in room.sockets) {
      roomClients.push(socketio.sockets.sockets[id]);
    }
  }

  return roomClients;
}
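
// Illustrative only: the socket.io room name is the pad id, so a call like
// _getRoomClients("myPad") returns the socket objects of every client currently joined to
// the pad "myPad", or an empty array if nobody is connected.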

/**
 * Get the number of users in a pad
 */
exports.padUsersCount = function(padID) {
  return {
    padUsersCount: _getRoomClients(padID).length
  }
}
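
// Illustrative only: with three clients connected to the pad, this returns
// { padUsersCount: 3 }.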

/**
 * Get the list of users in a pad
 */
exports.padUsers = async function(padID) {
  let padUsers = [];
  let roomClients = _getRoomClients(padID);

  // iterate over all clients (in parallel)
  await Promise.all(roomClients.map(async roomClient => {
    let s = sessioninfos[roomClient.id];
    if (s) {
      return authorManager.getAuthor(s.author).then(author => {
        // Fixes: https://github.com/ether/etherpad-lite/issues/4120
        // On restart author might not be populated?
        if (author) {
          author.id = s.author;
          padUsers.push(author);
        }
      });
    }
  }));

  return {padUsers};
}
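
// Illustrative only: the resulting object looks roughly like this. The author fields depend
// on what authorManager.getAuthor() stores; the values below are made up.
//
//   {
//     padUsers: [
//       {id: "a.xyz123", name: "Alice", colorId: "#daf0b2"},
//       {id: "a.abc456", name: null, colorId: 7}
//     ]
//   }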

exports.sessioninfos = sessioninfos;