Fix more errors after ot

yflory 2020-04-14 17:11:47 +02:00
parent a70233d492
commit d41c362d46
2 changed files with 158 additions and 65 deletions


@@ -375,7 +375,7 @@ define([
             startLine: array[1],
             startCh: array[2],
             endLine: multiline ? array[3] : array[1],
-            endCh: singleChar ? (array[2]+1) : array[3]
+            endCh: singleChar ? (array[2]+1) : (multiline ? array[4] : array[3])
         };
     };
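The hunk above changes how the end character of a mark is decoded: a multiline mark now takes its end character from array[4] rather than array[3]. A minimal sketch of the full decoder follows; the compact array layouts and the way multiline/singleChar are derived are assumptions inferred from this hunk, not taken from the patch.

    // Assumed compact author-mark layouts (hypothetical, inferred from the hunk):
    //   [author, line, ch]                            -> single character
    //   [author, line, startCh, endCh]                -> single line
    //   [author, startLine, startCh, endLine, endCh]  -> several lines
    var parseMarkSketch = function (array) {
        var singleChar = array.length === 3;                // assumption
        var multiline = typeof(array[4]) !== "undefined";   // assumption
        return {
            startLine: array[1],
            startCh: array[2],
            endLine: multiline ? array[3] : array[1],
            // the fix: multiline marks keep their end character in array[4]
            endCh: singleChar ? (array[2]+1) : (multiline ? array[4] : array[3])
        };
    };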
@@ -406,127 +406,214 @@ define([
     // first: data about the change with the lowest offset
     // last: data about the change with the latest offset
     // in the comments, "I" am "first"
-    var fixMarks = function (first, last) {
-        var toKeepEnd = [];
-        var toKeepStart = [];
-        // Get their start position compared to the authdoc
-        var lastOldOffset = last.offset - first.size; // (their offset minus my size)
-        var lastOldPos = SFCodeMirror.posToCursor(lastOldOffset, last.doc);
+    var fixMarks = function (first, last, content, toKeepEnd) {
+        console.log(first, last);
+        var toKeep = [];
+        // Get their start position compared to the authDoc
+        var lastAuthOffset = last.offset + last.total;
+        var lastAuthPos = SFCodeMirror.posToCursor(lastAuthOffset, last.doc);
+        // Get their start position compared to the localDoc
+        var lastLocalOffset = last.offset + first.total;
+        var lastLocalPos = SFCodeMirror.posToCursor(lastLocalOffset, first.doc);
+        console.log(lastAuthPos, lastAuthOffset);
+        console.log(lastLocalPos, lastLocalOffset);
         // Keep their changes in the marks (after their offset)
         last.marks.some(function (array, i) {
             var p = parseMark(array);
             // End of the mark before offset? ignore
-            if (p.endLine < lastOldPos.line) { return; }
+            if (p.endLine < lastAuthPos.line) { return; }
             // Take everything from the first mark ending after the pos
-            if (p.endLine > lastOldPos.line || p.endCh >= lastOldPos.ch) {
-                toKeepEnd = last.marks.slice(i);
+            if (p.endLine > lastAuthPos.line || p.endCh >= lastAuthPos.ch) {
+                toKeep = last.marks.slice(i);
+                last.marks.splice(i);
                 return true;
             }
         });
         // Keep my marks (based on currentDoc) before their changes
+        var toJoin = {};
         first.marks.some(function (array, i) {
             var p = parseMark(array);
             // End of the mark before offset? ignore
-            if (p.endLine < last.startLine) { return; }
+            if (p.endLine < lastLocalPos.line) { return; }
             // Take everything from the first mark ending after the pos
-            if (p.endLine > last.startLine || p.endCh >= last.startCh) {
-                toKeepStart = first.marks.slice(0,i);
+            if (p.endLine > lastLocalPos.line || p.endCh >= lastLocalPos.ch) {
+                first.marks.splice(i);
                 return true;
             }
         });
+        if (first.marks.length) {
+            var toJoinMark = first.marks[first.marks.length - 1].slice();
+            toJoin = parseMark(toJoinMark);
+        }
         console.info('to keep');
-        console.info(JSON.stringify(toKeepStart));
-        console.info(JSON.stringify(toKeepEnd));
-        // Fix their offset
-        var addLine = first.endLine - first.startLine;
-        var addCh = first.endCh - first.startCh;
+        console.info(JSON.stringify(toKeep));
+        // Add the new markers to the result
+        Array.prototype.unshift.apply(toKeepEnd, toKeep);
+        // Fix their offset: compute added lines and added characters on the last line
+        // using the chainpad operation data (toInsert and toRemove)
+        var pos = SFCodeMirror.posToCursor(first.offset, content);
+        var removed = content.slice(first.offset, first.offset + first.toRemove);
+        var removedS = removed.split('\n');
+        var addedS = first.toInsert.split('\n');
+        var addLine = addedS.length - removedS.length;
+        var addCh = addedS[addedS.length - 1].length - removedS[removedS.length - 1].length;
+        if (addLine > 0) { addCh -= pos.ch; }
         toKeepEnd.forEach(function (array) {
             // Push to correct lines
             array[1] += addLine;
             if (typeof(array[4]) !== "undefined") { array[3] += addLine; }
             // If they have markers on my end line, push their "ch"
-            if (array[1] === first.endLine) {
+            if (array[1] === toJoin[1]) {
                 array[2] += addCh;
                 // If they have no end line, it means end line === start line,
                 // so we also push their end offset
                 if (!array[4] && array[3]) { array[3] += addCh; }
             }
         });
-        Array.prototype.push.apply(toKeepStart, toKeepEnd);
-        authormarks.marks = toKeepStart;
+        if (toKeep.length && toJoin) {
+            // Make sure the marks are joined correctly:
+            // fix the start position of the marks to keep
+            toKeepEnd[0][1] = toJoin.endLine;
+            toKeepEnd[0][2] = toJoin.endCh;
+        }
+        console.info(JSON.stringify(toJoin));
+        console.info(JSON.stringify(first.marks));
+        console.info(JSON.stringify(last.marks));
+        console.info(JSON.stringify(toKeepEnd));
     };
     var checkAuthors = function (userDoc) {
         var chainpad = framework._.cpNfInner.chainpad;
         var authDoc = JSON.parse(chainpad.getAuthDoc() || '{}');
         if (!authDoc.content || !userDoc.content) { return; }
-        if (authDoc.content === userDoc.content) { return; } // No uncommitted work
         if (!authormarks || !Array.isArray(authormarks.marks)) { return; }
         var localDoc = CodeMirror.canonicalize(editor.getValue());
+        var commonParent = chainpad.getAuthBlock().getParent().getContent().doc;
+        console.log(chainpad);
+        console.log(commonParent);
+        var content = JSON.parse(commonParent || '{}').content || '';
         // Their changes are the diff between my local doc (my local changes only)
         // and the userDoc (my local changes + their changes pushed to the authdoc)
-        var theirOps = ChainPad.Diff.diff(localDoc, userDoc.content);
+        //var theirOps = ChainPad.Diff.diff(localDoc, userDoc.content);
+        var theirOps = ChainPad.Diff.diff(content, authDoc.content);
         // My changes are the diff between my userDoc (my local changes + their changes)
         // and the authDoc (their changes only)
-        var myOps = ChainPad.Diff.diff(authDoc.content, userDoc.content);
+        //var myOps = ChainPad.Diff.diff(authDoc.content, userDoc.content);
+        var myOps = ChainPad.Diff.diff(content, localDoc);
+        if (!myOps.length || !theirOps.length) { return; }
         // If I have uncommited content when receiving a remote patch, and they have
         // pushed content to the same line as me, I need to update all the authormarks
         // after their changes to push them by the length of the text I added
-        console.log(JSON.stringify(authDoc.authormarks));
-        console.log(JSON.stringify(authormarks));
+        console.log(JSON.stringify(oldMarks.marks));
+        console.log(JSON.stringify(authDoc.authormarks.marks));
+        console.log(JSON.stringify(authormarks.marks));
        console.warn(myOps);
         console.warn(theirOps);
         var marks = authormarks.marks;
-        myOps.forEach(function (op) {
-            var pos = SFCodeMirror.posToCursor(op.offset, userDoc.content);
-            var size = (op.toInsert.length - op.toRemove);
-            var pos2 = SFCodeMirror.posToCursor(op.offset+size, userDoc.content);
-            var me = {
-                offset: op.offset,
-                size: size,
-                startLine: pos.line,
-                startCh: pos.ch,
-                endLine: pos2.line,
-                endCh: pos2.ch,
-                marks: (oldMarks && oldMarks.marks) || [],
-                doc: localDoc
-            };
-            theirOps.some(function (_op) {
-                // XXX we need the take the first operation after my changes and the one just before my change
-                // XXX if they have multiple operations and one of them (not the first)
-                // is multiline...
-                var _size = (_op.toInsert.length - _op.toRemove);
-                var _pos = SFCodeMirror.posToCursor(_op.offset, userDoc.content);
-                var _pos2 = SFCodeMirror.posToCursor(_op.offset+_size, userDoc.content);
-                var them = {
-                    offset: _op.offset,
-                    size: _size,
-                    startLine: _pos.line,
-                    startCh: _pos.ch,
-                    endLine: _pos2.line,
-                    endCh: _pos2.ch,
-                    marks: (authDoc.authormarks && authDoc.authormarks.marks) || [],
-                    doc: authDoc.content
+        var ops = {};
+        var myTotal = 0;
+        var theirTotal = 0;
+        var parseOp = function (me) {
+            return function (op) {
+                var size = (op.toInsert.length - op.toRemove);
+                /*
+                var pos = SFCodeMirror.posToCursor(op.offset, content);
+                var pos2 = SFCodeMirror.posToCursor(op.offset+size, content);
+                */
+                ops[op.offset] = {
+                    me: me,
+                    offset: op.offset,
+                    toInsert: op.toInsert,
+                    toRemove: op.toRemove,
+                    size: size,
+                    /*
+                    size: size,
+                    startLine: pos.line,
+                    startCh: pos.ch,
+                    endLine: pos2.line,
+                    endCh: pos2.ch,
+                    addLine: pos2.line - pos.line,
+                    addCh: pos2.ch - pos.ch,
+                    */
+                    marks: (me ? (oldMarks && oldMarks.marks)
+                        : (authDoc.authormarks && authDoc.authormarks.marks)) || [],
+                    doc: me ? localDoc : authDoc.content
                 };
-                if (_op.offset > op.offset) {
-                    console.error('me first', me, them);
-                    fixMarks(me, them);
+                if (me) {
+                    myTotal += size;
                 } else {
-                    console.error('them first', me, them);
-                    fixMarks(them, me);
+                    theirTotal += size;
                 }
-                console.warn(JSON.stringify(authormarks.marks));
-                return true;
-            });
+            };
+        };
+        myOps.forEach(parseOp(true));
+        theirOps.forEach(parseOp(false));
+        console.error(myTotal, theirTotal);
+        /*
+        theirOps.map(function (_op) {
+            var _pos = SFCodeMirror.posToCursor(_op.offset, content);
+            var _size = (_op.toInsert.length - _op.toRemove);
+            var _pos2 = SFCodeMirror.posToCursor(_op.offset+_size, content);
+            ops[_op.offset] = {
+                me: false,
+                offset: _op.offset,
+                size: _size,
+                startLine: _pos.line,
+                startCh: _pos.ch,
+                endLine: _pos2.line,
+                endCh: _pos2.ch,
+                marks: (authDoc.authormarks && authDoc.authormarks.marks) || [],
+                doc: authDoc.content
+            };
+            theirTotal += _size;
         });
+        */
+        var sorted = Object.keys(ops).map(Number);
+        sorted.sort().reverse();
+        console.log(sorted);
+        // We start from the end so that we don't have to fix the offsets everytime
+        var prev;
+        var toKeepEnd = [];
+        sorted.forEach(function (offset) {
+            var op = ops[offset];
+            // Not the same author? fix!
+            if (prev && prev.me !== op.me) {
+                prev.total = prev.me ? myTotal : theirTotal;
+                op.total = op.me ? myTotal : theirTotal;
+                fixMarks(op, prev, content, toKeepEnd);
+            }
+            if (op.me) { myTotal -= op.size }
+            else { theirTotal -= op.size }
+            prev = op;
+        });
+        var first = ops[sorted[sorted.length - 1]];
+        console.error(JSON.stringify(first.marks));
+        if (first) {
+            Array.prototype.unshift.apply(toKeepEnd, first.marks);
+        }
+        console.error(JSON.stringify(toKeepEnd));
+        authormarks.marks = toKeepEnd;
     };
     framework.onContentUpdate(function (newContent) {
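The key change in the hunk above is how fixMarks computes the offset correction for marks located after a change: addLine and addCh are now derived from the ChainPad operation itself (offset, toRemove, toInsert) applied against the common-parent content, instead of from precomputed start/end cursors. Below is a standalone sketch of that computation, assuming op is a ChainPad operation and doc is the text it applies to; the helper name opDelta is illustrative, not part of the patch.

    // Sketch of the line/character delta introduced by one ChainPad operation,
    // mirroring the addLine/addCh logic added to fixMarks above.
    var opDelta = function (op, doc) {
        var pos = SFCodeMirror.posToCursor(op.offset, doc);
        var removedS = doc.slice(op.offset, op.offset + op.toRemove).split('\n');
        var addedS = op.toInsert.split('\n');
        var addLine = addedS.length - removedS.length;
        var addCh = addedS[addedS.length - 1].length - removedS[removedS.length - 1].length;
        // When whole lines are added, text after the change starts counting from
        // the last inserted line, so the columns before the change no longer apply.
        if (addLine > 0) { addCh -= pos.ch; }
        return { addLine: addLine, addCh: addCh };
    };

Marks sitting after the change would then be shifted by addLine lines, and by addCh characters when they are on the change's end line, which is what the toKeepEnd.forEach loop in the hunk does.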
@@ -535,6 +622,10 @@ define([
             CodeMirror.setMode(highlightMode, evModeChange.fire);
         }
 
+        var chainpad = framework._.cpNfInner.chainpad;
+        console.error(chainpad._.authDoc);
+        console.warn(chainpad._.uncommitted);
+        console.error(authormarks.marks, oldMarks.marks);
         if (newContent.authormarks) {
             oldMarks = authormarks;
             authormarks = newContent.authormarks;


@@ -65,6 +65,7 @@ define([
         sframeChan.query('Q_RT_MESSAGE', message, function (_err, obj) {
             var err = _err || (obj && obj.error);
             if (!err) { evPatchSent.fire(); }
+            console.error('cb', message);
             cb(err);
         }, { timeout: -1 });
     });
@@ -137,6 +138,7 @@ define([
         if (isReady) {
             onLocal(true); // should be onBeforeMessage
         }
+        console.error('received', content);
         chainpad.message(content);
         if (isHistory && updateLoadingProgress) {
             updateLoadingProgress({