From 551f6e13434d9e730bc7a90debde97206c7228a8 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 17 May 2022 13:22:18 -0400 Subject: [PATCH 01/17] convert automerge-js to typescript --- automerge-js/.gitignore | 1 + automerge-js/package.json | 15 +- automerge-js/src/{columnar.js => columnar.ts} | 128 ++--- automerge-js/src/{common.js => common.ts} | 19 +- automerge-js/src/constants.js | 18 - automerge-js/src/constants.ts | 15 + automerge-js/src/{counter.js => counter.ts} | 43 +- automerge-js/src/{encoding.js => encoding.ts} | 69 ++- automerge-js/src/index.js | 372 ------------- automerge-js/src/index.ts | 496 ++++++++++++++++++ automerge-js/src/{numbers.js => numbers.ts} | 19 +- automerge-js/src/{proxies.js => proxies.ts} | 79 +-- automerge-js/src/{sync.js => sync.ts} | 48 +- automerge-js/src/{text.js => text.ts} | 43 +- automerge-js/src/uuid.js | 16 - automerge-js/src/uuid.ts | 16 + .../test/{basic_test.js => basic_test.ts} | 7 +- .../{columnar_test.js => columnar_test.ts} | 8 +- automerge-js/test/{helpers.js => helpers.ts} | 4 +- .../test/{legacy_tests.js => legacy_tests.ts} | 13 +- .../test/{sync_test.js => sync_test.ts} | 11 +- .../test/{text_test.js => text_test.ts} | 6 +- .../test/{uuid_test.js => uuid_test.ts} | 4 +- automerge-js/tsconfig.json | 16 + automerge-js/tslint.json | 3 + automerge-wasm/index.d.ts | 5 + 26 files changed, 845 insertions(+), 629 deletions(-) rename automerge-js/src/{columnar.js => columnar.ts} (94%) rename automerge-js/src/{common.js => common.ts} (78%) delete mode 100644 automerge-js/src/constants.js create mode 100644 automerge-js/src/constants.ts rename automerge-js/src/{counter.js => counter.ts} (72%) rename automerge-js/src/{encoding.js => encoding.ts} (97%) delete mode 100644 automerge-js/src/index.js create mode 100644 automerge-js/src/index.ts rename automerge-js/src/{numbers.js => numbers.ts} (76%) rename automerge-js/src/{proxies.js => proxies.ts} (90%) rename automerge-js/src/{sync.js => sync.ts} (94%) rename automerge-js/src/{text.js => text.ts} (82%) delete mode 100644 automerge-js/src/uuid.js create mode 100644 automerge-js/src/uuid.ts rename automerge-js/test/{basic_test.js => basic_test.ts} (98%) rename automerge-js/test/{columnar_test.js => columnar_test.ts} (96%) rename automerge-js/test/{helpers.js => helpers.ts} (93%) rename automerge-js/test/{legacy_tests.js => legacy_tests.ts} (99%) rename automerge-js/test/{sync_test.js => sync_test.ts} (99%) rename automerge-js/test/{text_test.js => text_test.ts} (99%) rename automerge-js/test/{uuid_test.js => uuid_test.ts} (89%) create mode 100644 automerge-js/tsconfig.json create mode 100644 automerge-js/tslint.json diff --git a/automerge-js/.gitignore b/automerge-js/.gitignore index 5add9449..05065cf0 100644 --- a/automerge-js/.gitignore +++ b/automerge-js/.gitignore @@ -1,2 +1,3 @@ /node_modules /yarn.lock +dist diff --git a/automerge-js/package.json b/automerge-js/package.json index 17018429..4b3b2b55 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -4,10 +4,21 @@ "main": "src/index.js", "license": "MIT", "scripts": { - "test": "mocha --bail --full-trace" + "lint": "tslint --project tsconfig.json", + "test": "ts-mocha -p tsconfig.json test/**/*.ts" + }, + "directories": { + "src": "./src", + "test": "./test" }, "devDependencies": { - "mocha": "^9.1.1" + "@types/expect": "^24.3.0", + "@types/mocha": "^9.1.1", + "@types/uuid": "^8.3.4", + "mocha": "^10.0.0", + "ts-mocha": "^10.0.0", + "tslint": "^6.1.3", + "typescript": "^4.6.4" }, "dependencies": { "automerge-wasm": 
"file:../automerge-wasm", diff --git a/automerge-js/src/columnar.js b/automerge-js/src/columnar.ts similarity index 94% rename from automerge-js/src/columnar.js rename to automerge-js/src/columnar.ts index 8d266f5b..fd203333 100644 --- a/automerge-js/src/columnar.js +++ b/automerge-js/src/columnar.ts @@ -1,9 +1,9 @@ -const pako = require('pako') -const { copyObject, parseOpId, equalBytes } = require('./common') -const { +import * as pako from 'pako' +import { copyObject, parseOpId, equalBytes } from './common' +import { utf8ToString, hexStringToBytes, bytesToHexString, Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder -} = require('./encoding') +} from './encoding' // Maybe we should be using the platform's built-in hash implementation? // Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have @@ -18,7 +18,7 @@ const { // - It does not need a secure source of random bits and does not need to be // constant-time; // - I have reviewed the source code and it seems pretty reasonable. -const { Hash } = require('fast-sha256') +import { Hash } from 'fast-sha256' // These bytes don't mean anything, they were generated randomly const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83]) @@ -32,7 +32,7 @@ const CHUNK_TYPE_DEFLATE = 2 // like CHUNK_TYPE_CHANGE but with DEFLATE compress const DEFLATE_MIN_SIZE = 256 // The least-significant 3 bits of a columnId indicate its datatype -const COLUMN_TYPE = { +export const COLUMN_TYPE = { GROUP_CARD: 0, ACTOR_ID: 1, INT_RLE: 2, INT_DELTA: 3, BOOLEAN: 4, STRING_RLE: 5, VALUE_LEN: 6, VALUE_RAW: 7 } @@ -43,15 +43,15 @@ const COLUMN_TYPE_DEFLATE = 8 // In the values in a column of type VALUE_LEN, the bottom four bits indicate the type of the value, // one of the following types in VALUE_TYPE. The higher bits indicate the length of the value in the // associated VALUE_RAW column (in bytes). -const VALUE_TYPE = { +export const VALUE_TYPE = { NULL: 0, FALSE: 1, TRUE: 2, LEB128_UINT: 3, LEB128_INT: 4, IEEE754: 5, UTF8: 6, BYTES: 7, COUNTER: 8, TIMESTAMP: 9, MIN_UNKNOWN: 10, MAX_UNKNOWN: 15 } // make* actions must be at even-numbered indexes in this list -const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] +export const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] -const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} +export const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} const COMMON_COLUMNS = [ {columnName: 'objActor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, @@ -69,13 +69,13 @@ const COMMON_COLUMNS = [ {columnName: 'chldCtr', columnId: 6 << 4 | COLUMN_TYPE.INT_DELTA} ] -const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ +export const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ {columnName: 'predNum', columnId: 7 << 4 | COLUMN_TYPE.GROUP_CARD}, {columnName: 'predActor', columnId: 7 << 4 | COLUMN_TYPE.ACTOR_ID}, {columnName: 'predCtr', columnId: 7 << 4 | COLUMN_TYPE.INT_DELTA} ]) -const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ +export const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ {columnName: 'succNum', columnId: 8 << 4 | COLUMN_TYPE.GROUP_CARD}, {columnName: 'succActor', columnId: 8 << 4 | COLUMN_TYPE.ACTOR_ID}, {columnName: 'succCtr', columnId: 8 << 4 | COLUMN_TYPE.INT_DELTA} @@ -131,7 +131,7 @@ function compareParsedOpIds(id1, id2) { * false. 
*/ function parseAllOpIds(changes, single) { - const actors = {}, newChanges = [] + const actors : any = {}, newChanges : any = [] for (let change of changes) { change = copyObject(change) actors[change.actor] = true @@ -294,7 +294,7 @@ function encodeValue(op, columns) { * form `{value: value, datatype: datatypeTag}` where `value` is a JavaScript primitive datatype * corresponding to the value, and `datatypeTag` is a datatype annotation such as 'counter'. */ -function decodeValue(sizeTag, bytes) { +export function decodeValue(sizeTag, bytes) { if (sizeTag === VALUE_TYPE.NULL) { return {value: null} } else if (sizeTag === VALUE_TYPE.FALSE) { @@ -367,7 +367,7 @@ function decodeValueColumns(columns, colIndex, actorIds, result) { * objects. */ function encodeOps(ops, forDocument) { - const columns = { + const columns : any = { objActor : new RLEEncoder('uint'), objCtr : new RLEEncoder('uint'), keyActor : new RLEEncoder('uint'), @@ -427,7 +427,7 @@ function encodeOps(ops, forDocument) { } } - let columnList = [] + let columnList : any = [] for (let {columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) { if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]}) } @@ -436,7 +436,7 @@ function encodeOps(ops, forDocument) { function expandMultiOps(ops, startOp, actor) { let opNum = startOp - let expandedOps = [] + let expandedOps : any = [] for (const op of ops) { if (op.action === 'set' && op.values && op.insert) { if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') @@ -470,12 +470,12 @@ function expandMultiOps(ops, startOp, actor) { * individual change. */ function decodeOps(ops, forDocument) { - const newOps = [] + const newOps : any = [] for (let op of ops) { const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) const action = ACTIONS[op.action] || op.action - const newOp = elemId ? {obj, elemId, action} : {obj, key: op.keyStr, action} + const newOp : any = elemId ? 
{obj, elemId, action} : {obj, key: op.keyStr, action} newOp.insert = !!op.insert if (ACTIONS[op.action] === 'set' || ACTIONS[op.action] === 'inc') { newOp.value = op.valLen @@ -511,7 +511,7 @@ function checkSortedOpIds(opIds) { } } -function encoderByColumnId(columnId) { +export function encoderByColumnId(columnId) { if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { return new DeltaEncoder() } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { @@ -525,7 +525,7 @@ function encoderByColumnId(columnId) { } } -function decoderByColumnId(columnId, buffer) { +export function decoderByColumnId(columnId, buffer) { if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { return new DeltaDecoder(buffer) } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { @@ -539,9 +539,9 @@ function decoderByColumnId(columnId, buffer) { } } -function makeDecoders(columns, columnSpec) { +export function makeDecoders(columns, columnSpec) { const emptyBuf = new Uint8Array(0) - let decoders = [], columnIndex = 0, specIndex = 0 + let decoders : any = [], columnIndex = 0, specIndex = 0 while (columnIndex < columns.length || specIndex < columnSpec.length) { if (columnIndex === columns.length || @@ -565,7 +565,7 @@ function makeDecoders(columns, columnSpec) { function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) - let parsedRows = [] + let parsedRows : any = [] while (columns.some(col => !col.decoder.done)) { let row = {}, col = 0 while (col < columns.length) { @@ -576,7 +576,7 @@ function decodeColumns(columns, actorIds, columnSpec) { } if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { - const values = [], count = columns[col].decoder.readValue() + const values : any = [], count = columns[col].decoder.readValue() for (let i = 0; i < count; i++) { let value = {} for (let colOffset = 1; colOffset < groupCols; colOffset++) { @@ -600,7 +600,7 @@ function decodeColumnInfo(decoder) { // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 - let lastColumnId = -1, columns = [], numColumns = decoder.readUint53() + let lastColumnId = -1, columns : any = [], numColumns = decoder.readUint53() for (let i = 0; i < numColumns; i++) { const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { @@ -622,11 +622,11 @@ function encodeColumnInfo(encoder, columns) { } function decodeChangeHeader(decoder) { - const numDeps = decoder.readUint53(), deps = [] + const numDeps = decoder.readUint53(), deps : any = [] for (let i = 0; i < numDeps; i++) { deps.push(bytesToHexString(decoder.readRawBytes(32))) } - let change = { + let change : any = { actor: decoder.readHexString(), seq: decoder.readUint53(), startOp: decoder.readUint53(), @@ -682,7 +682,7 @@ function decodeContainerHeader(decoder, computeHash) { const hashStartOffset = decoder.offset const chunkType = decoder.readByte() const chunkLength = decoder.readUint53() - const header = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} + const header : any = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} if (computeHash) { const sha256 = new Hash() @@ -699,7 +699,7 @@ function decodeContainerHeader(decoder, computeHash) { /** * Returns the checksum of a change (bytes 4 to 7) as a 32-bit unsigned integer. 
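 *
 * Worked example (illustrative): for a change beginning 85 6f 4a 83 12 34 56 78 ...,
 * the checksum is ((0x12 << 24) | (0x34 << 16) | (0x56 << 8) | 0x78) >>> 0,
 * i.e. 0x12345678.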
*/ -function getChangeChecksum(change) { +export function getChangeChecksum(change) { if (change[0] !== MAGIC_BYTES[0] || change[1] !== MAGIC_BYTES[1] || change[2] !== MAGIC_BYTES[2] || change[3] !== MAGIC_BYTES[3]) { throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') @@ -707,9 +707,9 @@ function getChangeChecksum(change) { return ((change[4] << 24) | (change[5] << 16) | (change[6] << 8) | change[7]) >>> 0 } -function encodeChange(changeObj) { +export function encodeChange(changeObj) { const { changes, actorIds } = parseAllOpIds([changeObj], true) - const change = changes[0] + const change : any = changes[0] const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') @@ -725,7 +725,7 @@ function encodeChange(changeObj) { encoder.appendUint53(actorIds.length - 1) for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) - const columns = encodeOps(change.ops, false) + const columns : any = encodeOps(change.ops, false) encodeColumnInfo(encoder, columns) for (let column of columns) encoder.appendRawBytes(column.encoder.buffer) if (change.extraBytes) encoder.appendRawBytes(change.extraBytes) @@ -738,16 +738,16 @@ function encodeChange(changeObj) { return (bytes.byteLength >= DEFLATE_MIN_SIZE) ? deflateChange(bytes) : bytes } -function decodeChangeColumns(buffer) { +export function decodeChangeColumns(buffer) { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) const decoder = new Decoder(buffer) - const header = decodeContainerHeader(decoder, true) + const header : any = decodeContainerHeader(decoder, true) const chunkDecoder = new Decoder(header.chunkData) if (!decoder.done) throw new RangeError('Encoded change has trailing data') if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const change = decodeChangeHeader(chunkDecoder) - const columns = decodeColumnInfo(chunkDecoder) + const change : any = decodeChangeHeader(chunkDecoder) + const columns : any = decodeColumnInfo(chunkDecoder) for (let i = 0; i < columns.length; i++) { if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) { throw new RangeError('change must not contain deflated columns') @@ -767,8 +767,8 @@ function decodeChangeColumns(buffer) { /** * Decodes one change in binary format into its JS object representation. */ -function decodeChange(buffer) { - const change = decodeChangeColumns(buffer) +export function decodeChange(buffer) { + const change : any = decodeChangeColumns(buffer) change.ops = decodeOps(decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), false) delete change.actorIds delete change.columns @@ -780,13 +780,13 @@ function decodeChange(buffer) { * the operations. Saves work when we only need to inspect the headers. Only * computes the hash of the change if `computeHash` is true. 
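 *
 * Sketch of the result (fields as read by decodeChangeHeader above, plus extras):
 * {deps, actor, seq, startOp, ..., change: <the input buffer>, hash: <only if computeHash>}.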
*/ -function decodeChangeMeta(buffer, computeHash) { +export function decodeChangeMeta(buffer, computeHash) : any { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) - const header = decodeContainerHeader(new Decoder(buffer), computeHash) + const header : any = decodeContainerHeader(new Decoder(buffer), computeHash) if (header.chunkType !== CHUNK_TYPE_CHANGE) { throw new RangeError('Buffer chunk type is not a change') } - const meta = decodeChangeHeader(new Decoder(header.chunkData)) + const meta : any = decodeChangeHeader(new Decoder(header.chunkData)) meta.change = buffer if (computeHash) meta.hash = header.hash return meta @@ -826,8 +826,8 @@ function inflateChange(buffer) { * Takes an Uint8Array that may contain multiple concatenated changes, and * returns an array of subarrays, each subarray containing one change. */ -function splitContainers(buffer) { - let decoder = new Decoder(buffer), chunks = [], startOffset = 0 +export function splitContainers(buffer) { + let decoder = new Decoder(buffer), chunks : any = [], startOffset = 0 while (!decoder.done) { decodeContainerHeader(decoder, false) chunks.push(buffer.subarray(startOffset, decoder.offset)) @@ -840,8 +840,8 @@ function splitContainers(buffer) { * Decodes a list of changes from the binary format into JS objects. * `binaryChanges` is an array of `Uint8Array` objects. */ -function decodeChanges(binaryChanges) { - let decoded = [] +export function decodeChanges(binaryChanges) { + let decoded : any = [] for (let binaryChange of binaryChanges) { for (let chunk of splitContainers(binaryChange)) { if (chunk[8] === CHUNK_TYPE_DOCUMENT) { @@ -914,11 +914,11 @@ function groupDocumentOps(changes) { let ops = [] for (let objectId of Object.keys(byObjectId).sort(sortOpIds)) { - let keys = [] + let keys : string[] = [] if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { let stack = ['_head'] while (stack.length > 0) { - const key = stack.pop() + const key : any = stack.pop() if (key !== '_head') keys.push(key) for (let opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) } @@ -931,6 +931,7 @@ function groupDocumentOps(changes) { for (let key of keys) { for (let opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { const op = byObjectId[objectId][key][opId] + // @ts-ignore if (op.action !== 'del') ops.push(op) } } @@ -976,6 +977,7 @@ function groupChangeOps(changes, ops) { delete op.succ } for (let op of Object.values(opsById)) { + // @ts-ignore if (op.action === 'del') ops.push(op) } @@ -1055,7 +1057,7 @@ function encodeDocumentChanges(changes) { } } - let changesColumns = [] + let changesColumns : any = [] for (let {columnName, columnId} of DOCUMENT_COLUMNS) { changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]}) } @@ -1104,7 +1106,7 @@ function decodeDocumentChanges(changes, expectedHeads) { /** * Transforms a list of changes into a binary representation of the document state. 
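 *
 * Hypothetical usage sketch: given binaryChanges: Uint8Array[] (one encoded change
 * each), `const docBytes = encodeDocument(binaryChanges)` returns a single
 * Uint8Array that decodeDocument() below can read back.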
*/ -function encodeDocument(binaryChanges) { +export function encodeDocument(binaryChanges) { const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false) const { changesColumns, heads } = encodeDocumentChanges(changes) const opsColumns = encodeOps(groupDocumentOps(changes), true) @@ -1122,29 +1124,31 @@ function encodeDocument(binaryChanges) { } encodeColumnInfo(encoder, changesColumns) encodeColumnInfo(encoder, opsColumns) + // @ts-ignore for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + // @ts-ignore for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) }).bytes } -function decodeDocumentHeader(buffer) { +export function decodeDocumentHeader(buffer) { const documentDecoder = new Decoder(buffer) const header = decodeContainerHeader(documentDecoder, true) const decoder = new Decoder(header.chunkData) if (!documentDecoder.done) throw new RangeError('Encoded document has trailing data') if (header.chunkType !== CHUNK_TYPE_DOCUMENT) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const actorIds = [], numActors = decoder.readUint53() + const actorIds : string[] = [], numActors = decoder.readUint53() for (let i = 0; i < numActors; i++) { actorIds.push(decoder.readHexString()) } - const heads = [], numHeads = decoder.readUint53() + const heads : string[] = [], numHeads = decoder.readUint53() for (let i = 0; i < numHeads; i++) { heads.push(bytesToHexString(decoder.readRawBytes(32))) } - const changesColumns = decodeColumnInfo(decoder) - const opsColumns = decodeColumnInfo(decoder) + const changesColumns : any = decodeColumnInfo(decoder) + const opsColumns : any = decodeColumnInfo(decoder) for (let i = 0; i < changesColumns.length; i++) { changesColumns[i].buffer = decoder.readRawBytes(changesColumns[i].bufferLen) inflateColumn(changesColumns[i]) @@ -1158,7 +1162,7 @@ function decodeDocumentHeader(buffer) { return { changesColumns, opsColumns, actorIds, heads, extraBytes } } -function decodeDocument(buffer) { +export function decodeDocument(buffer) { const { changesColumns, opsColumns, actorIds, heads } = decodeDocumentHeader(buffer) const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS) const ops = decodeOps(decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), true) @@ -1196,7 +1200,7 @@ function inflateColumn(column) { * or false if the property has been deleted. */ function addPatchProperty(objects, property) { - let values = {}, counter = null + let values : any = {}, counter : any = null for (let op of property.ops) { // Apply counters and their increments regardless of the number of successor operations if (op.actionName === 'set' && op.value.datatype === 'counter') { @@ -1290,7 +1294,7 @@ function condenseEdits(diff) { * Appends a list edit operation (insert, update, remove) to an array of existing operations. If the * last existing operation can be extended (as a multi-op), we do that. */ -function appendEdit(existingEdits, nextEdit) { +export function appendEdit(existingEdits, nextEdit) { if (existingEdits.length === 0) { existingEdits.push(nextEdit) return @@ -1336,13 +1340,13 @@ function opIdDelta(id1, id2, delta = 1) { * and returns a patch that can be sent to the frontend to instantiate the * current state of that document. 
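 *
 * Illustrative detail: the patch is built starting from a root object of the shape
 * {objectId: '_root', type: 'map', props: {}}, as seeded in the `objects` map below.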
*/ -function constructPatch(documentBuffer) { +export function constructPatch(documentBuffer) { const { opsColumns, actorIds } = decodeDocumentHeader(documentBuffer) - const col = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( - (acc, col) => Object.assign(acc, {[col.columnName]: col.decoder}), {}) + const col : any = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( + (acc, col: any) => Object.assign(acc, {[col.columnName]: col.decoder}), {}) let objects = {_root: {objectId: '_root', type: 'map', props: {}}} - let property = null + let property : any = null while (!col.idActor.done) { const opId = `${col.idCtr.readValue()}@${actorIds[col.idActor.readValue()]}` @@ -1369,7 +1373,7 @@ function constructPatch(documentBuffer) { const rawValue = col.valRaw.readRawBytes(sizeTag >> 4) const value = decodeValue(sizeTag, rawValue) const succNum = col.succNum.readValue() - let succ = [] + let succ : string[] = [] for (let i = 0; i < succNum; i++) { succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`) } diff --git a/automerge-js/src/common.js b/automerge-js/src/common.ts similarity index 78% rename from automerge-js/src/common.js rename to automerge-js/src/common.ts index b41cadc8..5f1b53d1 100644 --- a/automerge-js/src/common.js +++ b/automerge-js/src/common.ts @@ -1,4 +1,4 @@ -function isObject(obj) { +export function isObject(obj: any) : boolean { return typeof obj === 'object' && obj !== null } @@ -6,9 +6,9 @@ function isObject(obj) { * Returns a shallow copy of the object `obj`. Faster than `Object.assign({}, obj)`. * https://jsperf.com/cloning-large-objects/1 */ -function copyObject(obj) { +export function copyObject(obj: any) : any { if (!isObject(obj)) return {} - let copy = {} + let copy : any = {} for (let key of Object.keys(obj)) { copy[key] = obj[key] } @@ -19,7 +19,13 @@ function copyObject(obj) { * Takes a string in the form that is used to identify operations (a counter concatenated * with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`. */ -function parseOpId(opId) { + +interface OpIdObj { + counter: number, + actorId: string +} + +export function parseOpId(opId: string) : OpIdObj { const match = /^(\d+)@(.*)$/.exec(opId || '') if (!match) { throw new RangeError(`Not a valid opId: ${opId}`) @@ -30,7 +36,7 @@ function parseOpId(opId) { /** * Returns true if the two byte arrays contain the same data, false if not. */ -function equalBytes(array1, array2) { +export function equalBytes(array1: Uint8Array, array2: Uint8Array) : boolean { if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) { throw new TypeError('equalBytes can only compare Uint8Arrays') } @@ -41,6 +47,3 @@ function equalBytes(array1, array2) { return true } -module.exports = { - isObject, copyObject, parseOpId, equalBytes -} diff --git a/automerge-js/src/constants.js b/automerge-js/src/constants.js deleted file mode 100644 index ea92228c..00000000 --- a/automerge-js/src/constants.js +++ /dev/null @@ -1,18 +0,0 @@ -// Properties of the document root object -//const OPTIONS = Symbol('_options') // object containing options passed to init() -//const CACHE = Symbol('_cache') // map from objectId to immutable object -const STATE = Symbol('_state') // object containing metadata about current state (e.g. sequence numbers) -const HEADS = Symbol('_heads') // object containing metadata about current state (e.g. sequence numbers) -const OBJECT_ID = Symbol('_objectId') // object containing metadata about current state (e.g. 
sequence numbers) -const READ_ONLY = Symbol('_readOnly') // object containing metadata about current state (e.g. sequence numbers) -const FROZEN = Symbol('_frozen') // object containing metadata about current state (e.g. sequence numbers) - -// Properties of all Automerge objects -//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string) -//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts -//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback -//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element - -module.exports = { - STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN -} diff --git a/automerge-js/src/constants.ts b/automerge-js/src/constants.ts new file mode 100644 index 00000000..597bfa1c --- /dev/null +++ b/automerge-js/src/constants.ts @@ -0,0 +1,15 @@ +// Properties of the document root object +//const OPTIONS = Symbol('_options') // object containing options passed to init() +//const CACHE = Symbol('_cache') // map from objectId to immutable object +export const STATE = Symbol('_state') // object containing metadata about current state (e.g. sequence numbers) +export const HEADS = Symbol('_heads') // object containing metadata about current state (e.g. sequence numbers) +export const OBJECT_ID = Symbol('_objectId') // object containing metadata about current state (e.g. sequence numbers) +export const READ_ONLY = Symbol('_readOnly') // object containing metadata about current state (e.g. sequence numbers) +export const FROZEN = Symbol('_frozen') // object containing metadata about current state (e.g. sequence numbers) + +// Properties of all Automerge objects +//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string) +//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts +//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback +//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element + diff --git a/automerge-js/src/counter.js b/automerge-js/src/counter.ts similarity index 72% rename from automerge-js/src/counter.js rename to automerge-js/src/counter.ts index 6ca54f6d..fba2d8d0 100644 --- a/automerge-js/src/counter.js +++ b/automerge-js/src/counter.ts @@ -1,12 +1,14 @@ +import { Automerge, ObjID, Prop } from "automerge-wasm" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. Since addition of integers is commutative, * the value trivially converges. */ -class Counter { - constructor(value) { +export class Counter { + value : number; + + constructor(value?: number) { this.value = value || 0 - Object.freeze(this) } /** @@ -17,7 +19,7 @@ class Counter { * concatenating it with another string, as in `x + ''`. * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf */ - valueOf() { + valueOf() : number { return this.value } @@ -26,7 +28,7 @@ class Counter { * this method is called e.g. when you do `['value: ', x].join('')` or when * you use string interpolation: `value: ${x}`. */ - toString() { + toString() : string { return this.valueOf().toString() } @@ -34,7 +36,7 @@ class Counter { * Returns the counter value, so that a JSON serialization of an Automerge * document represents the counter simply as an integer. 
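 *
 * For example: JSON.stringify({votes: new Counter(3)}) === '{"votes":3}', since
 * JSON.stringify calls this toJSON() method.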
*/ - toJSON() { + toJSON() : number { return this.value } } @@ -44,11 +46,24 @@ class Counter { * callback. */ class WriteableCounter extends Counter { + context: Automerge + path: string[] + objectId: ObjID + key: Prop + + constructor(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) { + super(value) + this.context = context + this.path = path + this.objectId = objectId + this.key = key + } + /** * Increases the value of the counter by `delta`. If `delta` is not given, * increases the value of the counter by 1. */ - increment(delta) { + increment(delta: number) : number { delta = typeof delta === 'number' ? delta : 1 this.context.increment(this.objectId, this.key, delta) this.value += delta @@ -59,7 +74,7 @@ class WriteableCounter extends Counter { * Decreases the value of the counter by `delta`. If `delta` is not given, * decreases the value of the counter by 1. */ - decrement(delta) { + decrement(delta: number) : number { return this.increment(typeof delta === 'number' ? -delta : -1) } } @@ -71,14 +86,8 @@ class WriteableCounter extends Counter { * the property name (key in map, or index in list) where the counter is * located. */ -function getWriteableCounter(value, context, path, objectId, key) { - const instance = Object.create(WriteableCounter.prototype) - instance.value = value - instance.context = context - instance.path = path - instance.objectId = objectId - instance.key = key - return instance +export function getWriteableCounter(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) { + return new WriteableCounter(value, context, path, objectId, key) } -module.exports = { Counter, getWriteableCounter } +//module.exports = { Counter, getWriteableCounter } diff --git a/automerge-js/src/encoding.js b/automerge-js/src/encoding.ts similarity index 97% rename from automerge-js/src/encoding.js rename to automerge-js/src/encoding.ts index 92b62df6..55ba679d 100644 --- a/automerge-js/src/encoding.js +++ b/automerge-js/src/encoding.ts @@ -8,18 +8,18 @@ const utf8encoder = new TextEncoder() const utf8decoder = new TextDecoder('utf-8') -function stringToUtf8(string) { - return utf8encoder.encode(string) +export function stringToUtf8(s: string) : BufferSource { + return utf8encoder.encode(s) } -function utf8ToString(buffer) { +export function utf8ToString(buffer: BufferSource) : string { return utf8decoder.decode(buffer) } /** * Converts a string consisting of hexadecimal digits into an Uint8Array. */ -function hexStringToBytes(value) { +export function hexStringToBytes(value: string) : Uint8Array { if (typeof value !== 'string') { throw new TypeError('value is not a string') } @@ -29,6 +29,7 @@ function hexStringToBytes(value) { if (value === '') { return new Uint8Array(0) } else { + // @ts-ignore return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) } } @@ -42,7 +43,7 @@ for (let i = 0; i < 256; i++) { /** * Converts a Uint8Array into the equivalent hexadecimal string. */ -function bytesToHexString(bytes) { +export function bytesToHexString(bytes: Uint8Array) : string { let hex = '', len = bytes.byteLength for (let i = 0; i < len; i++) { hex += BYTE_TO_HEX[bytes[i]] @@ -54,7 +55,10 @@ function bytesToHexString(bytes) { * Wrapper around an Uint8Array that allows values to be appended to the buffer, * and that automatically grows the buffer when space runs out. 
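 *
 * Illustrative round trip (method names as used elsewhere in this file):
 *   const e = new Encoder(); e.appendUint53(42); e.appendHexString('cafe')
 *   const d = new Decoder(e.buffer) // d.readUint53() === 42, d.readHexString() === 'cafe'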
*/ -class Encoder { +export class Encoder { + buf: Uint8Array; + offset: number; + constructor() { this.buf = new Uint8Array(16) this.offset = 0 @@ -290,7 +294,10 @@ class Encoder { * the current decoding position, and allows values to be incrementally read by * decoding the bytes at the current position. */ -class Decoder { +export class Decoder { + buf: Uint8Array; + offset: number; + constructor(buffer) { if (!(buffer instanceof Uint8Array)) { throw new TypeError(`Not a byte array: ${buffer}`) @@ -555,7 +562,13 @@ class Decoder { * After one of these three has completed, the process repeats, starting again * with a repetition count, until we reach the end of the buffer. */ -class RLEEncoder extends Encoder { +export class RLEEncoder extends Encoder { + type: any + state: string + lastValue: any + count: number + literal: any + constructor(type) { super() this.type = type @@ -664,7 +677,7 @@ class RLEEncoder extends Encoder { * Returns an object of the form `{nonNullValues, sum}` where `nonNullValues` is the number of * non-null values copied, and `sum` is the sum (only if the `sumValues` option is set). */ - copyFrom(decoder, options = {}) { + copyFrom(decoder, options: any = {}) : any { const { count, sumValues, sumShift } = options if (!(decoder instanceof RLEDecoder) || (decoder.type !== this.type)) { throw new TypeError('incompatible type of decoder') @@ -707,7 +720,7 @@ class RLEEncoder extends Encoder { nonNullValues += numValues for (let i = 0; i < numValues; i++) { if (decoder.done) throw new RangeError('incomplete literal') - const value = decoder.readRawValue() + const value : any = decoder.readRawValue() if (value === decoder.lastValue) throw new RangeError('Repetition of values is not allowed in literal') decoder.lastValue = value this._appendValue(value) @@ -786,7 +799,12 @@ class RLEEncoder extends Encoder { * Counterpart to RLEEncoder: reads values from an RLE-compressed sequence, * returning nulls and repeated values as required. */ -class RLEDecoder extends Decoder { +export class RLEDecoder extends Decoder { + type: any; + lastValue: any; + count: number; + state: any; + constructor(type, buffer) { super(buffer) this.type = type @@ -929,7 +947,9 @@ class RLEDecoder extends Decoder { * * Null values are also allowed, as with RLEEncoder. */ -class DeltaEncoder extends RLEEncoder { +export class DeltaEncoder extends RLEEncoder { + absoluteValue: number + constructor() { super('int') this.absoluteValue = 0 @@ -955,7 +975,7 @@ class DeltaEncoder extends RLEEncoder { * contain the key `count`, indicating the number of values to copy. If not specified, copies * all remaining values in the decoder. */ - copyFrom(decoder, options = {}) { + copyFrom(decoder, options: any = {}) : any { if (options.sumValues) { throw new RangeError('unsupported options for DeltaEncoder.copyFrom()') } @@ -991,7 +1011,9 @@ class DeltaEncoder extends RLEEncoder { if (remaining !== undefined) remaining -= nulls + 1 const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true}) if (nonNullValues > 0) { + // @ts-ignore this.absoluteValue = sum + // @ts-ignore decoder.absoluteValue = sum } } @@ -1001,7 +1023,9 @@ class DeltaEncoder extends RLEEncoder { * Counterpart to DeltaEncoder: reads values from a delta-compressed sequence of * numbers (may include null values). 
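 *
 * Illustrative encoding (assumed from the encoder's absoluteValue bookkeeping): the
 * sequence 10, 11, 13 is stored as deltas 10, 1, 2, which this decoder re-accumulates
 * into absolute values as it reads.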
*/ -class DeltaDecoder extends RLEDecoder { +export class DeltaDecoder extends RLEDecoder { + absoluteValue : number; + constructor(buffer) { super('int', buffer) this.absoluteValue = 0 @@ -1058,7 +1082,10 @@ class DeltaDecoder extends RLEDecoder { * only encode the repetition count but not the actual value, since the values * just alternate between false and true (starting with false). */ -class BooleanEncoder extends Encoder { +export class BooleanEncoder extends Encoder { + lastValue: boolean; + count: number; + constructor() { super() this.lastValue = false @@ -1088,7 +1115,7 @@ class BooleanEncoder extends Encoder { * contain the key `count`, indicating the number of values to copy. If not specified, copies * all remaining values in the decoder. */ - copyFrom(decoder, options = {}) { + copyFrom(decoder, options: any = {}) : any { if (!(decoder instanceof BooleanDecoder)) { throw new TypeError('incompatible type of decoder') } @@ -1138,7 +1165,11 @@ class BooleanEncoder extends Encoder { * Counterpart to BooleanEncoder: reads boolean values from a runlength-encoded * sequence. */ -class BooleanDecoder extends Decoder { +export class BooleanDecoder extends Decoder { + lastValue: boolean; + firstRun: boolean; + count: number; + constructor(buffer) { super(buffer) this.lastValue = true // is negated the first time we read a count @@ -1203,7 +1234,3 @@ class BooleanDecoder extends Decoder { } } -module.exports = { - stringToUtf8, utf8ToString, hexStringToBytes, bytesToHexString, - Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder -} diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js deleted file mode 100644 index 04cee89b..00000000 --- a/automerge-js/src/index.js +++ /dev/null @@ -1,372 +0,0 @@ -const AutomergeWASM = require("automerge-wasm") -const uuid = require('./uuid') - -let { rootProxy, listProxy, textProxy, mapProxy } = require("./proxies") -let { Counter } = require("./counter") -let { Text } = require("./text") -let { Int, Uint, Float64 } = require("./numbers") -let { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } = require("./constants") - -function init(actor) { - if (typeof actor != 'string') { - actor = null - } - const state = AutomergeWASM.create(actor) - return rootProxy(state, true); -} - -function clone(doc) { - const state = doc[STATE].clone() - return rootProxy(state, true); -} - -function free(doc) { - return doc[STATE].free() -} - -function from(data, actor) { - let doc1 = init(actor) - let doc2 = change(doc1, (d) => Object.assign(d, data)) - return doc2 -} - -function change(doc, options, callback) { - if (callback === undefined) { - // FIXME implement options - callback = options - options = {} - } - if (typeof options === "string") { - options = { message: options } - } - if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { - throw new RangeError("must be the document root"); - } - if (doc[FROZEN] === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (!!doc[HEADS] === true) { - throw new RangeError("Attempting to change an out of date document"); - } - if (doc[READ_ONLY] === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const state = doc[STATE] - const heads = state.getHeads() - try { - doc[HEADS] = heads - doc[FROZEN] = true - let root = rootProxy(state); - callback(root) - if (state.pendingOps() === 0) { - doc[FROZEN] = false - doc[HEADS] = undefined - return doc - } else { - 
state.commit(options.message, options.time) - return rootProxy(state, true); - } - } catch (e) { - //console.log("ERROR: ",e) - doc[FROZEN] = false - doc[HEADS] = undefined - state.rollback() - throw e - } -} - -function emptyChange(doc, options) { - if (options === undefined) { - options = {} - } - if (typeof options === "string") { - options = { message: options } - } - - if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { - throw new RangeError("must be the document root"); - } - if (doc[FROZEN] === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (doc[READ_ONLY] === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - - const state = doc[STATE] - state.commit(options.message, options.time) - return rootProxy(state, true); -} - -function load(data, actor) { - const state = AutomergeWASM.load(data, actor) - return rootProxy(state, true); -} - -function save(doc) { - const state = doc[STATE] - return state.save() -} - -function merge(local, remote) { - if (local[HEADS] === true) { - throw new RangeError("Attempting to change an out of date document"); - } - const localState = local[STATE] - const heads = localState.getHeads() - const remoteState = remote[STATE] - const changes = localState.getChangesAdded(remoteState) - localState.applyChanges(changes) - local[HEADS] = heads - return rootProxy(localState, true) -} - -function getActorId(doc) { - const state = doc[STATE] - return state.getActorId() -} - -function conflictAt(context, objectId, prop) { - let values = context.getAll(objectId, prop) - if (values.length <= 1) { - return - } - let result = {} - for (const conflict of values) { - const datatype = conflict[0] - const value = conflict[1] - switch (datatype) { - case "map": - result[value] = mapProxy(context, value, [ prop ], true) - break; - case "list": - result[value] = listProxy(context, value, [ prop ], true) - break; - case "text": - result[value] = textProxy(context, value, [ prop ], true) - break; - //case "table": - //case "cursor": - case "str": - case "uint": - case "int": - case "f64": - case "boolean": - case "bytes": - case "null": - result[conflict[2]] = value - break; - case "counter": - result[conflict[2]] = new Counter(value) - break; - case "timestamp": - result[conflict[2]] = new Date(value) - break; - default: - throw RangeError(`datatype ${datatype} unimplemented`) - } - } - return result -} - -function getConflicts(doc, prop) { - const state = doc[STATE] - const objectId = doc[OBJECT_ID] - return conflictAt(state, objectId, prop) -} - -function getLastLocalChange(doc) { - const state = doc[STATE] - try { - return state.getLastLocalChange() - } catch (e) { - return - } -} - -function getObjectId(doc) { - return doc[OBJECT_ID] -} - -function getChanges(oldState, newState) { - const o = oldState[STATE] - const n = newState[STATE] - const heads = oldState[HEADS] - return n.getChanges(heads || o.getHeads()) -} - -function getAllChanges(doc) { - const state = doc[STATE] - return state.getChanges([]) -} - -function applyChanges(doc, changes) { - if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { - throw new RangeError("must be the document root"); - } - if (doc[FROZEN] === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (doc[READ_ONLY] === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const state = doc[STATE] - const heads = state.getHeads() - 
state.applyChanges(changes) - doc[HEADS] = heads - return [rootProxy(state, true)]; -} - -function getHistory(doc) { - const actor = getActorId(doc) - const history = getAllChanges(doc) - return history.map((change, index) => ({ - get change () { - return decodeChange(change) - }, - get snapshot () { - const [state] = applyChanges(init(), history.slice(0, index + 1)) - return state - } - }) - ) -} - -function equals() { - if (!isObject(val1) || !isObject(val2)) return val1 === val2 - const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() - if (keys1.length !== keys2.length) return false - for (let i = 0; i < keys1.length; i++) { - if (keys1[i] !== keys2[i]) return false - if (!equals(val1[keys1[i]], val2[keys2[i]])) return false - } - return true -} - -function encodeSyncMessage(msg) { - return AutomergeWASM.encodeSyncMessage(msg) -} - -function decodeSyncMessage(msg) { - return AutomergeWASM.decodeSyncMessage(msg) -} - -function encodeSyncState(state) { - return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state)) -} - -function decodeSyncState(state) { - return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state)) -} - -function generateSyncMessage(doc, inState) { - const state = doc[STATE] - const syncState = AutomergeWASM.importSyncState(inState) - const message = state.generateSyncMessage(syncState) - const outState = AutomergeWASM.exportSyncState(syncState) - return [ outState, message ] -} - -function receiveSyncMessage(doc, inState, message) { - const syncState = AutomergeWASM.importSyncState(inState) - if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { - throw new RangeError("must be the document root"); - } - if (doc[FROZEN] === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (!!doc[HEADS] === true) { - throw new RangeError("Attempting to change an out of date document"); - } - if (doc[READ_ONLY] === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const state = doc[STATE] - const heads = state.getHeads() - state.receiveSyncMessage(syncState, message) - const outState = AutomergeWASM.exportSyncState(syncState) - doc[HEADS] = heads - return [rootProxy(state, true), outState, null]; -} - -function initSyncState() { - return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState(change)) -} - -function encodeChange(change) { - return AutomergeWASM.encodeChange(change) -} - -function decodeChange(data) { - return AutomergeWASM.decodeChange(data) -} - -function encodeSyncMessage(change) { - return AutomergeWASM.encodeSyncMessage(change) -} - -function decodeSyncMessage(data) { - return AutomergeWASM.decodeSyncMessage(data) -} - -function getMissingDeps(doc, heads) { - const state = doc[STATE] - return state.getMissingDeps(heads) -} - -function getHeads(doc) { - const state = doc[STATE] - return doc[HEADS] || state.getHeads() -} - -function dump(doc) { - const state = doc[STATE] - state.dump() -} - -function toJS(doc) { - if (typeof doc === "object") { - if (doc instanceof Uint8Array) { - return doc - } - if (doc === null) { - return doc - } - if (doc instanceof Array) { - return doc.map((a) => toJS(a)) - } - if (doc instanceof Text) { - return doc.map((a) => toJS(a)) - } - let tmp = {} - for (index in doc) { - tmp[index] = toJS(doc[index]) - } - return tmp - } else { - return doc - } -} - -module.exports = { - init, from, change, emptyChange, clone, free, - load, save, merge, getChanges, getAllChanges, applyChanges, - 
getLastLocalChange, getObjectId, getActorId, getConflicts, - encodeChange, decodeChange, equals, getHistory, getHeads, uuid, - generateSyncMessage, receiveSyncMessage, initSyncState, - decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, - getMissingDeps, - dump, Text, Counter, Int, Uint, Float64, toJS, -} - -// depricated -// Frontend, setDefaultBackend, Backend - -// more... -/* -for (let name of ['getObjectId', 'getObjectById', - 'setActorId', - 'Text', 'Table', 'Counter', 'Observable' ]) { - module.exports[name] = Frontend[name] -} -*/ diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts new file mode 100644 index 00000000..1f86580e --- /dev/null +++ b/automerge-js/src/index.ts @@ -0,0 +1,496 @@ +import * as AutomergeWASM from "automerge-wasm" + +import { uuid } from './uuid' +export { uuid } from './uuid' + +import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" +import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" +import { Counter } from "./counter" +//@ts-ignore +import { Text } from "./text" +import { Int, Uint, Float64 } from "./numbers" +import { isObject } from "./common" + +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" + +export { Counter } from "./counter" +export { Int, Uint, Float64 } from "./numbers" +//@ts-ignore +export { Text } from "./text" + +function _state(doc: Doc) : Automerge { + let state = (doc)[STATE] + if (state == undefined) { + throw new RangeError("must be the document root") + } + return state +} + +function _frozen(doc: Doc) : boolean { + return (doc)[FROZEN] === true +} + +function _heads(doc: Doc) : Heads | undefined { + return (doc)[HEADS] +} + +function _obj(doc: Doc) : ObjID { + return (doc)[OBJECT_ID] +} + +function _readonly(doc: Doc) : boolean { + return (doc)[READ_ONLY] === true +} + +export function init(actor?: ActorId) : Doc{ + if (typeof actor !== "string") { + actor = undefined + } + const state = AutomergeWASM.create(actor) + return rootProxy(state, true); +} + +export function clone(doc: Doc) : Doc { + const state = _state(doc).clone() + return rootProxy(state, true); +} + +export function free(doc: Doc) { + return _state(doc).free() +} + +export function from(initialState: T | Doc, actor?: ActorId): Doc { + return change(init(actor), (d) => Object.assign(d, initialState)) +} + +export function change>(doc: D, options: ChangeOptions | ChangeFn, callback?: ChangeFn): D { + + if (typeof options === 'function') { + callback = options + options = {} + } + + if (typeof options === "string") { + options = { message: options } + } + + if (typeof callback !== "function") { + throw new RangeError("invalid change function"); + } + + if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") { + throw new RangeError("must be the document root"); + } + if (_frozen(doc) === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (!!_heads(doc) === true) { + throw new RangeError("Attempting to change an out of date document"); + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const state = _state(doc) + const heads = state.getHeads() + try { + //@ts-ignore + doc[HEADS] = heads + //Object.defineProperty(doc, HEADS, { value: heads, configurable: true, writable: true }) + //@ts-ignore + doc[FROZEN] = true + let root = 
rootProxy(state); + callback(root) + if (state.pendingOps() === 0) { + //@ts-ignore + doc[FROZEN] = false + //@ts-ignore + doc[HEADS] = undefined + return doc + } else { + state.commit(options.message, options.time) + return rootProxy(state, true); + } + } catch (e) { + //console.log("ERROR: ",e) + //@ts-ignore + doc[FROZEN] = false + //@ts-ignore + doc[HEADS] = undefined + state.rollback() + throw e + } +} + +export function emptyChange(doc: Doc, options: ChangeOptions) { + if (options === undefined) { + options = {} + } + if (typeof options === "string") { + options = { message: options } + } + + if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") { + throw new RangeError("must be the document root"); + } + if (_frozen(doc) === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + + const state = _state(doc) + state.commit(options.message, options.time) + return rootProxy(state, true); +} + +export function load(data: Uint8Array, actor: ActorId) : Doc { + const state = AutomergeWASM.load(data, actor) + return rootProxy(state, true); +} + +export function save(doc: Doc) : Uint8Array { + const state = _state(doc) + return state.save() +} + +export function merge(local: Doc, remote: Doc) : Doc { + if (!!_heads(local) === true) { + throw new RangeError("Attempting to change an out of date document"); + } + const localState = _state(local) + const heads = localState.getHeads() + const remoteState = _state(remote) + const changes = localState.getChangesAdded(remoteState) + localState.applyChanges(changes) + //@ts-ignore + local[HEADS] = heads + return rootProxy(localState, true) +} + +export function getActorId(doc: Doc) : ActorId { + const state = _state(doc) + return state.getActorId() +} + +function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { + let values = context.getAll(objectId, prop) + if (values.length <= 1) { + return + } + let result = {} + for (const conflict of values) { + const datatype = conflict[0] + const value = conflict[1] + switch (datatype) { + case "map": + //@ts-ignore + result[value] = mapProxy(context, value, [ prop ], true) + break; + case "list": + //@ts-ignore + result[value] = listProxy(context, value, [ prop ], true) + break; + case "text": + //@ts-ignore + result[value] = textProxy(context, value, [ prop ], true) + break; + //case "table": + //case "cursor": + case "str": + case "uint": + case "int": + case "f64": + case "boolean": + case "bytes": + case "null": + //@ts-ignore + result[conflict[2]] = value + break; + case "counter": + //@ts-ignore + result[conflict[2]] = new Counter(value) + break; + case "timestamp": + //@ts-ignore + result[conflict[2]] = new Date(value) + break; + default: + throw RangeError(`datatype ${datatype} unimplemented`) + } + } + return result +} + +export function getConflicts(doc: Doc, prop: Prop) : any { + const state = _state(doc) + const objectId = _obj(doc) + return conflictAt(state, objectId, prop) +} + +export function getLastLocalChange(doc: Doc) : Change | undefined { + const state = _state(doc) + try { + return state.getLastLocalChange() + } catch (e) { + return + } +} + +export function getObjectId(doc: Doc) : ObjID { + return _obj(doc) +} + +export function getChanges(oldState: Doc, newState: Doc) : Change[] { + const o = _state(oldState) + const n = _state(newState) + const heads = _heads(oldState) + return n.getChanges(heads || 
o.getHeads()) +} + +export function getAllChanges(doc: Doc) : Change[] { + const state = _state(doc) + return state.getChanges([]) +} + +export function applyChanges(doc: Doc, changes: Change[]) : [Doc] { + if (doc === undefined || _obj(doc) !== "_root") { + throw new RangeError("must be the document root"); + } + if (_frozen(doc) === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const state = _state(doc) + const heads = state.getHeads() + state.applyChanges(changes) + //@ts-ignore + doc[HEADS] = heads + return [rootProxy(state, true)]; +} + +export function getHistory(doc: Doc) : State[] { + const actor = getActorId(doc) + const history = getAllChanges(doc) + return history.map((change, index) => ({ + get change () { + return decodeChange(change) + }, + get snapshot () { + const [state] = applyChanges(init(), history.slice(0, index + 1)) + return state + } + }) + ) +} + +// FIXME : no tests +export function equals(val1: any, val2: any) : boolean { + if (!isObject(val1) || !isObject(val2)) return val1 === val2 + const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() + if (keys1.length !== keys2.length) return false + for (let i = 0; i < keys1.length; i++) { + if (keys1[i] !== keys2[i]) return false + if (!equals(val1[keys1[i]], val2[keys2[i]])) return false + } + return true +} + +export function encodeSyncState(state: SyncState) : Uint8Array { + return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state)) +} + +export function decodeSyncState(state: Uint8Array) : SyncState { + return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state)) +} + +export function generateSyncMessage(doc: Doc, inState: SyncState) : [ SyncState, SyncMessage | null ] { + const state = _state(doc) + const syncState = AutomergeWASM.importSyncState(inState) + const message = state.generateSyncMessage(syncState) + const outState = AutomergeWASM.exportSyncState(syncState) + return [ outState, message ] +} + +export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage) : [ Doc, SyncState, null ] { + const syncState = AutomergeWASM.importSyncState(inState) + if (doc === undefined || _obj(doc) !== "_root") { + throw new RangeError("must be the document root"); + } + if (_frozen(doc) === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (!!_heads(doc) === true) { + throw new RangeError("Attempting to change an out of date document"); + } + if (_readonly(doc) === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const state = _state(doc) + const heads = state.getHeads() + state.receiveSyncMessage(syncState, message) + //@ts-ignore + doc[HEADS] = heads; + const outState = AutomergeWASM.exportSyncState(syncState) + return [rootProxy(state, true), outState, null]; +} + +export function initSyncState() : SyncState { + return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState()) +} + +export function encodeChange(change: DecodedChange) : Change { + return AutomergeWASM.encodeChange(change) +} + +export function decodeChange(data: Change) : DecodedChange { + return AutomergeWASM.decodeChange(data) +} + +export function encodeSyncMessage(message: DecodedSyncMessage) : SyncMessage { + return AutomergeWASM.encodeSyncMessage(message) +} + +export function decodeSyncMessage(message: SyncMessage) : DecodedSyncMessage { + return 
AutomergeWASM.decodeSyncMessage(message) +} + +export function getMissingDeps(doc: Doc, heads: Heads) : Heads { + const state = _state(doc) + return state.getMissingDeps(heads) +} + +export function getHeads(doc: Doc) : Heads { + const state = _state(doc) + return _heads(doc) || state.getHeads() +} + +export function dump(doc: Doc) { + const state = _state(doc) + state.dump() +} + +export function toJS(doc: any) : any { + if (typeof doc === "object") { + if (doc instanceof Uint8Array) { + return doc + } + if (doc === null) { + return doc + } + if (doc instanceof Array) { + return doc.map((a) => toJS(a)) + } + if (doc instanceof Text) { + //@ts-ignore + return doc.map((a: any) => toJS(a)) + } + let tmp : any = {} + for (let index in doc) { + tmp[index] = toJS(doc[index]) + } + return tmp + } else { + return doc + } +} + +type ChangeOptions = + | string // = message + | { + message?: string + time?: number + } + +type Doc = FreezeObject + +/** + * The argument pased to the callback of a `change` function is a mutable proxy of the original + * type. `Proxy` is the inverse of `Doc`: `Proxy>` is `T`, and `Doc>` is `D`. + */ +type Proxy = D extends Doc ? T : never + +type ChangeFn = (doc: T) => void + +interface State { + change: DecodedChange + snapshot: T +} + +// custom CRDT types + +/* + class TableRow { + readonly id: UUID + } + + class Table { + constructor() + add(item: T): UUID + byId(id: UUID): T & TableRow + count: number + ids: UUID[] + remove(id: UUID): void + rows: (T & TableRow)[] + } +*/ + + class List extends Array { + insertAt?(index: number, ...args: T[]): List + deleteAt?(index: number, numDelete?: number): List + } + +/* + + class Text extends List { + constructor(text?: string | string[]) + get(index: number): string + toSpans(): (string | T)[] + } + + // Note that until https://github.com/Microsoft/TypeScript/issues/2361 is addressed, we + // can't treat a Counter like a literal number without force-casting it as a number. + // This won't compile: + // `assert.strictEqual(c + 10, 13) // Operator '+' cannot be applied to types 'Counter' and '10'.ts(2365)` + // But this will: + // `assert.strictEqual(c as unknown as number + 10, 13)` + class Counter extends Number { + constructor(value?: number) + increment(delta?: number): void + decrement(delta?: number): void + toString(): string + valueOf(): number + value: number + } + + class Int { constructor(value: number) } + class Uint { constructor(value: number) } + class Float64 { constructor(value: number) } + +*/ + + // Readonly variants + + //type ReadonlyTable = ReadonlyArray & Table + type ReadonlyList = ReadonlyArray & List + type ReadonlyText = ReadonlyList & Text + +// prettier-ignore +type Freeze = + T extends Function ? T + : T extends Text ? ReadonlyText +// : T extends Table ? FreezeTable + : T extends List ? FreezeList + : T extends Array ? FreezeArray + : T extends Map ? FreezeMap + : T extends string & infer O ? 
string & O + : FreezeObject + +//interface FreezeTable extends ReadonlyTable> {} +interface FreezeList extends ReadonlyList> {} +interface FreezeArray extends ReadonlyArray> {} +interface FreezeMap extends ReadonlyMap, Freeze> {} +type FreezeObject = { readonly [P in keyof T]: Freeze } diff --git a/automerge-js/src/numbers.js b/automerge-js/src/numbers.ts similarity index 76% rename from automerge-js/src/numbers.js rename to automerge-js/src/numbers.ts index 1ee22dee..dbc26669 100644 --- a/automerge-js/src/numbers.js +++ b/automerge-js/src/numbers.ts @@ -1,7 +1,9 @@ // Convience classes to allow users to stricly specify the number type they want -class Int { - constructor(value) { +export class Int { + value: number; + + constructor(value: number) { if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) { throw new RangeError(`Value ${value} cannot be a uint`) } @@ -10,8 +12,10 @@ class Int { } } -class Uint { - constructor(value) { +export class Uint { + value: number; + + constructor(value: number) { if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) { throw new RangeError(`Value ${value} cannot be a uint`) } @@ -20,8 +24,10 @@ class Uint { } } -class Float64 { - constructor(value) { +export class Float64 { + value: number; + + constructor(value: number) { if (typeof value !== 'number') { throw new RangeError(`Value ${value} cannot be a float64`) } @@ -30,4 +36,3 @@ class Float64 { } } -module.exports = { Int, Uint, Float64 } diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.ts similarity index 90% rename from automerge-js/src/proxies.js rename to automerge-js/src/proxies.ts index 3bf2fbd2..4e91b2e2 100644 --- a/automerge-js/src/proxies.js +++ b/automerge-js/src/proxies.ts @@ -1,11 +1,15 @@ -const AutomergeWASM = require("automerge-wasm") -const { Int, Uint, Float64 } = require("./numbers"); -const { Counter, getWriteableCounter } = require("./counter"); -const { Text } = require("./text"); -const { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } = require("./constants") +import AutomergeWASM from "automerge-wasm" +import { Automerge, Heads, ObjID } from "automerge-wasm" +// @ts-ignore +import { Int, Uint, Float64 } from "./numbers" +// @ts-ignore +import { Counter, getWriteableCounter } from "./counter" +// @ts-ignore +import { Text } from "./text" +import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } from "./constants" -function parseListIndex(key) { +export function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) if (typeof key !== 'number') { // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key)) @@ -17,7 +21,7 @@ function parseListIndex(key) { return key } -function valueAt(target, prop) { +function valueAt(target, prop) : any { const { context, objectId, path, readonly, heads} = target let value = context.get(objectId, prop, heads) if (value === undefined) { @@ -96,8 +100,8 @@ function import_value(value) { } } -const MapHandler = { - get (target, key) { +export const MapHandler = { + get (target, key) : any { const { context, objectId, path, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (key === OBJECT_ID) return objectId @@ -119,11 +123,11 @@ const MapHandler = { } if (key === FROZEN) { target.frozen = val - return + return true } if (key === HEADS) { target.heads = val - return + return true } let [ value, datatype ] = 
import_value(val) if (frozen) { @@ -192,10 +196,11 @@ const MapHandler = { } -const ListHandler = { +export const ListHandler = { get (target, index) { const {context, objectId, path, readonly, frozen, heads } = target index = parseListIndex(index) + // @ts-ignore if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (index === OBJECT_ID) return objectId @@ -231,11 +236,11 @@ const ListHandler = { } if (index === FROZEN) { target.frozen = val - return + return true } if (index === HEADS) { target.heads = val - return + return true } if (typeof index == "string") { throw new RangeError('list index must be a number') @@ -322,9 +327,9 @@ const ListHandler = { }, getPrototypeOf(target) { return Object.getPrototypeOf([]) }, - ownKeys (target) { + ownKeys (target) : string[] { const {context, objectId, heads } = target - let keys = [] + let keys : string[] = [] // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array // but not uncommenting it causes for (i in list) {} to not enumerate values properly //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } @@ -333,12 +338,13 @@ const ListHandler = { } } -const TextHandler = Object.assign({}, ListHandler, { +export const TextHandler = Object.assign({}, ListHandler, { get (target, index) { // FIXME this is a one line change from ListHandler.get() const {context, objectId, path, readonly, frozen, heads } = target index = parseListIndex(index) if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } + // @ts-ignore if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } if (index === OBJECT_ID) return objectId if (index === READ_ONLY) return readonly @@ -368,24 +374,24 @@ const TextHandler = Object.assign({}, ListHandler, { }, }) -function mapProxy(context, objectId, path, readonly, heads) { +export function mapProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) } -function listProxy(context, objectId, path, readonly, heads) { +export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { let target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, ListHandler) } -function textProxy(context, objectId, path, readonly, heads) { +export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { let target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, TextHandler) } -function rootProxy(context, readonly) { - return mapProxy(context, "_root", [], readonly) +export function rootProxy(context: Automerge, readonly?: boolean) : any { + return mapProxy(context, "_root", [], !!readonly) } function listMethods(target) { @@ -400,18 +406,20 @@ function listMethods(target) { return this }, - fill(val, start, end) { - // FIXME - let list = context.getObject(objectId) - let [value, datatype] = valueAt(target, index) - for (let index = parseListIndex(start || 0); index < parseListIndex(end || list.length); index++) { - context.put(objectId, index, value, datatype) + fill(val: any, start: number, end: 
number) { + // FIXME needs tests + const [value, datatype] = import_value(val) + start = parseListIndex(start || 0) + end = parseListIndex(end || context.length(objectId)) + for (let i = start; i < end; i++) { + context.put(objectId, i, value, datatype) } return this }, indexOf(o, start = 0) { // FIXME + /* const id = o[OBJECT_ID] if (id) { const list = context.getObject(objectId) @@ -424,6 +432,7 @@ function listMethods(target) { } else { return context.indexOf(objectId, o, start) } + */ }, insertAt(index, ...values) { @@ -468,7 +477,7 @@ function listMethods(target) { if (readonly) { throw new RangeError("Sequence object cannot be modified outside of a change block") } - let result = [] + let result : any = [] for (let i = 0; i < del; i++) { let value = valueAt(target, index) result.push(value) @@ -527,7 +536,7 @@ function listMethods(target) { let len = context.length(objectId, heads) const iterator = { next: () => { - let value = undefined + let value : undefined | number = undefined if (i < len) { value = i; i++ } return { value, done: true } } @@ -557,7 +566,7 @@ function listMethods(target) { 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', 'slice', 'some', 'toLocaleString', 'toString']) { methods[method] = (...args) => { - const list = [] + const list : any = [] while (true) { let value = valueAt(target, list.length) if (value == undefined) { @@ -575,7 +584,7 @@ function listMethods(target) { function textMethods(target) { const {context, objectId, path, readonly, frozen, heads } = target - const methods = { + const methods : any = { set (index, value) { return this[index] = value }, @@ -585,8 +594,8 @@ function textMethods(target) { toString () { return context.text(objectId, heads).replace(//g,'') }, - toSpans () { - let spans = [] + toSpans () : any[] { + let spans : any[] = [] let chars = '' let length = this.length for (let i = 0; i < length; i++) { @@ -614,4 +623,4 @@ function textMethods(target) { } -module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler } +//module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler } diff --git a/automerge-js/src/sync.js b/automerge-js/src/sync.ts similarity index 94% rename from automerge-js/src/sync.js rename to automerge-js/src/sync.ts index 2ae3f4e4..fd40e343 100644 --- a/automerge-js/src/sync.js +++ b/automerge-js/src/sync.ts @@ -16,11 +16,10 @@ * last sync to disk), and we fall back to sending the entire document in this case. */ -//const Backend = require('./backend') -const Backend = {} //require('./backend') -const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding') -const { decodeChangeMeta } = require('./columnar') -const { copyObject } = require('../src/common') +const Backend : any = {} //require('./backend') +import { hexStringToBytes, bytesToHexString, Encoder, Decoder } from './encoding' +import { decodeChangeMeta } from './columnar' +import { copyObject } from './common' const HASH_SIZE = 32 // 256 bits = 32 bytes const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification @@ -36,7 +35,12 @@ const BITS_PER_ENTRY = 10, NUM_PROBES = 7 * over a network. The entries that are added are assumed to already be SHA-256 hashes, * so this implementation does not perform its own hashing. 
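 *
 * For intuition about the tuning constants above (BITS_PER_ENTRY = 10, NUM_PROBES = 7):
 * the standard Bloom filter estimate puts the false-positive probability at
 * p ≈ (1 - e^(-k/b))^k for k probes and b bits per entry. A minimal sketch of that
 * arithmetic (this is textbook Bloom filter math, not something this module computes):
 *
 *   const k = NUM_PROBES      // 7
 *   const b = BITS_PER_ENTRY  // 10
 *   const p = Math.pow(1 - Math.exp(-k / b), k)
 *   console.log(p.toFixed(4)) // ≈ 0.0082, i.e. roughly a 1-in-120 false hit rate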
*/ -class BloomFilter { +export class BloomFilter { + numEntries: number; + numBitsPerEntry: number; + numProbes: number; + bits: Uint8Array; + constructor (arg) { if (Array.isArray(arg)) { // arg is an array of SHA256 hashes in hexadecimal encoding @@ -143,8 +147,8 @@ function encodeHashes(encoder, hashes) { * Decodes a byte array in the format returned by encodeHashes(), and returns its content as an * array of hex strings. */ -function decodeHashes(decoder) { - let length = decoder.readUint32(), hashes = [] +function decodeHashes(decoder) : string[] { + let length = decoder.readUint32(), hashes : string[] = [] for (let i = 0; i < length; i++) { hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) } @@ -155,7 +159,7 @@ function decodeHashes(decoder) { * Takes a sync message of the form `{heads, need, have, changes}` and encodes it as a byte array for * transmission. */ -function encodeSyncMessage(message) { +export function encodeSyncMessage(message) { const encoder = new Encoder() encoder.appendByte(MESSAGE_TYPE_SYNC) encodeHashes(encoder, message.heads) @@ -175,7 +179,7 @@ function encodeSyncMessage(message) { /** * Takes a binary-encoded sync message and decodes it into the form `{heads, need, have, changes}`. */ -function decodeSyncMessage(bytes) { +export function decodeSyncMessage(bytes) { const decoder = new Decoder(bytes) const messageType = decoder.readByte() if (messageType !== MESSAGE_TYPE_SYNC) { @@ -187,12 +191,14 @@ function decodeSyncMessage(bytes) { let message = {heads, need, have: [], changes: []} for (let i = 0; i < haveCount; i++) { const lastSync = decodeHashes(decoder) - const bloom = decoder.readPrefixedBytes(decoder) + const bloom = decoder.readPrefixedBytes() + // @ts-ignore message.have.push({lastSync, bloom}) } const changeCount = decoder.readUint32() for (let i = 0; i < changeCount; i++) { const change = decoder.readPrefixedBytes() + // @ts-ignore message.changes.push(change) } // Ignore any trailing bytes -- they can be used for extensions by future versions of the protocol @@ -204,7 +210,7 @@ function decodeSyncMessage(bytes) { * an application restart or disconnect and reconnect. The ephemeral parts of the state that should * be cleared on reconnect are not encoded. */ -function encodeSyncState(syncState) { +export function encodeSyncState(syncState) { const encoder = new Encoder() encoder.appendByte(PEER_STATE_TYPE) encodeHashes(encoder, syncState.sharedHeads) @@ -215,7 +221,7 @@ function encodeSyncState(syncState) { * Takes a persisted peer state as encoded by `encodeSyncState` and decodes it into a SyncState * object. The parts of the peer state that were not encoded are initialised with default values. 
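 *
 * A minimal sketch of the intended round-trip, assuming an application-provided
 * `store` keyed by peer ID (the storage layer is hypothetical, not part of this module):
 *
 *   store.put(peerId, encodeSyncState(syncState))        // before shutdown
 *   const restored = decodeSyncState(store.get(peerId))  // after reconnect
 *   // restored.sharedHeads is preserved; the ephemeral fields come back as defaults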
*/ -function decodeSyncState(bytes) { +export function decodeSyncState(bytes) { const decoder = new Decoder(bytes) const recordType = decoder.readByte() if (recordType !== PEER_STATE_TYPE) { @@ -249,7 +255,7 @@ function getChangesToSend(backend, have, need) { return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) } - let lastSyncHashes = {}, bloomFilters = [] + let lastSyncHashes : any = {}, bloomFilters : BloomFilter[] = [] for (let h of have) { for (let hash of h.lastSync) lastSyncHashes[hash] = true bloomFilters.push(new BloomFilter(h.bloom)) @@ -259,7 +265,7 @@ function getChangesToSend(backend, have, need) { const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) .map(change => decodeChangeMeta(change, true)) - let changeHashes = {}, dependents = {}, hashesToSend = {} + let changeHashes : any = {}, dependents : any = {}, hashesToSend : any = {} for (let change of changes) { changeHashes[change.hash] = true @@ -278,7 +284,7 @@ function getChangesToSend(backend, have, need) { // Include any changes that depend on a Bloom-negative change let stack = Object.keys(hashesToSend) while (stack.length > 0) { - const hash = stack.pop() + const hash : any = stack.pop() if (dependents[hash]) { for (let dep of dependents[hash]) { if (!hashesToSend[dep]) { @@ -290,7 +296,7 @@ function getChangesToSend(backend, have, need) { } // Include any explicitly requested changes - let changesToSend = [] + let changesToSend : any = [] for (let hash of need) { hashesToSend[hash] = true if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? @@ -306,7 +312,7 @@ function getChangesToSend(backend, have, need) { return changesToSend } -function initSyncState() { +export function initSyncState() { return { sharedHeads: [], lastSentHeads: [], @@ -325,7 +331,7 @@ function compareArrays(a, b) { * Given a backend and what we believe to be the state of our peer, generate a message which tells * them about we have and includes any changes we believe they need */ -function generateSyncMessage(backend, syncState) { +export function generateSyncMessage(backend, syncState) { if (!backend) { throw new Error("generateSyncMessage called with no Automerge document") } @@ -345,7 +351,7 @@ function generateSyncMessage(backend, syncState) { // because they (intentionally) only sent us a subset of changes. In case 1, we leave the `have` // field of the message empty because we just want to fill in the missing dependencies for now. // In case 2, or if ourNeed is empty, we send a Bloom filter to request any unsent changes. 
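  // Stepping back: callers drive generateSyncMessage()/receiveSyncMessage() in a loop
  // until both sides report nothing to send. A minimal sketch against this module's
  // backend-level API, assuming two in-memory backends b1/b2 and a lossless transport
  // (the variable names are illustrative):
  //
  //   let s1 = initSyncState(), s2 = initSyncState(), m1, m2
  //   do {
  //     ;[s1, m1] = generateSyncMessage(b1, s1)
  //     if (m1) [b2, s2] = receiveSyncMessage(b2, s2, m1)
  //     ;[s2, m2] = generateSyncMessage(b2, s2)
  //     if (m2) [b1, s1] = receiveSyncMessage(b1, s1, m2)
  //   } while (m1 || m2)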
- let ourHave = [] + let ourHave : any = [] if (!theirHeads || ourNeed.every(hash => theirHeads.includes(hash))) { ourHave = [makeBloomFilter(backend, sharedHeads)] } @@ -418,7 +424,7 @@ function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) { * Given a backend, a message message and the state of our peer, apply any changes, update what * we believe about the peer, and (if there were applied changes) produce a patch for the frontend */ -function receiveSyncMessage(backend, oldSyncState, binaryMessage) { +export function receiveSyncMessage(backend, oldSyncState, binaryMessage) { if (!backend) { throw new Error("generateSyncMessage called with no Automerge document") } diff --git a/automerge-js/src/text.js b/automerge-js/src/text.ts similarity index 82% rename from automerge-js/src/text.js rename to automerge-js/src/text.ts index a7f442fe..02aac54d 100644 --- a/automerge-js/src/text.js +++ b/automerge-js/src/text.ts @@ -1,39 +1,37 @@ -const { OBJECT_ID } = require('./constants') -const { isObject } = require('../src/common') +import { OBJECT_ID } from './constants' +import { isObject } from '../src/common' -class Text { - constructor (text) { - const instance = Object.create(Text.prototype) +export class Text { + elems: any[] + + constructor (text?: string | string[]) { + //const instance = Object.create(Text.prototype) if (typeof text === 'string') { - instance.elems = [...text] + this.elems = [...text] } else if (Array.isArray(text)) { - instance.elems = text + this.elems = text } else if (text === undefined) { - instance.elems = [] + this.elems = [] } else { throw new TypeError(`Unsupported initial value for Text: ${text}`) } - return instance } - get length () { + get length () : number { return this.elems.length } - get (index) { + get (index) : any { return this.elems[index] } - getElemId (index) { - return undefined - } - /** * Iterates over the text elements character by character, including any * inline objects. */ [Symbol.iterator] () { - let elems = this.elems, index = -1 + const elems = this.elems + let index = -1 return { next () { index += 1 @@ -50,7 +48,7 @@ class Text { * Returns the content of the Text object as a simple string, ignoring any * non-character elements. */ - toString() { + toString() : string { // Concatting to a string is faster than creating an array and then // .join()ing for small (<100KB) arrays. // https://jsperf.com/join-vs-loop-w-type-test @@ -68,8 +66,8 @@ class Text { * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans: * => ['ab', {x: 3}, 'cd'] */ - toSpans() { - let spans = [] + toSpans() : any[] { + const spans : any = [] let chars = '' for (const elem of this.elems) { if (typeof elem === 'string') { @@ -92,21 +90,21 @@ class Text { * Returns the content of the Text object as a simple string, so that the * JSON serialization of an Automerge document represents text nicely. */ - toJSON() { + toJSON() : string { return this.toString() } /** * Updates the list item at position `index` to a new value `value`. */ - set (index, value) { + set (index: number, value: any) { this.elems[index] = value } /** * Inserts new list items `values` starting at position `index`. */ - insertAt(index, ...values) { + insertAt(index: number, ...values) { this.elems.splice(index, 0, ... 
values) } @@ -129,4 +127,3 @@ for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', } } -module.exports = { Text } diff --git a/automerge-js/src/uuid.js b/automerge-js/src/uuid.js deleted file mode 100644 index 42a8cc6e..00000000 --- a/automerge-js/src/uuid.js +++ /dev/null @@ -1,16 +0,0 @@ -const { v4: uuid } = require('uuid') - -function defaultFactory() { - return uuid().replace(/-/g, '') -} - -let factory = defaultFactory - -function makeUuid() { - return factory() -} - -makeUuid.setFactory = newFactory => { factory = newFactory } -makeUuid.reset = () => { factory = defaultFactory } - -module.exports = makeUuid diff --git a/automerge-js/src/uuid.ts b/automerge-js/src/uuid.ts new file mode 100644 index 00000000..bc6c4bb1 --- /dev/null +++ b/automerge-js/src/uuid.ts @@ -0,0 +1,16 @@ +import { v4 } from 'uuid' + +function defaultFactory() { + return v4().replace(/-/g, '') +} + +let factory = defaultFactory + +export function uuid() { + return factory() +} + +// @ts-ignore +uuid.setFactory = newFactory => { factory = newFactory } +// @ts-ignore +uuid.reset = () => { factory = defaultFactory } diff --git a/automerge-js/test/basic_test.js b/automerge-js/test/basic_test.ts similarity index 98% rename from automerge-js/test/basic_test.js rename to automerge-js/test/basic_test.ts index 68d2fecf..5aff21b0 100644 --- a/automerge-js/test/basic_test.js +++ b/automerge-js/test/basic_test.ts @@ -1,7 +1,6 @@ - -const assert = require('assert') -const util = require('util') -const Automerge = require('..') +import * as assert from 'assert' +import * as util from 'util' +import * as Automerge from '../src' describe('Automerge', () => { describe('basics', () => { diff --git a/automerge-js/test/columnar_test.js b/automerge-js/test/columnar_test.ts similarity index 96% rename from automerge-js/test/columnar_test.js rename to automerge-js/test/columnar_test.ts index 8cbe1482..ca670377 100644 --- a/automerge-js/test/columnar_test.js +++ b/automerge-js/test/columnar_test.ts @@ -1,7 +1,7 @@ -const assert = require('assert') -const { checkEncoded } = require('./helpers') -const Automerge = require('..') -const { encodeChange, decodeChange } = Automerge +import * as assert from 'assert' +import { checkEncoded } from './helpers' +import * as Automerge from '../src' +import { encodeChange, decodeChange } from '../src' describe('change encoding', () => { it('should encode text edits', () => { diff --git a/automerge-js/test/helpers.js b/automerge-js/test/helpers.ts similarity index 93% rename from automerge-js/test/helpers.js rename to automerge-js/test/helpers.ts index c3fc52ae..76cae7d6 100644 --- a/automerge-js/test/helpers.js +++ b/automerge-js/test/helpers.ts @@ -1,5 +1,5 @@ -const assert = require('assert') -const { Encoder } = require('../src/encoding') +import * as assert from 'assert' +import { Encoder } from '../src/encoding' // Assertion that succeeds if the first argument deepStrictEquals at least one of the // subsequent arguments (but we don't care which one) diff --git a/automerge-js/test/legacy_tests.js b/automerge-js/test/legacy_tests.ts similarity index 99% rename from automerge-js/test/legacy_tests.js rename to automerge-js/test/legacy_tests.ts index 76348d06..4034ca25 100644 --- a/automerge-js/test/legacy_tests.js +++ b/automerge-js/test/legacy_tests.ts @@ -1,9 +1,7 @@ -const assert = require('assert') -//const Automerge = process.env.TEST_DIST === '1' ? 
require('../dist/automerge') : require('../src/automerge') -const Automerge = require('../src') -const { assertEqualsOneOf } = require('./helpers') -const { decodeChange } = require('../src/columnar') -//const { decodeChange } = Automerge +import * as assert from 'assert' +import * as Automerge from '../src' +import { assertEqualsOneOf } from './helpers' +import { decodeChange } from '../src/columnar' const UUID_PATTERN = /^[0-9a-f]{32}$/ const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/ @@ -810,11 +808,12 @@ describe('Automerge', () => { }) describe('concurrent use', () => { - let s1, s2, s3 + let s1, s2, s3, s4 beforeEach(() => { s1 = Automerge.init() s2 = Automerge.init() s3 = Automerge.init() + s4 = Automerge.init() }) it('should merge concurrent updates of different properties', () => { diff --git a/automerge-js/test/sync_test.js b/automerge-js/test/sync_test.ts similarity index 99% rename from automerge-js/test/sync_test.js rename to automerge-js/test/sync_test.ts index 86c3b3fd..c7f8015b 100644 --- a/automerge-js/test/sync_test.js +++ b/automerge-js/test/sync_test.ts @@ -1,8 +1,8 @@ -const assert = require('assert') -const Automerge = require('..'); -const { BloomFilter } = require('../src/sync') -const { decodeChangeMeta } = require('../src/columnar') -const { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } = Automerge +import * as assert from 'assert' +import * as Automerge from '../src' +import { BloomFilter } from '../src/sync' +import { decodeChangeMeta } from '../src/columnar' +import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" function inspect(a) { const util = require("util"); @@ -240,6 +240,7 @@ describe('Data sync protocol', () => { it('should assume sent changes were recieved until we hear otherwise', () => { let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') let s1 = initSyncState(), message = null + let s2 n1 = Automerge.change(n1, {time: 0}, doc => doc.items = []) ;[n1, n2, s1, s2 ] = sync(n1, n2) diff --git a/automerge-js/test/text_test.js b/automerge-js/test/text_test.ts similarity index 99% rename from automerge-js/test/text_test.js rename to automerge-js/test/text_test.ts index 57e8884e..8dbfc93c 100644 --- a/automerge-js/test/text_test.js +++ b/automerge-js/test/text_test.ts @@ -1,6 +1,6 @@ -const assert = require('assert') -const Automerge = require('..') -const { assertEqualsOneOf } = require('./helpers') +import * as assert from 'assert' +import * as Automerge from '../src' +import { assertEqualsOneOf } from './helpers' function attributeStateToAttributes(accumulatedAttributes) { const attributes = {} diff --git a/automerge-js/test/uuid_test.js b/automerge-js/test/uuid_test.ts similarity index 89% rename from automerge-js/test/uuid_test.js rename to automerge-js/test/uuid_test.ts index a0f83df1..4182a8c4 100644 --- a/automerge-js/test/uuid_test.js +++ b/automerge-js/test/uuid_test.ts @@ -1,5 +1,5 @@ -const assert = require('assert') -const Automerge = require('..') +import * as assert from 'assert' +import * as Automerge from '../src' const uuid = Automerge.uuid diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json new file mode 100644 index 00000000..987f9d37 --- /dev/null +++ b/automerge-js/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "noImplicitAny": false, + "strict": true, + "allowJs": false, + "baseUrl": ".", + "lib": ["dom", "esnext.asynciterable", "es2017", "es2016", "es2015"], + "module": "commonjs", + 
"moduleResolution": "node", + "target": "es2016", + "skipLibCheck": true, + "outDir": "./dist" + }, + "include": [ "src/**/*" ], + "exclude": ["dist/**/*"] +} diff --git a/automerge-js/tslint.json b/automerge-js/tslint.json new file mode 100644 index 00000000..f7bb7a71 --- /dev/null +++ b/automerge-js/tslint.json @@ -0,0 +1,3 @@ +{ + "extends": "tslint:recommended" +} diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index bf23948f..47f32deb 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -89,6 +89,8 @@ export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; export function encodeSyncState(state: SyncState): Uint8Array; export function decodeSyncState(data: Uint8Array): SyncState; +export function exportSyncState(state: SyncState): JsSyncState; +export function importSyncState(state: JsSyncState): SyncState; export class Automerge { // change state @@ -153,6 +155,9 @@ export class Automerge { toJS(): any; } +export class JsSyncState { +} + export class SyncState { free(): void; clone(): SyncState; From 4f898b67b3102df2962c99938e68317032d0e2b2 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 17 May 2022 16:53:17 -0400 Subject: [PATCH 02/17] able to build npm package --- automerge-js/.gitignore | 1 + automerge-js/LICENSE | 10 ++++++++ automerge-js/README.md | 8 ++++++ automerge-js/config/cjs.json | 8 ++++++ automerge-js/config/mjs.json | 8 ++++++ automerge-js/config/types.json | 10 ++++++++ automerge-js/package.json | 47 +++++++++++++++++++++++++++++----- automerge-js/src/index.ts | 14 +++++----- automerge-js/tsconfig.json | 34 ++++++++++++++---------- automerge-wasm/web-index.js | 2 ++ 10 files changed, 116 insertions(+), 26 deletions(-) create mode 100644 automerge-js/LICENSE create mode 100644 automerge-js/README.md create mode 100644 automerge-js/config/cjs.json create mode 100644 automerge-js/config/mjs.json create mode 100644 automerge-js/config/types.json diff --git a/automerge-js/.gitignore b/automerge-js/.gitignore index 05065cf0..cfe564d7 100644 --- a/automerge-js/.gitignore +++ b/automerge-js/.gitignore @@ -1,3 +1,4 @@ /node_modules /yarn.lock dist +index.d.ts diff --git a/automerge-js/LICENSE b/automerge-js/LICENSE new file mode 100644 index 00000000..63b21502 --- /dev/null +++ b/automerge-js/LICENSE @@ -0,0 +1,10 @@ +MIT License + +Copyright 2022, Ink & Switch LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/automerge-js/README.md b/automerge-js/README.md new file mode 100644 index 00000000..3875e2b1 --- /dev/null +++ b/automerge-js/README.md @@ -0,0 +1,8 @@ + +## Todo + +1. write a readme +1. final name for package - to distinguish it from the old one +1. get a index.d.ts you like +1. publish package + diff --git a/automerge-js/config/cjs.json b/automerge-js/config/cjs.json new file mode 100644 index 00000000..890a0422 --- /dev/null +++ b/automerge-js/config/cjs.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "target": "es2016", + "module": "commonjs", + "outDir": "../dist/cjs" + } +} diff --git a/automerge-js/config/mjs.json b/automerge-js/config/mjs.json new file mode 100644 index 00000000..8f964400 --- /dev/null +++ b/automerge-js/config/mjs.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "target": "es6", + "module": "es6", + "outDir": "../dist/mjs" + } +} diff --git a/automerge-js/config/types.json b/automerge-js/config/types.json new file mode 100644 index 00000000..3e7cde18 --- /dev/null +++ b/automerge-js/config/types.json @@ -0,0 +1,10 @@ + +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "declaration": true, + "emitDeclarationOnly": true, + "outFile": "../index.d.ts" + }, + "include": [ "../src/index.ts" ] +} diff --git a/automerge-js/package.json b/automerge-js/package.json index 4b3b2b55..2bdafd6b 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -1,16 +1,51 @@ { "name": "automerge-js", + "collaborators": [ + "Orion Henry ", + "Martin Kleppmann" + ], "version": "0.1.0", - "main": "src/index.js", + "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", + "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", + "repository": "github:automerge/automerge-rs", + "files": [ + "README.md", + "LICENSE", + "package.json", + "index.d.ts", + "package.json", + "index.d.ts", + "dist/mjs/constants.js", + "dist/mjs/numbers.js", + "dist/mjs/sync.js", + "dist/mjs/index.js", + "dist/mjs/encoding.js", + "dist/mjs/columnar.js", + "dist/mjs/uuid.js", + "dist/mjs/counter.js", + "dist/mjs/common.js", + "dist/mjs/text.js", + "dist/mjs/proxies.js", + "dist/cjs/constants.js", + "dist/cjs/numbers.js", + "dist/cjs/sync.js", + "dist/cjs/index.js", + "dist/cjs/encoding.js", + "dist/cjs/columnar.js", + "dist/cjs/uuid.js", + "dist/cjs/counter.js", + "dist/cjs/common.js", + "dist/cjs/text.js", + "dist/cjs/proxies.js" + ], + "module": "./dist/mjs/index.js", + "main": "./dist/cjs/index.js", "license": "MIT", "scripts": { "lint": "tslint --project tsconfig.json", + "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/types.json", "test": "ts-mocha -p tsconfig.json test/**/*.ts" }, - "directories": { - "src": "./src", - "test": "./test" - }, "devDependencies": { "@types/expect": "^24.3.0", "@types/mocha": "^9.1.1", @@ -21,7 +56,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-wasm": "file:../automerge-wasm", + "automerge-wasm": "^0.1.2", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 1f86580e..2b81d70a 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -1,23 +1,25 @@ import * as AutomergeWASM from "automerge-wasm" import { uuid } from './uuid' + +import _init from "automerge-wasm" +export default _init + export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" 
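// The `_init` default export added above lets bundler users wait for wasm
// instantiation before touching the API. A minimal usage sketch for a
// browser/ESM build (it mirrors the webpack example added later in this series):
//
//   import init, * as Automerge from "automerge-js"
//   init().then(() => {
//     let doc = Automerge.init()
//     doc = Automerge.change(doc, (d) => d.hello = "from automerge-js")
//   })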
import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" import { Counter } from "./counter" -//@ts-ignore import { Text } from "./text" import { Int, Uint, Float64 } from "./numbers" import { isObject } from "./common" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" - +export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" -//@ts-ignore -export { Text } from "./text" + +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" function _state(doc: Doc) : Automerge { let state = (doc)[STATE] diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json index 987f9d37..b0e2620c 100644 --- a/automerge-js/tsconfig.json +++ b/automerge-js/tsconfig.json @@ -1,16 +1,22 @@ { - "compilerOptions": { - "noImplicitAny": false, - "strict": true, - "allowJs": false, - "baseUrl": ".", - "lib": ["dom", "esnext.asynciterable", "es2017", "es2016", "es2015"], - "module": "commonjs", - "moduleResolution": "node", - "target": "es2016", - "skipLibCheck": true, - "outDir": "./dist" - }, - "include": [ "src/**/*" ], - "exclude": ["dist/**/*"] + "compilerOptions": { + "target": "es2016", + "sourceMap": false, + "declaration": false, + "resolveJsonModule": true, + "module": "commonjs", + "moduleResolution": "node", + "noImplicitAny": false, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "noFallthroughCasesInSwitch": true, + "skipLibCheck": true, + "outDir": "./dist/cjs" + }, + "include": [ "src/**/*" ], + "exclude": [ + "./dist/**/*", + "./node_modules" + ] } diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js index ab9e8a1d..80057798 100644 --- a/automerge-wasm/web-index.js +++ b/automerge-wasm/web-index.js @@ -8,6 +8,8 @@ export { decodeSyncMessage, encodeSyncState, decodeSyncState, + exportSyncState, + importSyncState, } from "./bindgen.js" import init from "./bindgen.js" export default init; From 1eec70f11632a3800f65350e3e9a61fb1eaf724b Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 17 May 2022 17:01:06 -0400 Subject: [PATCH 03/17] example webpack for js --- automerge-js/examples/webpack/.gitignore | 5 +++ automerge-js/examples/webpack/package.json | 21 +++++++++++ .../examples/webpack/public/index.html | 10 ++++++ automerge-js/examples/webpack/src/index.js | 20 +++++++++++ .../examples/webpack/webpack.config.js | 35 +++++++++++++++++++ automerge-js/package.json | 2 +- automerge-wasm/package.json | 2 +- 7 files changed, 93 insertions(+), 2 deletions(-) create mode 100644 automerge-js/examples/webpack/.gitignore create mode 100644 automerge-js/examples/webpack/package.json create mode 100644 automerge-js/examples/webpack/public/index.html create mode 100644 automerge-js/examples/webpack/src/index.js create mode 100644 automerge-js/examples/webpack/webpack.config.js diff --git a/automerge-js/examples/webpack/.gitignore b/automerge-js/examples/webpack/.gitignore new file mode 100644 index 00000000..da9d3ff5 --- /dev/null +++ b/automerge-js/examples/webpack/.gitignore @@ -0,0 +1,5 @@ +yarn.lock +node_modules +public/*.wasm +public/main.js +dist diff --git a/automerge-js/examples/webpack/package.json b/automerge-js/examples/webpack/package.json new file mode 100644 index 
00000000..474d9904 --- /dev/null +++ b/automerge-js/examples/webpack/package.json @@ -0,0 +1,21 @@ +{ + "name": "webpack-automerge-example", + "version": "0.1.0", + "description": "", + "private": true, + "scripts": { + "build": "webpack", + "start": "serve public", + "test": "node dist/node.js" + }, + "author": "", + "dependencies": { + "automerge-js": "file:automerge-js-0.1.0.tgz" + }, + "devDependencies": { + "serve": "^13.0.2", + "webpack": "^5.72.1", + "webpack-cli": "^4.9.2", + "webpack-node-externals": "^3.0.0" + } +} diff --git a/automerge-js/examples/webpack/public/index.html b/automerge-js/examples/webpack/public/index.html new file mode 100644 index 00000000..5003393a --- /dev/null +++ b/automerge-js/examples/webpack/public/index.html @@ -0,0 +1,10 @@ + + + + + Simple Webpack for automerge-wasm + + + + + diff --git a/automerge-js/examples/webpack/src/index.js b/automerge-js/examples/webpack/src/index.js new file mode 100644 index 00000000..7d0b8371 --- /dev/null +++ b/automerge-js/examples/webpack/src/index.js @@ -0,0 +1,20 @@ +import init, * as Automerge from "automerge-js" + +// hello world code that will run correctly on web or node + +init().then(_ => { + let doc = Automerge.init() + doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") + const result = JSON.stringify(doc) + + if (typeof document !== 'undefined') { + // browser + const element = document.createElement('div'); + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element); + } else { + // server + console.log("node:", result) + } +}) + diff --git a/automerge-js/examples/webpack/webpack.config.js b/automerge-js/examples/webpack/webpack.config.js new file mode 100644 index 00000000..3ab0e798 --- /dev/null +++ b/automerge-js/examples/webpack/webpack.config.js @@ -0,0 +1,35 @@ +const path = require('path'); +const nodeExternals = require('webpack-node-externals'); + +// the most basic webpack config for node or web targets for automerge-wasm + +const serverConfig = { + // basic setup for bundling a node package + target: 'node', + externals: [nodeExternals()], + externalsPresets: { node: true }, + + entry: './src/index.js', + output: { + filename: 'node.js', + path: path.resolve(__dirname, 'dist'), + }, + mode: "development", // or production +}; + +const clientConfig = { + target: 'web', + entry: './src/index.js', + output: { + filename: 'main.js', + path: path.resolve(__dirname, 'public'), + }, + mode: "development", // or production + performance: { // we dont want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000 + } +}; + +module.exports = [serverConfig, clientConfig]; diff --git a/automerge-js/package.json b/automerge-js/package.json index 2bdafd6b..508f1351 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -56,7 +56,7 @@ "typescript": "^4.6.4" }, "dependencies": { - "automerge-wasm": "^0.1.2", + "automerge-wasm": "^0.1.3", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index a7243e3e..7029688c 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -8,7 +8,7 @@ "description": "wasm-bindgen bindings to the automerge rust implementation", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "repository": "github:automerge/automerge-rs", - "version": "0.1.2", + "version": "0.1.3", "license": "MIT", "files": [ "README.md", From 226bbeb023b0b1c48f6653a7e7bcc233ec047c34 Mon 
Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 17 May 2022 17:16:38 -0400 Subject: [PATCH 04/17] tslint to eslint --- automerge-js/.eslintignore | 2 ++ automerge-js/.eslintrc.cjs | 11 +++++++++++ automerge-js/package.json | 6 ++++-- automerge-js/src/index.ts | 1 + 4 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 automerge-js/.eslintignore create mode 100644 automerge-js/.eslintrc.cjs diff --git a/automerge-js/.eslintignore b/automerge-js/.eslintignore new file mode 100644 index 00000000..4d6880d3 --- /dev/null +++ b/automerge-js/.eslintignore @@ -0,0 +1,2 @@ +dist +examples diff --git a/automerge-js/.eslintrc.cjs b/automerge-js/.eslintrc.cjs new file mode 100644 index 00000000..80e08d55 --- /dev/null +++ b/automerge-js/.eslintrc.cjs @@ -0,0 +1,11 @@ +module.exports = { + root: true, + parser: '@typescript-eslint/parser', + plugins: [ + '@typescript-eslint', + ], + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + ], +}; diff --git a/automerge-js/package.json b/automerge-js/package.json index 508f1351..7bfbca15 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -42,7 +42,7 @@ "main": "./dist/cjs/index.js", "license": "MIT", "scripts": { - "lint": "tslint --project tsconfig.json", + "lint": "eslint src", "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/types.json", "test": "ts-mocha -p tsconfig.json test/**/*.ts" }, @@ -50,9 +50,11 @@ "@types/expect": "^24.3.0", "@types/mocha": "^9.1.1", "@types/uuid": "^8.3.4", + "@typescript-eslint/eslint-plugin": "^5.25.0", + "@typescript-eslint/parser": "^5.25.0", + "eslint": "^8.15.0", "mocha": "^10.0.0", "ts-mocha": "^10.0.0", - "tslint": "^6.1.3", "typescript": "^4.6.4" }, "dependencies": { diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 2b81d70a..9b856833 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -3,6 +3,7 @@ import * as AutomergeWASM from "automerge-wasm" import { uuid } from './uuid' import _init from "automerge-wasm" + export default _init export { uuid } from './uuid' From 1cf8f80ba4cd25ace693fcd2f0c3bb1e36964b88 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 18 May 2022 15:38:52 -0400 Subject: [PATCH 05/17] pull wasm out of deps --- automerge-js/package.json | 2 +- automerge-js/src/counter.ts | 2 +- automerge-js/src/index.ts | 166 +++++++------------------ automerge-js/src/low_level_api.ts | 190 +++++++++++++++++++++++++++++ automerge-js/src/proxies.ts | 16 +-- automerge-js/test/basic_test.ts | 3 + automerge-js/test/columnar_test.ts | 3 + automerge-js/test/legacy_tests.ts | 3 + automerge-js/test/sync_test.ts | 3 + automerge-js/test/text_test.ts | 3 + automerge-js/test/uuid_test.ts | 3 + 11 files changed, 258 insertions(+), 136 deletions(-) create mode 100644 automerge-js/src/low_level_api.ts diff --git a/automerge-js/package.json b/automerge-js/package.json index 7bfbca15..ac6c5c5a 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -55,10 +55,10 @@ "eslint": "^8.15.0", "mocha": "^10.0.0", "ts-mocha": "^10.0.0", + "automerge-wasm": "^0.1.3", "typescript": "^4.6.4" }, "dependencies": { - "automerge-wasm": "^0.1.3", "fast-sha256": "^1.3.0", "pako": "^2.0.4", "uuid": "^8.3" diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index fba2d8d0..0539af39 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "automerge-wasm" +import { Automerge, ObjID, Prop } from 
"./low_level_api" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. Since addition of integers is commutative, diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 9b856833..a1cc4968 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -1,11 +1,6 @@ -import * as AutomergeWASM from "automerge-wasm" import { uuid } from './uuid' -import _init from "automerge-wasm" - -export default _init - export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" @@ -19,8 +14,24 @@ export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" +import { ApiHandler, LowLevelApi, UseApi } from "./low_level_api" +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./low_level_api" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "./low_level_api" + +export type ChangeOptions = { message?: string, time?: number } + +export type Doc = { readonly [P in keyof T]: Doc } + +export type ChangeFn = (doc: T) => void + +export interface State { + change: DecodedChange + snapshot: T +} + +export function use(api: LowLevelApi) { + UseApi(api) +} function _state(doc: Doc) : Automerge { let state = (doc)[STATE] @@ -50,7 +61,7 @@ export function init(actor?: ActorId) : Doc{ if (typeof actor !== "string") { actor = undefined } - const state = AutomergeWASM.create(actor) + const state = ApiHandler.create(actor) return rootProxy(state, true); } @@ -67,16 +78,21 @@ export function from(initialState: T | Doc, actor?: ActorId): Doc { return change(init(actor), (d) => Object.assign(d, initialState)) } -export function change>(doc: D, options: ChangeOptions | ChangeFn, callback?: ChangeFn): D { - +export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { if (typeof options === 'function') { - callback = options - options = {} + return _change(doc, {}, options) + } else if (typeof callback === 'function') { + if (typeof options === "string") { + options = { message: options } + } + return _change(doc, options, callback) + } else { + throw RangeError("Invalid args for change") } +} + +function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { - if (typeof options === "string") { - options = { message: options } - } if (typeof callback !== "function") { throw new RangeError("invalid change function"); @@ -149,7 +165,7 @@ export function emptyChange(doc: Doc, options: ChangeOptions) { } export function load(data: Uint8Array, actor: ActorId) : Doc { - const state = AutomergeWASM.load(data, actor) + const state = ApiHandler.load(data, actor) return rootProxy(state, true); } @@ -303,23 +319,23 @@ export function equals(val1: any, val2: any) : boolean { } export function encodeSyncState(state: SyncState) : Uint8Array { - return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state)) + return ApiHandler.encodeSyncState(ApiHandler.importSyncState(state)) } export function decodeSyncState(state: Uint8Array) : SyncState { - return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state)) + return ApiHandler.exportSyncState(ApiHandler.decodeSyncState(state)) } export function generateSyncMessage(doc: Doc, inState: 
SyncState) : [ SyncState, SyncMessage | null ] { const state = _state(doc) - const syncState = AutomergeWASM.importSyncState(inState) + const syncState = ApiHandler.importSyncState(inState) const message = state.generateSyncMessage(syncState) - const outState = AutomergeWASM.exportSyncState(syncState) + const outState = ApiHandler.exportSyncState(syncState) return [ outState, message ] } export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage) : [ Doc, SyncState, null ] { - const syncState = AutomergeWASM.importSyncState(inState) + const syncState = ApiHandler.importSyncState(inState) if (doc === undefined || _obj(doc) !== "_root") { throw new RangeError("must be the document root"); } @@ -337,28 +353,28 @@ export function receiveSyncMessage(doc: Doc, inState: SyncState, message: state.receiveSyncMessage(syncState, message) //@ts-ignore doc[HEADS] = heads; - const outState = AutomergeWASM.exportSyncState(syncState) + const outState = ApiHandler.exportSyncState(syncState) return [rootProxy(state, true), outState, null]; } export function initSyncState() : SyncState { - return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState()) + return ApiHandler.exportSyncState(ApiHandler.initSyncState()) } export function encodeChange(change: DecodedChange) : Change { - return AutomergeWASM.encodeChange(change) + return ApiHandler.encodeChange(change) } export function decodeChange(data: Change) : DecodedChange { - return AutomergeWASM.decodeChange(data) + return ApiHandler.decodeChange(data) } export function encodeSyncMessage(message: DecodedSyncMessage) : SyncMessage { - return AutomergeWASM.encodeSyncMessage(message) + return ApiHandler.encodeSyncMessage(message) } export function decodeSyncMessage(message: SyncMessage) : DecodedSyncMessage { - return AutomergeWASM.decodeSyncMessage(message) + return ApiHandler.decodeSyncMessage(message) } export function getMissingDeps(doc: Doc, heads: Heads) : Heads { @@ -401,99 +417,3 @@ export function toJS(doc: any) : any { } } -type ChangeOptions = - | string // = message - | { - message?: string - time?: number - } - -type Doc = FreezeObject - -/** - * The argument pased to the callback of a `change` function is a mutable proxy of the original - * type. `Proxy` is the inverse of `Doc`: `Proxy>` is `T`, and `Doc>` is `D`. - */ -type Proxy = D extends Doc ? T : never - -type ChangeFn = (doc: T) => void - -interface State { - change: DecodedChange - snapshot: T -} - -// custom CRDT types - -/* - class TableRow { - readonly id: UUID - } - - class Table { - constructor() - add(item: T): UUID - byId(id: UUID): T & TableRow - count: number - ids: UUID[] - remove(id: UUID): void - rows: (T & TableRow)[] - } -*/ - - class List extends Array { - insertAt?(index: number, ...args: T[]): List - deleteAt?(index: number, numDelete?: number): List - } - -/* - - class Text extends List { - constructor(text?: string | string[]) - get(index: number): string - toSpans(): (string | T)[] - } - - // Note that until https://github.com/Microsoft/TypeScript/issues/2361 is addressed, we - // can't treat a Counter like a literal number without force-casting it as a number. 
- // This won't compile: - // `assert.strictEqual(c + 10, 13) // Operator '+' cannot be applied to types 'Counter' and '10'.ts(2365)` - // But this will: - // `assert.strictEqual(c as unknown as number + 10, 13)` - class Counter extends Number { - constructor(value?: number) - increment(delta?: number): void - decrement(delta?: number): void - toString(): string - valueOf(): number - value: number - } - - class Int { constructor(value: number) } - class Uint { constructor(value: number) } - class Float64 { constructor(value: number) } - -*/ - - // Readonly variants - - //type ReadonlyTable = ReadonlyArray & Table - type ReadonlyList = ReadonlyArray & List - type ReadonlyText = ReadonlyList & Text - -// prettier-ignore -type Freeze = - T extends Function ? T - : T extends Text ? ReadonlyText -// : T extends Table ? FreezeTable - : T extends List ? FreezeList - : T extends Array ? FreezeArray - : T extends Map ? FreezeMap - : T extends string & infer O ? string & O - : FreezeObject - -//interface FreezeTable extends ReadonlyTable> {} -interface FreezeList extends ReadonlyList> {} -interface FreezeArray extends ReadonlyArray> {} -interface FreezeMap extends ReadonlyMap, Freeze> {} -type FreezeObject = { readonly [P in keyof T]: Freeze } diff --git a/automerge-js/src/low_level_api.ts b/automerge-js/src/low_level_api.ts new file mode 100644 index 00000000..4f01a18b --- /dev/null +++ b/automerge-js/src/low_level_api.ts @@ -0,0 +1,190 @@ + +export type Actor = string; +export type ObjID = string; +export type Change = Uint8Array; +export type SyncMessage = Uint8Array; +export type Prop = string | number; +export type Hash = string; +export type Heads = Hash[]; +export type Value = string | number | boolean | null | Date | Uint8Array +export type ObjType = string | Array | Object +export type FullValue = + ["str", string] | + ["int", number] | + ["uint", number] | + ["f64", number] | + ["boolean", boolean] | + ["timestamp", Date] | + ["counter", number] | + ["bytes", Uint8Array] | + ["null", Uint8Array] | + ["map", ObjID] | + ["list", ObjID] | + ["text", ObjID] | + ["table", ObjID] + +export enum ObjTypeName { + list = "list", + map = "map", + table = "table", + text = "text", +} + +export type Datatype = + "boolean" | + "str" | + "int" | + "uint" | + "f64" | + "null" | + "timestamp" | + "counter" | + "bytes" | + "map" | + "text" | + "list"; + +export type DecodedSyncMessage = { + heads: Heads, + need: Heads, + have: any[] + changes: Change[] +} + +export type DecodedChange = { + actor: Actor, + seq: number + startOp: number, + time: number, + message: string | null, + deps: Heads, + hash: Hash, + ops: Op[] +} + +export type Op = { + action: string, + obj: ObjID, + key: string, + value?: string | number | boolean, + datatype?: string, + pred: string[], +} + +export type Patch = { + obj: ObjID + action: 'assign' | 'insert' | 'delete' + key: Prop + value: Value + datatype: Datatype + conflict: boolean +} + +export interface LowLevelApi { + create(actor?: Actor): Automerge; + load(data: Uint8Array, actor?: Actor): Automerge; + encodeChange(change: DecodedChange): Change; + decodeChange(change: Change): DecodedChange; + initSyncState(): SyncState; + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; + encodeSyncState(state: SyncState): Uint8Array; + decodeSyncState(data: Uint8Array): SyncState; + exportSyncState(state: SyncState): JsSyncState; + importSyncState(state: JsSyncState): SyncState; +} + +export function UseApi(api: 
LowLevelApi) { + for (let k in api) { + ApiHandler[k] = api[k] + } +} + +export let ApiHandler : LowLevelApi = { + create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, + load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, + encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called") }, + decodeChange(change: Change): DecodedChange { throw new RangeError("Automerge.use() not called") }, + initSyncState(): SyncState { throw new RangeError("Automerge.use() not called") }, + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { throw new RangeError("Automerge.use() not called") }, + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { throw new RangeError("Automerge.use() not called") }, + encodeSyncState(state: SyncState): Uint8Array { throw new RangeError("Automerge.use() not called") }, + decodeSyncState(data: Uint8Array): SyncState { throw new RangeError("Automerge.use() not called") }, + exportSyncState(state: SyncState): JsSyncState { throw new RangeError("Automerge.use() not called") }, + importSyncState(state: JsSyncState): SyncState { throw new RangeError("Automerge.use() not called") }, +} + +export interface Automerge { + // change state + put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; + putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; + insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined; + insertObject(obj: ObjID, index: number, value: ObjType): ObjID; + push(obj: ObjID, value: Value, datatype?: Datatype): undefined; + pushObject(obj: ObjID, value: ObjType): ObjID; + splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; + increment(obj: ObjID, prop: Prop, value: number): void; + delete(obj: ObjID, prop: Prop): void; + + // returns a single value - if there is a conflict return the winner + get(obj: ObjID, prop: any, heads?: Heads): FullValue | null; + // return all values in case of a conflict + getAll(obj: ObjID, arg: any, heads?: Heads): FullValue[]; + keys(obj: ObjID, heads?: Heads): string[]; + text(obj: ObjID, heads?: Heads): string; + length(obj: ObjID, heads?: Heads): number; + materialize(obj?: ObjID, heads?: Heads): any; + + // transactions + commit(message?: string, time?: number): Hash; + merge(other: Automerge): Heads; + getActorId(): Actor; + pendingOps(): number; + rollback(): number; + + // patches + enablePatches(enable: boolean): void; + popPatches(): Patch[]; + + // save and load to local store + save(): Uint8Array; + saveIncremental(): Uint8Array; + loadIncremental(data: Uint8Array): number; + + // sync over network + receiveSyncMessage(state: SyncState, message: SyncMessage): void; + generateSyncMessage(state: SyncState): SyncMessage | null; + + // low level change functions + applyChanges(changes: Change[]): void; + getChanges(have_deps: Heads): Change[]; + getChangeByHash(hash: Hash): Change | null; + getChangesAdded(other: Automerge): Change[]; + getHeads(): Heads; + getLastLocalChange(): Change; + getMissingDeps(heads?: Heads): Heads; + + // memory management + free(): void; + clone(actor?: string): Automerge; + fork(actor?: string): Automerge; + forkAt(heads: Heads, actor?: string): Automerge; + + // dump internal state to console.log + dump(): void; + + // dump internal state to a JS object + toJS(): any; +} + +export interface JsSyncState { + lastSentHeads: any; + sentHashes: any; + readonly sharedHeads: 
any; +} + +export interface SyncState extends JsSyncState { + free(): void; + clone(): SyncState; +} + diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 4e91b2e2..82171218 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -1,15 +1,11 @@ -import AutomergeWASM from "automerge-wasm" -import { Automerge, Heads, ObjID } from "automerge-wasm" -// @ts-ignore +import { Automerge, Heads, ObjID } from "./low_level_api" import { Int, Uint, Float64 } from "./numbers" -// @ts-ignore import { Counter, getWriteableCounter } from "./counter" -// @ts-ignore import { Text } from "./text" import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } from "./constants" -export function parseListIndex(key) { +function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) if (typeof key !== 'number') { // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key)) @@ -100,7 +96,7 @@ function import_value(value) { } } -export const MapHandler = { +const MapHandler = { get (target, key) : any { const { context, objectId, path, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } @@ -196,7 +192,7 @@ export const MapHandler = { } -export const ListHandler = { +const ListHandler = { get (target, index) { const {context, objectId, path, readonly, frozen, heads } = target index = parseListIndex(index) @@ -338,7 +334,7 @@ export const ListHandler = { } } -export const TextHandler = Object.assign({}, ListHandler, { +const TextHandler = Object.assign({}, ListHandler, { get (target, index) { // FIXME this is a one line change from ListHandler.get() const {context, objectId, path, readonly, frozen, heads } = target @@ -622,5 +618,3 @@ function textMethods(target) { return methods } - -//module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler } diff --git a/automerge-js/test/basic_test.ts b/automerge-js/test/basic_test.ts index 5aff21b0..9508f3d3 100644 --- a/automerge-js/test/basic_test.ts +++ b/automerge-js/test/basic_test.ts @@ -1,6 +1,9 @@ import * as assert from 'assert' import * as util from 'util' import * as Automerge from '../src' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) describe('Automerge', () => { describe('basics', () => { diff --git a/automerge-js/test/columnar_test.ts b/automerge-js/test/columnar_test.ts index ca670377..fc01741b 100644 --- a/automerge-js/test/columnar_test.ts +++ b/automerge-js/test/columnar_test.ts @@ -2,6 +2,9 @@ import * as assert from 'assert' import { checkEncoded } from './helpers' import * as Automerge from '../src' import { encodeChange, decodeChange } from '../src' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) describe('change encoding', () => { it('should encode text edits', () => { diff --git a/automerge-js/test/legacy_tests.ts b/automerge-js/test/legacy_tests.ts index 4034ca25..044b7eef 100644 --- a/automerge-js/test/legacy_tests.ts +++ b/automerge-js/test/legacy_tests.ts @@ -2,6 +2,9 @@ import * as assert from 'assert' import * as Automerge from '../src' import { assertEqualsOneOf } from './helpers' import { decodeChange } from '../src/columnar' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) const UUID_PATTERN = /^[0-9a-f]{32}$/ const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/ diff --git a/automerge-js/test/sync_test.ts b/automerge-js/test/sync_test.ts 
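Each test module now registers the wasm backend itself, since the library only
receives its backend through use(); the setup repeated across these test files
is:

    import * as Automerge from '../src'
    import * as AutomergeWASM from "automerge-wasm"

    Automerge.use(AutomergeWASM)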
index c7f8015b..db5c3bb9 100644 --- a/automerge-js/test/sync_test.ts +++ b/automerge-js/test/sync_test.ts @@ -3,6 +3,9 @@ import * as Automerge from '../src' import { BloomFilter } from '../src/sync' import { decodeChangeMeta } from '../src/columnar' import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) function inspect(a) { const util = require("util"); diff --git a/automerge-js/test/text_test.ts b/automerge-js/test/text_test.ts index 8dbfc93c..51424c91 100644 --- a/automerge-js/test/text_test.ts +++ b/automerge-js/test/text_test.ts @@ -1,6 +1,9 @@ import * as assert from 'assert' import * as Automerge from '../src' import { assertEqualsOneOf } from './helpers' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) function attributeStateToAttributes(accumulatedAttributes) { const attributes = {} diff --git a/automerge-js/test/uuid_test.ts b/automerge-js/test/uuid_test.ts index 4182a8c4..1bed4f49 100644 --- a/automerge-js/test/uuid_test.ts +++ b/automerge-js/test/uuid_test.ts @@ -1,5 +1,8 @@ import * as assert from 'assert' import * as Automerge from '../src' +import * as AutomergeWASM from "automerge-wasm" + +Automerge.use(AutomergeWASM) const uuid = Automerge.uuid From 5e1bdb79eddc70044b83a17f77650c491e06869a Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 18 May 2022 15:39:42 -0400 Subject: [PATCH 06/17] eslint --fix --- automerge-js/src/columnar.ts | 116 +++++++++++++++--------------- automerge-js/src/common.ts | 4 +- automerge-js/src/encoding.ts | 2 +- automerge-js/src/index.ts | 12 ++-- automerge-js/src/low_level_api.ts | 4 +- automerge-js/src/proxies.ts | 48 ++++++------- automerge-js/src/sync.ts | 38 +++++----- automerge-js/src/text.ts | 2 +- 8 files changed, 113 insertions(+), 113 deletions(-) diff --git a/automerge-js/src/columnar.ts b/automerge-js/src/columnar.ts index fd203333..54847e12 100644 --- a/automerge-js/src/columnar.ts +++ b/automerge-js/src/columnar.ts @@ -145,7 +145,7 @@ function parseAllOpIds(changes, single) { if (op.obj.actorId) actors[op.obj.actorId] = true if (op.elemId && op.elemId.actorId) actors[op.elemId.actorId] = true if (op.child && op.child.actorId) actors[op.child.actorId] = true - for (let pred of op.pred) actors[pred.actorId] = true + for (const pred of op.pred) actors[pred.actorId] = true return op }) newChanges.push(change) @@ -155,10 +155,10 @@ function parseAllOpIds(changes, single) { if (single) { actorIds = [changes[0].actor].concat(actorIds.filter(actor => actor !== changes[0].actor)) } - for (let change of newChanges) { + for (const change of newChanges) { change.actorNum = actorIds.indexOf(change.actor) for (let i = 0; i < change.ops.length; i++) { - let op = change.ops[i] + const op = change.ops[i] op.id = {counter: change.startOp + i, actorNum: change.actorNum, actorId: change.actor} op.obj = actorIdToActorNum(op.obj, actorIds) op.elemId = actorIdToActorNum(op.elemId, actorIds) @@ -393,7 +393,7 @@ function encodeOps(ops, forDocument) { columns.predActor = new RLEEncoder('uint') } - for (let op of ops) { + for (const op of ops) { encodeObjectId(op, columns) encodeOperationKey(op, columns) columns.insert.appendValue(!!op.insert) @@ -427,8 +427,8 @@ function encodeOps(ops, forDocument) { } } - let columnList : any = [] - for (let {columnName, columnId} of forDocument ? 
DOC_OPS_COLUMNS : CHANGE_COLUMNS) { + const columnList : any = [] + for (const {columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) { if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]}) } return columnList.sort((a, b) => a.id - b.id) @@ -436,7 +436,7 @@ function encodeOps(ops, forDocument) { function expandMultiOps(ops, startOp, actor) { let opNum = startOp - let expandedOps : any = [] + const expandedOps : any = [] for (const op of ops) { if (op.action === 'set' && op.values && op.insert) { if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') @@ -471,7 +471,7 @@ function expandMultiOps(ops, startOp, actor) { */ function decodeOps(ops, forDocument) { const newOps : any = [] - for (let op of ops) { + for (const op of ops) { const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) const action = ACTIONS[op.action] || op.action @@ -503,7 +503,7 @@ function decodeOps(ops, forDocument) { */ function checkSortedOpIds(opIds) { let last = null - for (let opId of opIds) { + for (const opId of opIds) { if (last && compareParsedOpIds(last, opId) !== -1) { throw new RangeError('operation IDs are not in ascending order') } @@ -565,7 +565,7 @@ export function makeDecoders(columns, columnSpec) { function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) - let parsedRows : any = [] + const parsedRows : any = [] while (columns.some(col => !col.decoder.done)) { let row = {}, col = 0 while (col < columns.length) { @@ -578,7 +578,7 @@ function decodeColumns(columns, actorIds, columnSpec) { if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { const values : any = [], count = columns[col].decoder.readValue() for (let i = 0; i < count; i++) { - let value = {} + const value = {} for (let colOffset = 1; colOffset < groupCols; colOffset++) { decodeValueColumns(columns, col + colOffset, actorIds, value) } @@ -615,7 +615,7 @@ function decodeColumnInfo(decoder) { function encodeColumnInfo(encoder, columns) { const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0) encoder.appendUint53(nonEmptyColumns.length) - for (let column of nonEmptyColumns) { + for (const column of nonEmptyColumns) { encoder.appendUint53(column.id) encoder.appendUint53(column.encoder.buffer.byteLength) } @@ -626,7 +626,7 @@ function decodeChangeHeader(decoder) { for (let i = 0; i < numDeps; i++) { deps.push(bytesToHexString(decoder.readRawBytes(32))) } - let change : any = { + const change : any = { actor: decoder.readHexString(), seq: decoder.readUint53(), startOp: decoder.readUint53(), @@ -714,7 +714,7 @@ export function encodeChange(changeObj) { const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') encoder.appendUint53(change.deps.length) - for (let hash of change.deps.slice().sort()) { + for (const hash of change.deps.slice().sort()) { encoder.appendRawBytes(hexStringToBytes(hash)) } encoder.appendHexString(change.actor) @@ -723,11 +723,11 @@ export function encodeChange(changeObj) { encoder.appendInt53(change.time) encoder.appendPrefixedString(change.message || '') encoder.appendUint53(actorIds.length - 1) - for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) + for (const actor of actorIds.slice(1)) encoder.appendHexString(actor) const columns : any = 
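// As a sketch of what expandMultiOps (above) produces: a single multi-value
// insert becomes one op per value, with the elemIds chaining so that each op
// inserts after its predecessor (actor and startOp here are illustrative):
//
//   expandMultiOps([{action: 'set', obj, elemId, insert: true,
//                    values: ['a', 'b'], pred: []}], 10, 'abc')
//   // => op 10@abc inserts 'a' after elemId,
//   //    op 11@abc inserts 'b' after 10@abc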
encodeOps(change.ops, false) encodeColumnInfo(encoder, columns) - for (let column of columns) encoder.appendRawBytes(column.encoder.buffer) + for (const column of columns) encoder.appendRawBytes(column.encoder.buffer) if (change.extraBytes) encoder.appendRawBytes(change.extraBytes) }) @@ -842,8 +842,8 @@ export function splitContainers(buffer) { */ export function decodeChanges(binaryChanges) { let decoded : any = [] - for (let binaryChange of binaryChanges) { - for (let chunk of splitContainers(binaryChange)) { + for (const binaryChange of binaryChanges) { + for (const chunk of splitContainers(binaryChange)) { if (chunk[8] === CHUNK_TYPE_DOCUMENT) { decoded = decoded.concat(decodeDocument(chunk)) } else if (chunk[8] === CHUNK_TYPE_CHANGE || chunk[8] === CHUNK_TYPE_DEFLATE) { @@ -869,8 +869,8 @@ function sortOpIds(a, b) { } function groupDocumentOps(changes) { - let byObjectId = {}, byReference = {}, objectType = {} - for (let change of changes) { + const byObjectId = {}, byReference = {}, objectType = {} + for (const change of changes) { for (let i = 0; i < change.ops.length; i++) { const op = change.ops[i], opId = `${op.id.counter}@${op.id.actorId}` const objectId = (op.obj === '_root') ? '_root' : `${op.obj.counter}@${op.obj.actorId}` @@ -902,7 +902,7 @@ function groupDocumentOps(changes) { byObjectId[objectId][key][opId] = op op.succ = [] - for (let pred of op.pred) { + for (const pred of op.pred) { const predId = `${pred.counter}@${pred.actorId}` if (!byObjectId[objectId][key][predId]) { throw new RangeError(`No predecessor operation ${predId}`) @@ -912,15 +912,15 @@ function groupDocumentOps(changes) { } } - let ops = [] - for (let objectId of Object.keys(byObjectId).sort(sortOpIds)) { + const ops = [] + for (const objectId of Object.keys(byObjectId).sort(sortOpIds)) { let keys : string[] = [] if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { - let stack = ['_head'] + const stack = ['_head'] while (stack.length > 0) { const key : any = stack.pop() if (key !== '_head') keys.push(key) - for (let opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) + for (const opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) } } else { // FIXME JavaScript sorts based on UTF-16 encoding. We should change this to use the UTF-8 @@ -928,8 +928,8 @@ function groupDocumentOps(changes) { keys = Object.keys(byObjectId[objectId]).sort() } - for (let key of keys) { - for (let opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { + for (const key of keys) { + for (const opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { const op = byObjectId[objectId][key][opId] // @ts-ignore if (op.action !== 'del') ops.push(op) @@ -945,8 +945,8 @@ function groupDocumentOps(changes) { * Does not return anything, only mutates `changes`. 
*/ function groupChangeOps(changes, ops) { - let changesByActor = {} // map from actorId to array of changes by that actor - for (let change of changes) { + const changesByActor = {} // map from actorId to array of changes by that actor + for (const change of changes) { change.ops = [] if (!changesByActor[change.actor]) changesByActor[change.actor] = [] if (change.seq !== changesByActor[change.actor].length + 1) { @@ -958,12 +958,12 @@ function groupChangeOps(changes, ops) { changesByActor[change.actor].push(change) } - let opsById = {} - for (let op of ops) { + const opsById = {} + for (const op of ops) { if (op.action === 'del') throw new RangeError('document should not contain del operations') op.pred = opsById[op.id] ? opsById[op.id].pred : [] opsById[op.id] = op - for (let succ of op.succ) { + for (const succ of op.succ) { if (!opsById[succ]) { if (op.elemId) { const elemId = op.insert ? op.id : op.elemId @@ -976,12 +976,12 @@ function groupChangeOps(changes, ops) { } delete op.succ } - for (let op of Object.values(opsById)) { + for (const op of Object.values(opsById)) { // @ts-ignore if (op.action === 'del') ops.push(op) } - for (let op of ops) { + for (const op of ops) { const { counter, actorId } = parseOpId(op.id) const actorChanges = changesByActor[actorId] // Binary search to find the change that should contain this operation @@ -1000,7 +1000,7 @@ function groupChangeOps(changes, ops) { actorChanges[left].ops.push(op) } - for (let change of changes) { + for (const change of changes) { change.ops.sort((op1, op2) => sortOpIds(op1.id, op2.id)) change.startOp = change.maxOp - change.ops.length + 1 delete change.maxOp @@ -1026,8 +1026,8 @@ function encodeDocumentChanges(changes) { extraLen : new RLEEncoder('uint'), extraRaw : new Encoder() } - let indexByHash = {} // map from change hash to its index in the changes array - let heads = {} // change hashes that are not a dependency of any other change + const indexByHash = {} // map from change hash to its index in the changes array + const heads = {} // change hashes that are not a dependency of any other change for (let i = 0; i < changes.length; i++) { const change = changes[i] @@ -1041,7 +1041,7 @@ function encodeDocumentChanges(changes) { columns.message.appendValue(change.message) columns.depsNum.appendValue(change.deps.length) - for (let dep of change.deps) { + for (const dep of change.deps) { if (typeof indexByHash[dep] !== 'number') { throw new RangeError(`Unknown dependency hash: ${dep}`) } @@ -1057,8 +1057,8 @@ function encodeDocumentChanges(changes) { } } - let changesColumns : any = [] - for (let {columnName, columnId} of DOCUMENT_COLUMNS) { + const changesColumns : any = [] + for (const {columnName, columnId} of DOCUMENT_COLUMNS) { changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]}) } changesColumns.sort((a, b) => a.id - b.id) @@ -1066,11 +1066,11 @@ function encodeDocumentChanges(changes) { } function decodeDocumentChanges(changes, expectedHeads) { - let heads = {} // change hashes that are not a dependency of any other change + const heads = {} // change hashes that are not a dependency of any other change for (let i = 0; i < changes.length; i++) { - let change = changes[i] + const change = changes[i] change.deps = [] - for (let index of change.depsNum.map(d => d.depsIndex)) { + for (const index of change.depsNum.map(d => d.depsIndex)) { if (!changes[index] || !changes[index].hash) { throw new RangeError(`No hash for index ${index} while processing index ${i}`) } @@ -1110,24 +1110,24 @@ 
export function encodeDocument(binaryChanges) { const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false) const { changesColumns, heads } = encodeDocumentChanges(changes) const opsColumns = encodeOps(groupDocumentOps(changes), true) - for (let column of changesColumns) deflateColumn(column) - for (let column of opsColumns) deflateColumn(column) + for (const column of changesColumns) deflateColumn(column) + for (const column of opsColumns) deflateColumn(column) return encodeContainer(CHUNK_TYPE_DOCUMENT, encoder => { encoder.appendUint53(actorIds.length) - for (let actor of actorIds) { + for (const actor of actorIds) { encoder.appendHexString(actor) } encoder.appendUint53(heads.length) - for (let head of heads.sort()) { + for (const head of heads.sort()) { encoder.appendRawBytes(hexStringToBytes(head)) } encodeColumnInfo(encoder, changesColumns) encodeColumnInfo(encoder, opsColumns) // @ts-ignore - for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + for (const column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) // @ts-ignore - for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) + for (const column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) }).bytes } @@ -1201,17 +1201,17 @@ function inflateColumn(column) { */ function addPatchProperty(objects, property) { let values : any = {}, counter : any = null - for (let op of property.ops) { + for (const op of property.ops) { // Apply counters and their increments regardless of the number of successor operations if (op.actionName === 'set' && op.value.datatype === 'counter') { if (!counter) counter = {opId: op.opId, value: 0, succ: {}} counter.value += op.value.value - for (let succId of op.succ) counter.succ[succId] = true + for (const succId of op.succ) counter.succ[succId] = true } else if (op.actionName === 'inc') { if (!counter) throw new RangeError(`inc operation ${op.opId} without a counter`) counter.value += op.value.value delete counter.succ[op.opId] - for (let succId of op.succ) counter.succ[succId] = true + for (const succId of op.succ) counter.succ[succId] = true } else if (op.succ.length === 0) { // Ignore any ops that have been overwritten if (op.actionName.startsWith('make')) { @@ -1240,7 +1240,7 @@ function addPatchProperty(objects, property) { } if (Object.keys(values).length > 0) { - let obj = objects[property.objId] + const obj = objects[property.objId] if (obj.type === 'map' || obj.type === 'table') { obj.props[property.key] = values } else if (obj.type === 'list' || obj.type === 'text') { @@ -1278,7 +1278,7 @@ function makeListEdits(list, values, elemId, index) { function condenseEdits(diff) { if (diff.type === 'list' || diff.type === 'text') { diff.edits.forEach(e => condenseEdits(e.value)) - let newEdits = diff.edits + const newEdits = diff.edits diff.edits = [] for (const edit of newEdits) appendEdit(diff.edits, edit) } else if (diff.type === 'map' || diff.type === 'table') { @@ -1300,7 +1300,7 @@ export function appendEdit(existingEdits, nextEdit) { return } - let lastEdit = existingEdits[existingEdits.length - 1] + const lastEdit = existingEdits[existingEdits.length - 1] if (lastEdit.action === 'insert' && nextEdit.action === 'insert' && lastEdit.index === nextEdit.index - 1 && lastEdit.value.type === 'value' && nextEdit.value.type === 'value' && @@ -1345,7 +1345,7 @@ export function constructPatch(documentBuffer) { const col : any = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( (acc, col: any) => 
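// appendEdit (above) coalesces adjacent patch edits as they are emitted: two
// single-value insertions at consecutive indexes fold into one multi-value
// insert. A sketch of the intended behaviour:
//
//   const edits = []
//   appendEdit(edits, {action: 'insert', index: 0, value: {type: 'value', value: 'a'}})
//   appendEdit(edits, {action: 'insert', index: 1, value: {type: 'value', value: 'b'}})
//   // edits now holds a single edit covering both values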
Object.assign(acc, {[col.columnName]: col.decoder}), {}) - let objects = {_root: {objectId: '_root', type: 'map', props: {}}} + const objects = {_root: {objectId: '_root', type: 'map', props: {}}} let property : any = null while (!col.idActor.done) { @@ -1362,7 +1362,7 @@ export function constructPatch(documentBuffer) { const objActor = col.objActor.readValue(), objCtr = col.objCtr.readValue() const objId = objActor === null ? '_root' : `${objCtr}@${actorIds[objActor]}` - let obj = objects[objId] + const obj = objects[objId] if (!obj) throw new RangeError(`Operation for nonexistent object: ${objId}`) const keyActor = col.keyActor.readValue(), keyCtr = col.keyCtr.readValue() @@ -1373,7 +1373,7 @@ export function constructPatch(documentBuffer) { const rawValue = col.valRaw.readRawBytes(sizeTag >> 4) const value = decodeValue(sizeTag, rawValue) const succNum = col.succNum.readValue() - let succ : string[] = [] + const succ : string[] = [] for (let i = 0; i < succNum; i++) { succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`) } diff --git a/automerge-js/src/common.ts b/automerge-js/src/common.ts index 5f1b53d1..f8abe8ea 100644 --- a/automerge-js/src/common.ts +++ b/automerge-js/src/common.ts @@ -8,8 +8,8 @@ export function isObject(obj: any) : boolean { */ export function copyObject(obj: any) : any { if (!isObject(obj)) return {} - let copy : any = {} - for (let key of Object.keys(obj)) { + const copy : any = {} + for (const key of Object.keys(obj)) { copy[key] = obj[key] } return copy diff --git a/automerge-js/src/encoding.ts b/automerge-js/src/encoding.ts index 55ba679d..e31312ce 100644 --- a/automerge-js/src/encoding.ts +++ b/automerge-js/src/encoding.ts @@ -761,7 +761,7 @@ export class RLEEncoder extends Encoder { this.appendRawValue(this.lastValue) } else if (this.state === 'literal') { this.appendInt53(-this.literal.length) - for (let v of this.literal) this.appendRawValue(v) + for (const v of this.literal) this.appendRawValue(v) } else if (this.state === 'nulls') { this.appendInt32(0) this.appendUint53(this.count) diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index a1cc4968..cf207200 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -34,7 +34,7 @@ export function use(api: LowLevelApi) { } function _state(doc: Doc) : Automerge { - let state = (doc)[STATE] + const state = (doc)[STATE] if (state == undefined) { throw new RangeError("must be the document root") } @@ -118,7 +118,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc) : ActorId { } function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { - let values = context.getAll(objectId, prop) + const values = context.getAll(objectId, prop) if (values.length <= 1) { return } - let result = {} + const result = {} for (const conflict of values) { const datatype = conflict[0] const value = conflict[1] @@ -407,8 +407,8 @@ export function toJS(doc: any) : any { //@ts-ignore return doc.map((a: any) => toJS(a)) } - let tmp : any = {} - for (let index in doc) { + const tmp : any = {} + for (const index in doc) { tmp[index] = toJS(doc[index]) } return tmp diff --git a/automerge-js/src/low_level_api.ts b/automerge-js/src/low_level_api.ts index 4f01a18b..813839fa 100644 --- a/automerge-js/src/low_level_api.ts +++ b/automerge-js/src/low_level_api.ts @@ -95,12 +95,12 @@ export interface LowLevelApi { } export function UseApi(api: LowLevelApi) { - for (let k in api) { + for (const k in api) { ApiHandler[k] = api[k] } } -export let 
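// toJS (in the index.ts hunk above) recursively unwraps a proxy-backed
// document into plain data, safe to hand to JSON.stringify. A sketch,
// assuming the wasm API was registered with use():
//
//   let doc = Automerge.init()
//   doc = Automerge.change(doc, d => { d.items = ['a', 'b'] })
//   const plain = Automerge.toJS(doc)   // { items: ['a', 'b'] }, no proxies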
ApiHandler : LowLevelApi = { +export const ApiHandler : LowLevelApi = { create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called") }, diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 82171218..1733ab4b 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -19,7 +19,7 @@ function parseListIndex(key) { function valueAt(target, prop) : any { const { context, objectId, path, readonly, heads} = target - let value = context.get(objectId, prop, heads) + const value = context.get(objectId, prop, heads) if (value === undefined) { return } @@ -112,7 +112,7 @@ const MapHandler = { }, set (target, key, val) { - let { context, objectId, path, readonly, frozen} = target + const { context, objectId, path, readonly, frozen} = target target.cache = {} // reset cache on set if (val && val[OBJECT_ID]) { throw new RangeError('Cannot create a reference to an existing document object') @@ -125,7 +125,7 @@ const MapHandler = { target.heads = val return true } - let [ value, datatype ] = import_value(val) + const [ value, datatype ] = import_value(val) if (frozen) { throw new RangeError("Attempting to use an outdated Automerge document") } @@ -225,7 +225,7 @@ const ListHandler = { }, set (target, index, val) { - let {context, objectId, path, readonly, frozen } = target + const {context, objectId, path, readonly, frozen } = target index = parseListIndex(index) if (val && val[OBJECT_ID]) { throw new RangeError('Cannot create a reference to an existing document object') @@ -318,14 +318,14 @@ const ListHandler = { index = parseListIndex(index) - let value = valueAt(target, index) + const value = valueAt(target, index) return { configurable: true, enumerable: true, value } }, getPrototypeOf(target) { return Object.getPrototypeOf([]) }, ownKeys (target) : string[] { const {context, objectId, heads } = target - let keys : string[] = [] + const keys : string[] = [] // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array // but not uncommenting it causes for (i in list) {} to not enumerate values properly //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } @@ -375,13 +375,13 @@ export function mapProxy(context: Automerge, objectId: ObjID, path?: string[], r } export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { - let target = [] + const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, ListHandler) } export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { - let target = [] + const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, TextHandler) } @@ -437,17 +437,17 @@ function listMethods(target) { }, pop() { - let length = context.length(objectId) + const length = context.length(objectId) if (length == 0) { return undefined } - let last = valueAt(target, length - 1) + const last = valueAt(target, length - 1) context.delete(objectId, length - 1) return last }, push(...values) { - let len = context.length(objectId) + const len = context.length(objectId) 
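// These methods give list proxies their array interface inside a change
// block: push is length-then-splice, pop reads the last value before
// deleting it. Sketch (with the wasm API registered via use()):
//
//   doc = Automerge.change(doc, d => {
//     d.list.push('x', 'y')   // returns the new length
//     d.list.pop()            // => 'y'
//   })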
this.splice(len, 0, ...values)
      return context.length(objectId)
    },

@@ -462,7 +462,7 @@ function listMethods(target) {
    splice(index, del, ...vals) {
      index = parseListIndex(index)
      del = parseListIndex(del)
-      for (let val of vals) {
+      for (const val of vals) {
        if (val && val[OBJECT_ID]) {
          throw new RangeError('Cannot create a reference to an existing document object')
        }
@@ -473,14 +473,14 @@
      if (readonly) {
        throw new RangeError("Sequence object cannot be modified outside of a change block")
      }
-      let result : any = []
+      const result : any = []
      for (let i = 0; i < del; i++) {
-        let value = valueAt(target, index)
+        const value = valueAt(target, index)
        result.push(value)
        context.delete(objectId, index)
      }
      const values = vals.map((val) => import_value(val))
-      for (let [value,datatype] of values) {
+      for (const [value,datatype] of values) {
        switch (datatype) {
          case "list":
            const list = context.insertObject(objectId, index, [])
@@ -513,10 +513,10 @@
    },

    entries() {
-      let i = 0;
+      const i = 0;
      const iterator = {
        next: () => {
-          let value = valueAt(target, i)
+          const value = valueAt(target, i)
          if (value === undefined) {
            return { value: undefined, done: true }
          } else {
@@ -529,7 +529,7 @@

    keys() {
      let i = 0;
-      let len = context.length(objectId, heads)
+      const len = context.length(objectId, heads)
      const iterator = {
        next: () => {
          let value : undefined | number = undefined
@@ -541,10 +541,10 @@
    },

    values() {
-      let i = 0;
+      const i = 0;
      const iterator = {
        next: () => {
-          let value = valueAt(target, i)
+          const value = valueAt(target, i)
          if (value === undefined) {
            return { value: undefined, done: true }
          } else {
@@ -558,13 +558,13 @@

  // Read-only methods that can delegate to the JavaScript built-in implementations
  // FIXME - super slow
-  for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
+  for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
                       'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', 'slice', 'some',
                       'toLocaleString', 'toString']) {
    methods[method] = (...args) => {
      const list : any = []
      while (true) {
-        let value = valueAt(target, list.length)
+        const value = valueAt(target, list.length)
        if (value == undefined) {
          break
        }
@@ -591,9 +591,9 @@ function textMethods(target) {
    return context.text(objectId, heads).replace(/\uFFFC/g,'')
  },
  toSpans () : any[] {
-    let spans : any[] = []
+    const spans : any[] = []
    let chars = ''
-    let length = this.length
+    const length = this.length
    for (let i = 0; i < length; i++) {
      const value = this[i]
      if (typeof value === 'string') {
diff --git a/automerge-js/src/sync.ts b/automerge-js/src/sync.ts
index fd40e343..cf90d5cf 100644
--- a/automerge-js/src/sync.ts
+++ b/automerge-js/src/sync.ts
@@ -48,7 +48,7 @@ export class BloomFilter {
    this.numBitsPerEntry = BITS_PER_ENTRY
    this.numProbes = NUM_PROBES
    this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8))
-    for (let hash of arg) this.addHash(hash)
+    for (const hash of arg) this.addHash(hash)
  } else if (arg instanceof Uint8Array) {
    if (arg.byteLength === 0) {
      this.numEntries = 0
@@ -96,7 +96,7 @@
    // on the next three lines, the right shift means interpret value as unsigned
    let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo
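// Worked example of the probe sequence: with x = 5, y = 3, z = 2 (the three
// little-endian uint32s drawn from the hash, reduced mod the filter size in
// bits) and numProbes = 4, the probes come out as [5, 8, 13, 20]; each step
// advances x by y and y by z.
    let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | 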
hashBytes[7] << 24) >>> 0) % modulo - let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo + const z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo const probes = [x] for (let i = 1; i < this.numProbes; i++) { x = (x + y) % modulo @@ -110,7 +110,7 @@ export class BloomFilter { * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). */ addHash(hash) { - for (let probe of this.getProbes(hash)) { + for (const probe of this.getProbes(hash)) { this.bits[probe >>> 3] |= 1 << (probe & 7) } } @@ -120,7 +120,7 @@ export class BloomFilter { */ containsHash(hash) { if (this.numEntries === 0) return false - for (let probe of this.getProbes(hash)) { + for (const probe of this.getProbes(hash)) { if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { return false } @@ -148,7 +148,7 @@ function encodeHashes(encoder, hashes) { * array of hex strings. */ function decodeHashes(decoder) : string[] { - let length = decoder.readUint32(), hashes : string[] = [] + const length = decoder.readUint32(), hashes : string[] = [] for (let i = 0; i < length; i++) { hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) } @@ -165,12 +165,12 @@ export function encodeSyncMessage(message) { encodeHashes(encoder, message.heads) encodeHashes(encoder, message.need) encoder.appendUint32(message.have.length) - for (let have of message.have) { + for (const have of message.have) { encodeHashes(encoder, have.lastSync) encoder.appendPrefixedBytes(have.bloom) } encoder.appendUint32(message.changes.length) - for (let change of message.changes) { + for (const change of message.changes) { encoder.appendPrefixedBytes(change) } return encoder.buffer @@ -188,7 +188,7 @@ export function decodeSyncMessage(bytes) { const heads = decodeHashes(decoder) const need = decodeHashes(decoder) const haveCount = decoder.readUint32() - let message = {heads, need, have: [], changes: []} + const message = {heads, need, have: [], changes: []} for (let i = 0; i < haveCount; i++) { const lastSync = decodeHashes(decoder) const bloom = decoder.readPrefixedBytes() @@ -255,9 +255,9 @@ function getChangesToSend(backend, have, need) { return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) } - let lastSyncHashes : any = {}, bloomFilters : BloomFilter[] = [] - for (let h of have) { - for (let hash of h.lastSync) lastSyncHashes[hash] = true + const lastSyncHashes : any = {}, bloomFilters : BloomFilter[] = [] + for (const h of have) { + for (const hash of h.lastSync) lastSyncHashes[hash] = true bloomFilters.push(new BloomFilter(h.bloom)) } @@ -265,12 +265,12 @@ function getChangesToSend(backend, have, need) { const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) .map(change => decodeChangeMeta(change, true)) - let changeHashes : any = {}, dependents : any = {}, hashesToSend : any = {} - for (let change of changes) { + const changeHashes : any = {}, dependents : any = {}, hashesToSend : any = {} + for (const change of changes) { changeHashes[change.hash] = true // For each change, make a list of changes that depend on it - for (let dep of change.deps) { + for (const dep of change.deps) { if (!dependents[dep]) dependents[dep] = [] dependents[dep].push(change.hash) } @@ -282,11 +282,11 @@ function getChangesToSend(backend, have, need) { } // Include any changes that depend on a Bloom-negative change - let stack = Object.keys(hashesToSend) + const stack = 
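// encodeSyncMessage and decodeSyncMessage (above) are inverses for
// well-formed messages; a round-trip sketch:
//
//   const msg = {heads: [], need: [], have: [], changes: []}
//   const bytes = encodeSyncMessage(msg)
//   assert.deepStrictEqual(decodeSyncMessage(bytes), msg)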
Object.keys(hashesToSend) while (stack.length > 0) { const hash : any = stack.pop() if (dependents[hash]) { - for (let dep of dependents[hash]) { + for (const dep of dependents[hash]) { if (!hashesToSend[dep]) { hashesToSend[dep] = true stack.push(dep) @@ -296,8 +296,8 @@ function getChangesToSend(backend, have, need) { } // Include any explicitly requested changes - let changesToSend : any = [] - for (let hash of need) { + const changesToSend : any = [] + for (const hash of need) { hashesToSend[hash] = true if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? const change = Backend.getChangeByHash(backend, hash) @@ -306,7 +306,7 @@ function getChangesToSend(backend, have, need) { } // Return changes in the order they were returned by getMissingChanges() - for (let change of changes) { + for (const change of changes) { if (hashesToSend[change.hash]) changesToSend.push(change.change) } return changesToSend diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 02aac54d..738289a4 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -118,7 +118,7 @@ export class Text { } // Read-only methods that can delegate to the JavaScript built-in array -for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', +for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', 'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', 'slice', 'some', 'toLocaleString']) { Text.prototype[method] = function (...args) { From 515a2eb94b80e891029413b7ab80ac198acdf655 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 18 May 2022 16:16:29 -0400 Subject: [PATCH 07/17] removing some ts errors --- automerge-js/src/bloom.ts | 124 ++++++++ automerge-js/src/index.ts | 2 +- automerge-js/src/low_level_api.ts | 7 +- automerge-js/src/proxies.ts | 16 +- automerge-js/src/sync.ts | 487 ------------------------------ automerge-js/src/text.ts | 13 +- automerge-js/src/uuid.ts | 11 +- automerge-js/test/sync_test.ts | 2 +- automerge-wasm/index.d.ts | 7 +- 9 files changed, 160 insertions(+), 509 deletions(-) create mode 100644 automerge-js/src/bloom.ts delete mode 100644 automerge-js/src/sync.ts diff --git a/automerge-js/src/bloom.ts b/automerge-js/src/bloom.ts new file mode 100644 index 00000000..cb66466a --- /dev/null +++ b/automerge-js/src/bloom.ts @@ -0,0 +1,124 @@ +/** + * Implementation of the data synchronisation protocol that brings a local and a remote document + * into the same state. This is typically used when two nodes have been disconnected for some time, + * and need to exchange any changes that happened while they were disconnected. The two nodes that + * are syncing could be client and server, or server and client, or two peers with symmetric roles. + * + * The protocol is based on this paper: Martin Kleppmann and Heidi Howard. Byzantine Eventual + * Consistency and the Fundamental Limits of Peer-to-Peer Databases. https://arxiv.org/abs/2012.00472 + * + * The protocol assumes that every time a node successfully syncs with another node, it remembers + * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The + * next time we try to sync with the same node, we start from the assumption that the other node's + * document version is no older than the outcome of the last sync, so we only need to exchange any + * changes that are more recent than the last sync. 
This assumption may not be true if the other + * node did not correctly persist its state (perhaps it crashed before writing the result of the + * last sync to disk), and we fall back to sending the entire document in this case. + */ + +import { hexStringToBytes, Encoder, Decoder } from './encoding' + +// These constants correspond to a 1% false positive rate. The values can be changed without +// breaking compatibility of the network protocol, since the parameters used for a particular +// Bloom filter are encoded in the wire format. +const BITS_PER_ENTRY = 10, NUM_PROBES = 7 + +/** + * A Bloom filter implementation that can be serialised to a byte array for transmission + * over a network. The entries that are added are assumed to already be SHA-256 hashes, + * so this implementation does not perform its own hashing. + */ +export class BloomFilter { + numEntries: number; + numBitsPerEntry: number; + numProbes: number; + bits: Uint8Array; + + constructor (arg) { + if (Array.isArray(arg)) { + // arg is an array of SHA256 hashes in hexadecimal encoding + this.numEntries = arg.length + this.numBitsPerEntry = BITS_PER_ENTRY + this.numProbes = NUM_PROBES + this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + for (const hash of arg) this.addHash(hash) + } else if (arg instanceof Uint8Array) { + if (arg.byteLength === 0) { + this.numEntries = 0 + this.numBitsPerEntry = 0 + this.numProbes = 0 + this.bits = arg + } else { + const decoder = new Decoder(arg) + this.numEntries = decoder.readUint32() + this.numBitsPerEntry = decoder.readUint32() + this.numProbes = decoder.readUint32() + this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + } + } else { + throw new TypeError('invalid argument') + } + } + + /** + * Returns the Bloom filter state, encoded as a byte array. + */ + get bytes() { + if (this.numEntries === 0) return new Uint8Array(0) + const encoder = new Encoder() + encoder.appendUint32(this.numEntries) + encoder.appendUint32(this.numBitsPerEntry) + encoder.appendUint32(this.numProbes) + encoder.appendRawBytes(this.bits) + return encoder.buffer + } + + /** + * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits + * in the Bloom filter need to be tested or set for this particular entry. We do this by + * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers, + * and then using triple hashing to compute the probe indexes. The algorithm comes from: + * + * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification. + * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004. 
+ * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf + */ + getProbes(hash) { + const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength + if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) + // on the next three lines, the right shift means interpret value as unsigned + let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo + let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo + const z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo + const probes = [x] + for (let i = 1; i < this.numProbes; i++) { + x = (x + y) % modulo + y = (y + z) % modulo + probes.push(x) + } + return probes + } + + /** + * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). + */ + addHash(hash) { + for (const probe of this.getProbes(hash)) { + this.bits[probe >>> 3] |= 1 << (probe & 7) + } + } + + /** + * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter. + */ + containsHash(hash) { + if (this.numEntries === 0) return false + for (const probe of this.getProbes(hash)) { + if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { + return false + } + } + return true + } +} + diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index cf207200..2885531c 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -118,7 +118,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : T { return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) } -export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { +export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : Array { const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, ListHandler) } -export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : any { +export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : Array { const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, TextHandler) } -export function rootProxy(context: Automerge, readonly?: boolean) : any { +export function rootProxy(context: Automerge, readonly?: boolean) : T { return mapProxy(context, "_root", [], !!readonly) } @@ -494,7 +494,7 @@ function listMethods(target) { break; case "map": const map = context.insertObject(objectId, index, {}) - const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); + const proxyMap : any = mapProxy(context, map, [ ... 
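// Each plain object handed to splice is materialised as a fresh Automerge
// object and copied in key by key through a child proxy, as sketched here:
//
//   doc = Automerge.change(doc, d => {
//     d.list.splice(1, 0, {nested: true})   // takes the "map" branch above
//   })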
path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] } @@ -578,7 +578,7 @@ function listMethods(target) { return methods } -function textMethods(target) { +function textMethods(target) : any { const {context, objectId, path, readonly, frozen, heads } = target const methods : any = { set (index, value) { diff --git a/automerge-js/src/sync.ts b/automerge-js/src/sync.ts deleted file mode 100644 index cf90d5cf..00000000 --- a/automerge-js/src/sync.ts +++ /dev/null @@ -1,487 +0,0 @@ -/** - * Implementation of the data synchronisation protocol that brings a local and a remote document - * into the same state. This is typically used when two nodes have been disconnected for some time, - * and need to exchange any changes that happened while they were disconnected. The two nodes that - * are syncing could be client and server, or server and client, or two peers with symmetric roles. - * - * The protocol is based on this paper: Martin Kleppmann and Heidi Howard. Byzantine Eventual - * Consistency and the Fundamental Limits of Peer-to-Peer Databases. https://arxiv.org/abs/2012.00472 - * - * The protocol assumes that every time a node successfully syncs with another node, it remembers - * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The - * next time we try to sync with the same node, we start from the assumption that the other node's - * document version is no older than the outcome of the last sync, so we only need to exchange any - * changes that are more recent than the last sync. This assumption may not be true if the other - * node did not correctly persist its state (perhaps it crashed before writing the result of the - * last sync to disk), and we fall back to sending the entire document in this case. - */ - -const Backend : any = {} //require('./backend') -import { hexStringToBytes, bytesToHexString, Encoder, Decoder } from './encoding' -import { decodeChangeMeta } from './columnar' -import { copyObject } from './common' - -const HASH_SIZE = 32 // 256 bits = 32 bytes -const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification -const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identification - -// These constants correspond to a 1% false positive rate. The values can be changed without -// breaking compatibility of the network protocol, since the parameters used for a particular -// Bloom filter are encoded in the wire format. -const BITS_PER_ENTRY = 10, NUM_PROBES = 7 - -/** - * A Bloom filter implementation that can be serialised to a byte array for transmission - * over a network. The entries that are added are assumed to already be SHA-256 hashes, - * so this implementation does not perform its own hashing. 
- */ -export class BloomFilter { - numEntries: number; - numBitsPerEntry: number; - numProbes: number; - bits: Uint8Array; - - constructor (arg) { - if (Array.isArray(arg)) { - // arg is an array of SHA256 hashes in hexadecimal encoding - this.numEntries = arg.length - this.numBitsPerEntry = BITS_PER_ENTRY - this.numProbes = NUM_PROBES - this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) - for (const hash of arg) this.addHash(hash) - } else if (arg instanceof Uint8Array) { - if (arg.byteLength === 0) { - this.numEntries = 0 - this.numBitsPerEntry = 0 - this.numProbes = 0 - this.bits = arg - } else { - const decoder = new Decoder(arg) - this.numEntries = decoder.readUint32() - this.numBitsPerEntry = decoder.readUint32() - this.numProbes = decoder.readUint32() - this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) - } - } else { - throw new TypeError('invalid argument') - } - } - - /** - * Returns the Bloom filter state, encoded as a byte array. - */ - get bytes() { - if (this.numEntries === 0) return new Uint8Array(0) - const encoder = new Encoder() - encoder.appendUint32(this.numEntries) - encoder.appendUint32(this.numBitsPerEntry) - encoder.appendUint32(this.numProbes) - encoder.appendRawBytes(this.bits) - return encoder.buffer - } - - /** - * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits - * in the Bloom filter need to be tested or set for this particular entry. We do this by - * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers, - * and then using triple hashing to compute the probe indexes. The algorithm comes from: - * - * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification. - * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004. - * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf - */ - getProbes(hash) { - const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength - if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) - // on the next three lines, the right shift means interpret value as unsigned - let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo - let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo - const z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo - const probes = [x] - for (let i = 1; i < this.numProbes; i++) { - x = (x + y) % modulo - y = (y + z) % modulo - probes.push(x) - } - return probes - } - - /** - * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). - */ - addHash(hash) { - for (const probe of this.getProbes(hash)) { - this.bits[probe >>> 3] |= 1 << (probe & 7) - } - } - - /** - * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter. - */ - containsHash(hash) { - if (this.numEntries === 0) return false - for (const probe of this.getProbes(hash)) { - if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { - return false - } - } - return true - } -} - -/** - * Encodes a sorted array of SHA-256 hashes (as hexadecimal strings) into a byte array. 
- */ -function encodeHashes(encoder, hashes) { - if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array') - encoder.appendUint32(hashes.length) - for (let i = 0; i < hashes.length; i++) { - if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted') - const bytes = hexStringToBytes(hashes[i]) - if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits') - encoder.appendRawBytes(bytes) - } -} - -/** - * Decodes a byte array in the format returned by encodeHashes(), and returns its content as an - * array of hex strings. - */ -function decodeHashes(decoder) : string[] { - const length = decoder.readUint32(), hashes : string[] = [] - for (let i = 0; i < length; i++) { - hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) - } - return hashes -} - -/** - * Takes a sync message of the form `{heads, need, have, changes}` and encodes it as a byte array for - * transmission. - */ -export function encodeSyncMessage(message) { - const encoder = new Encoder() - encoder.appendByte(MESSAGE_TYPE_SYNC) - encodeHashes(encoder, message.heads) - encodeHashes(encoder, message.need) - encoder.appendUint32(message.have.length) - for (const have of message.have) { - encodeHashes(encoder, have.lastSync) - encoder.appendPrefixedBytes(have.bloom) - } - encoder.appendUint32(message.changes.length) - for (const change of message.changes) { - encoder.appendPrefixedBytes(change) - } - return encoder.buffer -} - -/** - * Takes a binary-encoded sync message and decodes it into the form `{heads, need, have, changes}`. - */ -export function decodeSyncMessage(bytes) { - const decoder = new Decoder(bytes) - const messageType = decoder.readByte() - if (messageType !== MESSAGE_TYPE_SYNC) { - throw new RangeError(`Unexpected message type: ${messageType}`) - } - const heads = decodeHashes(decoder) - const need = decodeHashes(decoder) - const haveCount = decoder.readUint32() - const message = {heads, need, have: [], changes: []} - for (let i = 0; i < haveCount; i++) { - const lastSync = decodeHashes(decoder) - const bloom = decoder.readPrefixedBytes() - // @ts-ignore - message.have.push({lastSync, bloom}) - } - const changeCount = decoder.readUint32() - for (let i = 0; i < changeCount; i++) { - const change = decoder.readPrefixedBytes() - // @ts-ignore - message.changes.push(change) - } - // Ignore any trailing bytes -- they can be used for extensions by future versions of the protocol - return message -} - -/** - * Takes a SyncState and encodes as a byte array those parts of the state that should persist across - * an application restart or disconnect and reconnect. The ephemeral parts of the state that should - * be cleared on reconnect are not encoded. - */ -export function encodeSyncState(syncState) { - const encoder = new Encoder() - encoder.appendByte(PEER_STATE_TYPE) - encodeHashes(encoder, syncState.sharedHeads) - return encoder.buffer -} - -/** - * Takes a persisted peer state as encoded by `encodeSyncState` and decodes it into a SyncState - * object. The parts of the peer state that were not encoded are initialised with default values. 
- */ -export function decodeSyncState(bytes) { - const decoder = new Decoder(bytes) - const recordType = decoder.readByte() - if (recordType !== PEER_STATE_TYPE) { - throw new RangeError(`Unexpected record type: ${recordType}`) - } - const sharedHeads = decodeHashes(decoder) - return Object.assign(initSyncState(), { sharedHeads }) -} - -/** - * Constructs a Bloom filter containing all changes that are not one of the hashes in - * `lastSync` or its transitive dependencies. In other words, the filter contains those - * changes that have been applied since the version identified by `lastSync`. Returns - * an object of the form `{lastSync, bloom}` as required for the `have` field of a sync - * message. - */ -function makeBloomFilter(backend, lastSync) { - const newChanges = Backend.getChanges(backend, lastSync) - const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash) - return {lastSync, bloom: new BloomFilter(hashes).bytes} -} - -/** - * Call this function when a sync message is received from another node. The `message` argument - * needs to already have been decoded using `decodeSyncMessage()`. This function determines the - * changes that we need to send to the other node in response. Returns an array of changes (as - * byte arrays). - */ -function getChangesToSend(backend, have, need) { - if (have.length === 0) { - return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) - } - - const lastSyncHashes : any = {}, bloomFilters : BloomFilter[] = [] - for (const h of have) { - for (const hash of h.lastSync) lastSyncHashes[hash] = true - bloomFilters.push(new BloomFilter(h.bloom)) - } - - // Get all changes that were added since the last sync - const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) - .map(change => decodeChangeMeta(change, true)) - - const changeHashes : any = {}, dependents : any = {}, hashesToSend : any = {} - for (const change of changes) { - changeHashes[change.hash] = true - - // For each change, make a list of changes that depend on it - for (const dep of change.deps) { - if (!dependents[dep]) dependents[dep] = [] - dependents[dep].push(change.hash) - } - - // Exclude any change hashes contained in one or more Bloom filters - if (bloomFilters.every(bloom => !bloom.containsHash(change.hash))) { - hashesToSend[change.hash] = true - } - } - - // Include any changes that depend on a Bloom-negative change - const stack = Object.keys(hashesToSend) - while (stack.length > 0) { - const hash : any = stack.pop() - if (dependents[hash]) { - for (const dep of dependents[hash]) { - if (!hashesToSend[dep]) { - hashesToSend[dep] = true - stack.push(dep) - } - } - } - } - - // Include any explicitly requested changes - const changesToSend : any = [] - for (const hash of need) { - hashesToSend[hash] = true - if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? 
- const change = Backend.getChangeByHash(backend, hash) - if (change) changesToSend.push(change) - } - } - - // Return changes in the order they were returned by getMissingChanges() - for (const change of changes) { - if (hashesToSend[change.hash]) changesToSend.push(change.change) - } - return changesToSend -} - -export function initSyncState() { - return { - sharedHeads: [], - lastSentHeads: [], - theirHeads: null, - theirNeed: null, - theirHave: null, - sentHashes: {}, - } -} - -function compareArrays(a, b) { - return (a.length === b.length) && a.every((v, i) => v === b[i]) -} - -/** - * Given a backend and what we believe to be the state of our peer, generate a message which tells - * them about we have and includes any changes we believe they need - */ -export function generateSyncMessage(backend, syncState) { - if (!backend) { - throw new Error("generateSyncMessage called with no Automerge document") - } - if (!syncState) { - throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") - } - - let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState - const ourHeads = Backend.getHeads(backend) - - // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied - // changes, and any of the remote peer's heads that we don't know about - const ourNeed = Backend.getMissingDeps(backend, theirHeads || []) - - // There are two reasons why ourNeed may be nonempty: 1. we might be missing dependencies due to - // Bloom filter false positives; 2. we might be missing heads that the other peer mentioned - // because they (intentionally) only sent us a subset of changes. In case 1, we leave the `have` - // field of the message empty because we just want to fill in the missing dependencies for now. - // In case 2, or if ourNeed is empty, we send a Bloom filter to request any unsent changes. - let ourHave : any = [] - if (!theirHeads || ourNeed.every(hash => theirHeads.includes(hash))) { - ourHave = [makeBloomFilter(backend, sharedHeads)] - } - - // Fall back to a full re-sync if the sender's last sync state includes hashes - // that we don't know. This could happen if we crashed after the last sync and - // failed to persist changes that the other node already sent us. - if (theirHave && theirHave.length > 0) { - const lastSync = theirHave[0].lastSync - if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) { - // we need to queue them to send us a fresh sync message, the one they sent is uninteligible so we don't know what they need - const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []} - return [syncState, encodeSyncMessage(resetMsg)] - } - } - - // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size - // these changes should ideally be RLE encoded but we haven't implemented that yet. - let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : [] - - // If the heads are equal, we're in sync and don't need to do anything further - const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads) - const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads) - if (headsUnchanged && headsEqual && changesToSend.length === 0) { - // no need to send a sync message if we know we're synced! 
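// generateSyncMessage and receiveSyncMessage are driven as a loop on each
// peer until generateSyncMessage reports nothing left to send. Sketch:
//
//   let state = initSyncState(), msg
//   ;[state, msg] = generateSyncMessage(backend, state)
//   // ...transmit msg; when the peer's reply arrives:
//   let patch
//   ;[backend, state, patch] = receiveSyncMessage(backend, state, reply)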
-    return [syncState, null]
-  }
-
-  // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the
-  // unnecessary recomputation
-  changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash])
-
-  // Regular response to a sync message: send any changes that the other node
-  // doesn't have. We leave the "have" field empty because the previous message
-  // generated by `syncStart` already indicated what changes we have.
-  const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend}
-  if (changesToSend.length > 0) {
-    sentHashes = copyObject(sentHashes)
-    for (const change of changesToSend) {
-      sentHashes[decodeChangeMeta(change, true).hash] = true
-    }
-  }
-
-  syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes})
-  return [syncState, encodeSyncMessage(syncMessage)]
-}
-
-/**
- * Computes the heads that we share with a peer after we have just received some changes from that
- * peer and applied them. This may not be sufficient to bring our heads in sync with the other
- * peer's heads, since they may have only sent us a subset of their outstanding changes.
- *
- * `myOldHeads` are the local heads before the most recent changes were applied, `myNewHeads` are
- * the local heads after those changes were applied, and `ourOldSharedHeads` is the previous set of
- * shared heads. Applying the changes will have replaced some heads with others, but some heads may
- * have remained unchanged (because they are for branches on which no changes have been added). Any
- * such unchanged heads remain in the sharedHeads. Any sharedHeads that were replaced by applying
- * changes are also replaced as sharedHeads. This is safe because if we received some changes from
- * another peer, that means that peer had those changes, and therefore we now both know about them.
- */
-function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) {
-  const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head))
-  const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head))
-  const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort()
-  return advancedHeads
-}
-
-
-/**
- * Given a backend, a sync message, and the state of our peer, apply any changes, update what
- * we believe about the peer, and (if there were applied changes) produce a patch for the frontend
- */
-export function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
-  if (!backend) {
-    throw new Error("receiveSyncMessage called with no Automerge document")
-  }
-  if (!oldSyncState) {
-    throw new Error("receiveSyncMessage requires a syncState, which can be created with initSyncState()")
-  }
-
-  let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null
-  const message = decodeSyncMessage(binaryMessage)
-  const beforeHeads = Backend.getHeads(backend)
-
-  // If we received changes, we try to apply them to the document. There may still be missing
-  // dependencies due to Bloom filter false positives, in which case the backend will enqueue the
-  // changes without applying them. The set of changes may also be incomplete if the sender decided
-  // to break a large set of changes into chunks.
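// [Editor's note] A minimal sketch of how the two entry points above are meant
// to be driven, assuming two in-memory backends and a hypothetical lossless
// transport; `backend1`/`backend2` and the loop shape are illustrative only,
// not part of this patch:
//
//   let s1 = initSyncState(), s2 = initSyncState()
//   for (;;) {
//     let m1, m2, patch
//     ;[s1, m1] = generateSyncMessage(backend1, s1)
//     if (m1) { ;[backend2, s2, patch] = receiveSyncMessage(backend2, s2, m1) }
//     ;[s2, m2] = generateSyncMessage(backend2, s2)
//     if (m2) { ;[backend1, s1, patch] = receiveSyncMessage(backend1, s1, m2) }
//     if (!m1 && !m2) break  // generateSyncMessage returns a null message once in sync
//   }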
- if (message.changes.length > 0) { - [backend, patch] = Backend.applyChanges(backend, message.changes) - sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads) - } - - // If heads are equal, indicate we don't need to send a response message - if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) { - lastSentHeads = message.heads - } - - // If all of the remote heads are known to us, that means either our heads are equal, or we are - // ahead of the remote peer. In this case, take the remote heads to be our shared heads. - const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head)) - if (knownHeads.length === message.heads.length) { - sharedHeads = message.heads - // If the remote peer has lost all its data, reset our state to perform a full resync - if (message.heads.length === 0) { - lastSentHeads = [] - sentHashes = [] - } - } else { - // If some remote heads are unknown to us, we add all the remote heads we know to - // sharedHeads, but don't remove anything from sharedHeads. This might cause sharedHeads to - // contain some redundant hashes (where one hash is actually a transitive dependency of - // another), but this will be cleared up as soon as we know all the remote heads. - sharedHeads = [...new Set(knownHeads.concat(sharedHeads))].sort() - } - - const syncState = { - sharedHeads, // what we have in common to generate an efficient bloom filter - lastSentHeads, - theirHave: message.have, // the information we need to calculate the changes they need - theirHeads: message.heads, - theirNeed: message.need, - sentHashes - } - return [backend, syncState, patch] -} - -module.exports = { - receiveSyncMessage, generateSyncMessage, - encodeSyncMessage, decodeSyncMessage, - initSyncState, encodeSyncState, decodeSyncState, - BloomFilter // BloomFilter is a private API, exported only for testing purposes -} diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 738289a4..e31f979c 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,8 +1,7 @@ -import { OBJECT_ID } from './constants' -import { isObject } from '../src/common' +import { Value } from "./low_level_api" export class Text { - elems: any[] + elems: Value[] constructor (text?: string | string[]) { //const instance = Object.create(Text.prototype) @@ -21,7 +20,7 @@ export class Text { return this.elems.length } - get (index) : any { + get (index) : Value { return this.elems[index] } @@ -66,8 +65,8 @@ export class Text { * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans: * => ['ab', {x: 3}, 'cd'] */ - toSpans() : any[] { - const spans : any = [] + toSpans() : Value[] { + const spans : Value[] = [] let chars = '' for (const elem of this.elems) { if (typeof elem === 'string') { @@ -97,7 +96,7 @@ export class Text { /** * Updates the list item at position `index` to a new value `value`. 
*/ - set (index: number, value: any) { + set (index: number, value: Value) { this.elems[index] = value } diff --git a/automerge-js/src/uuid.ts b/automerge-js/src/uuid.ts index bc6c4bb1..549b0fc5 100644 --- a/automerge-js/src/uuid.ts +++ b/automerge-js/src/uuid.ts @@ -6,11 +6,16 @@ function defaultFactory() { let factory = defaultFactory -export function uuid() { +interface UUIDFactory extends Function { + setFactory(f: typeof factory); + reset(); +} + +export const uuid : UUIDFactory = () => { return factory() } -// @ts-ignore uuid.setFactory = newFactory => { factory = newFactory } -// @ts-ignore + uuid.reset = () => { factory = defaultFactory } + diff --git a/automerge-js/test/sync_test.ts b/automerge-js/test/sync_test.ts index db5c3bb9..0118776c 100644 --- a/automerge-js/test/sync_test.ts +++ b/automerge-js/test/sync_test.ts @@ -1,6 +1,6 @@ import * as assert from 'assert' import * as Automerge from '../src' -import { BloomFilter } from '../src/sync' +import { BloomFilter } from '../src/bloom' import { decodeChangeMeta } from '../src/columnar' import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" import * as AutomergeWASM from "automerge-wasm" diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index 47f32deb..e4701a62 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -44,10 +44,15 @@ export type Datatype = "text" | "list"; +export type SyncHave { + lastSync: Heads, + bloom: Uint8Array, +} + export type DecodedSyncMessage = { heads: Heads, need: Heads, - have: any[] + have: SyncHave[] changes: Change[] } From fd02585d2ad22d74a959150dce88d66a8696713c Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Wed, 18 May 2022 17:36:09 -0400 Subject: [PATCH 08/17] removed a bunch of lint errors --- automerge-js/src/columnar.ts | 23 ++++--- automerge-js/src/counter.ts | 2 +- automerge-js/src/encoding.ts | 14 ++-- automerge-js/src/index.ts | 34 ++++----- .../src/{low_level_api.ts => low_level.ts} | 0 automerge-js/src/proxies.ts | 69 ++++++++++--------- automerge-js/src/text.ts | 2 +- 7 files changed, 73 insertions(+), 71 deletions(-) rename automerge-js/src/{low_level_api.ts => low_level.ts} (100%) diff --git a/automerge-js/src/columnar.ts b/automerge-js/src/columnar.ts index 54847e12..2560380b 100644 --- a/automerge-js/src/columnar.ts +++ b/automerge-js/src/columnar.ts @@ -541,7 +541,8 @@ export function decoderByColumnId(columnId, buffer) { export function makeDecoders(columns, columnSpec) { const emptyBuf = new Uint8Array(0) - let decoders : any = [], columnIndex = 0, specIndex = 0 + const decoders : any = [] + let columnIndex = 0, specIndex = 0 while (columnIndex < columns.length || specIndex < columnSpec.length) { if (columnIndex === columns.length || @@ -567,10 +568,12 @@ function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) const parsedRows : any = [] while (columns.some(col => !col.decoder.done)) { - let row = {}, col = 0 + const row = {} + let col = 0 while (col < columns.length) { const columnId = columns[col].columnId - let groupId = columnId >> 4, groupCols = 1 + const groupId = columnId >> 4 + let groupCols = 1 while (col + groupCols < columns.length && columns[col + groupCols].columnId >> 4 === groupId) { groupCols++ } @@ -600,7 +603,8 @@ function decodeColumnInfo(decoder) { // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. 
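// [Editor's note] Worked example of the masking described above (illustration
// only, not part of the patch): a column with id 0x71 is stored deflated as
// 0x71 | COLUMN_TYPE_DEFLATE = 0x79; since
// COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 = 0xfffffff7,
// (0x79 & COLUMN_ID_MASK) === 0x71, so the sorted-order check below compares
// the undeflated column ids.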
const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 - let lastColumnId = -1, columns : any = [], numColumns = decoder.readUint53() + let lastColumnId = -1 + const columns : any = [], numColumns = decoder.readUint53() for (let i = 0; i < numColumns; i++) { const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { @@ -827,7 +831,8 @@ function inflateChange(buffer) { * returns an array of subarrays, each subarray containing one change. */ export function splitContainers(buffer) { - let decoder = new Decoder(buffer), chunks : any = [], startOffset = 0 + const decoder = new Decoder(buffer), chunks : any = [] + let startOffset = 0 while (!decoder.done) { decodeContainerHeader(decoder, false) chunks.push(buffer.subarray(startOffset, decoder.offset)) @@ -912,7 +917,7 @@ function groupDocumentOps(changes) { } } - const ops = [] + const ops : any[] = [] for (const objectId of Object.keys(byObjectId).sort(sortOpIds)) { let keys : string[] = [] if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { @@ -930,8 +935,7 @@ function groupDocumentOps(changes) { for (const key of keys) { for (const opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { - const op = byObjectId[objectId][key][opId] - // @ts-ignore + const op : any = byObjectId[objectId][key][opId] if (op.action !== 'del') ops.push(op) } } @@ -1200,7 +1204,8 @@ function inflateColumn(column) { * or false if the property has been deleted. */ function addPatchProperty(objects, property) { - let values : any = {}, counter : any = null + const values : any = {} + let counter : any = null for (const op of property.ops) { // Apply counters and their increments regardless of the number of successor operations if (op.actionName === 'set' && op.value.datatype === 'counter') { diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index 0539af39..34ce211b 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "./low_level_api" +import { Automerge, ObjID, Prop } from "./low_level" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. Since addition of integers is commutative, diff --git a/automerge-js/src/encoding.ts b/automerge-js/src/encoding.ts index e31312ce..773c3288 100644 --- a/automerge-js/src/encoding.ts +++ b/automerge-js/src/encoding.ts @@ -26,11 +26,11 @@ export function hexStringToBytes(value: string) : Uint8Array { if (!/^([0-9a-f][0-9a-f])*$/.test(value)) { throw new RangeError('value is not hexadecimal') } - if (value === '') { + const match = value.match(/../g) + if (match === null) { return new Uint8Array(0) } else { - // @ts-ignore - return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) + return new Uint8Array(match.map(b => parseInt(b, 16))) } } @@ -44,7 +44,8 @@ for (let i = 0; i < 256; i++) { * Converts a Uint8Array into the equivalent hexadecimal string. */ export function bytesToHexString(bytes: Uint8Array) : string { - let hex = '', len = bytes.byteLength + let hex = '' + const len = bytes.byteLength for (let i = 0; i < len; i++) { hex += BYTE_TO_HEX[bytes[i]] } @@ -989,7 +990,8 @@ export class DeltaEncoder extends RLEEncoder { // Copy any null values, and the first non-null value, so that appendValue() computes the // difference between the encoder's last value and the decoder's first (absolute) value. 
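// [Editor's note] A small illustration (not part of the patch) of the delta
// encoding DeltaEncoder implements: appending the absolute values 10, 12, 15
// stores the differences 10, 2, 3, and `absoluteValue` tracks the running
// total (15 here) so that a later appendValue or copyFrom can continue the
// sequence from the correct base.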
- let value = decoder.readValue(), nulls = 0 + const value = decoder.readValue() + let nulls = 0 this.appendValue(value) if (value === null) { nulls = decoder.count + 1 @@ -1011,9 +1013,7 @@ export class DeltaEncoder extends RLEEncoder { if (remaining !== undefined) remaining -= nulls + 1 const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true}) if (nonNullValues > 0) { - // @ts-ignore this.absoluteValue = sum - // @ts-ignore decoder.absoluteValue = sum } } diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index 2885531c..e4fc5e4b 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -14,9 +14,9 @@ export { Text } from "./text" export { Counter } from "./counter" export { Int, Uint, Float64 } from "./numbers" -import { ApiHandler, LowLevelApi, UseApi } from "./low_level_api" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./low_level_api" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "./low_level_api" +import { ApiHandler, LowLevelApi, UseApi } from "./low_level" +import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./low_level" +import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "./low_level" export type ChangeOptions = { message?: string, time?: number } @@ -113,18 +113,13 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc, options: ChangeOptions, callback: ChangeFn(local: Doc, remote: Doc) : Doc { const remoteState = _state(remote) const changes = localState.getChangesAdded(remoteState) localState.applyChanges(changes) - //@ts-ignore - local[HEADS] = heads + Reflect.set(local,HEADS,heads) return rootProxy(localState, true) } @@ -286,8 +278,7 @@ export function applyChanges(doc: Doc, changes: Change[]) : [Doc] { const state = _state(doc) const heads = state.getHeads() state.applyChanges(changes) - //@ts-ignore - doc[HEADS] = heads + Reflect.set(doc,HEADS,heads) return [rootProxy(state, true)]; } @@ -351,8 +342,7 @@ export function receiveSyncMessage(doc: Doc, inState: SyncState, message: const state = _state(doc) const heads = state.getHeads() state.receiveSyncMessage(syncState, message) - //@ts-ignore - doc[HEADS] = heads; + Reflect.set(doc,HEADS,heads) const outState = ApiHandler.exportSyncState(syncState) return [rootProxy(state, true), outState, null]; } diff --git a/automerge-js/src/low_level_api.ts b/automerge-js/src/low_level.ts similarity index 100% rename from automerge-js/src/low_level_api.ts rename to automerge-js/src/low_level.ts diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 38efd7d2..e936af64 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -1,5 +1,5 @@ -import { Automerge, Heads, ObjID } from "./low_level_api" +import { Automerge, Heads, ObjID } from "./low_level" import { Int, Uint, Float64 } from "./numbers" import { Counter, getWriteableCounter } from "./counter" import { Text } from "./text" @@ -98,7 +98,7 @@ function import_value(value) { const MapHandler = { get (target, key) : any { - const { context, objectId, path, readonly, frozen, heads, cache } = target + const { context, objectId, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (key === OBJECT_ID) return objectId if (key === READ_ONLY) return readonly @@ -133,27 +133,30 @@ const MapHandler = { throw new RangeError(`Object property "${key}" cannot be modified`) 
} switch (datatype) { - case "list": + case "list": { const list = context.putObject(objectId, key, []) const proxyList = listProxy(context, list, [ ... path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyList[i] = value[i] } - break; - case "text": + break + } + case "text": { const text = context.putObject(objectId, key, "", "text") const proxyText = textProxy(context, text, [ ... path, key ], readonly ); for (let i = 0; i < value.length; i++) { proxyText[i] = value.get(i) } - break; - case "map": + break + } + case "map": { const map = context.putObject(objectId, key, {}) const proxyMap : any = mapProxy(context, map, [ ... path, key ], readonly ); for (const key in value) { proxyMap[key] = value[key] } break; + } default: context.put(objectId, key, value, datatype) } @@ -161,7 +164,7 @@ const MapHandler = { }, deleteProperty (target, key) { - const { context, objectId, path, readonly, frozen } = target + const { context, objectId, readonly } = target target.cache = {} // reset cache on delete if (readonly) { throw new RangeError(`Object property "${key}" cannot be modified`) @@ -176,7 +179,7 @@ const MapHandler = { }, getOwnPropertyDescriptor (target, key) { - const { context, objectId } = target + // const { context, objectId } = target const value = this.get(target, key) if (typeof value !== 'undefined') { return { @@ -194,10 +197,9 @@ const MapHandler = { const ListHandler = { get (target, index) { - const {context, objectId, path, readonly, frozen, heads } = target + const {context, objectId, readonly, frozen, heads } = target index = parseListIndex(index) - // @ts-ignore - if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } + if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (index === OBJECT_ID) return objectId if (index === READ_ONLY) return readonly @@ -249,7 +251,7 @@ const ListHandler = { throw new RangeError(`Object property "${index}" cannot be modified`) } switch (datatype) { - case "list": + case "list": { let list if (index >= context.length(objectId)) { list = context.insertObject(objectId, index, []) @@ -259,7 +261,8 @@ const ListHandler = { const proxyList = listProxy(context, list, [ ... path, index ], readonly); proxyList.splice(0,0,...value) break; - case "text": + } + case "text": { let text if (index >= context.length(objectId)) { text = context.insertObject(objectId, index, "", "text") @@ -269,7 +272,8 @@ const ListHandler = { const proxyText = textProxy(context, text, [ ... 
path, index ], readonly); proxyText.splice(0,0,...value) break; - case "map": + } + case "map": { let map if (index >= context.length(objectId)) { map = context.insertObject(objectId, index, {}) @@ -281,6 +285,7 @@ const ListHandler = { proxyMap[key] = value[key] } break; + } default: if (index >= context.length(objectId)) { context.insert(objectId, index, value, datatype) @@ -311,7 +316,7 @@ const ListHandler = { }, getOwnPropertyDescriptor (target, index) { - const {context, objectId, path, readonly, frozen, heads} = target + const {context, objectId, heads} = target if (index === 'length') return {writable: true, value: context.length(objectId, heads) } if (index === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId} @@ -322,12 +327,12 @@ const ListHandler = { return { configurable: true, enumerable: true, value } }, - getPrototypeOf(target) { return Object.getPrototypeOf([]) }, + getPrototypeOf(target) { return Object.getPrototypeOf(target) }, ownKeys (target) : string[] { - const {context, objectId, heads } = target const keys : string[] = [] // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array // but not uncommenting it causes for (i in list) {} to not enumerate values properly + //const {context, objectId, heads } = target //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } keys.push("length"); return keys @@ -337,11 +342,10 @@ const ListHandler = { const TextHandler = Object.assign({}, ListHandler, { get (target, index) { // FIXME this is a one line change from ListHandler.get() - const {context, objectId, path, readonly, frozen, heads } = target + const {context, objectId, readonly, frozen, heads } = target index = parseListIndex(index) if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } - // @ts-ignore - if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } + if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } if (index === OBJECT_ID) return objectId if (index === READ_ONLY) return readonly if (index === FROZEN) return frozen @@ -482,23 +486,26 @@ function listMethods(target) { const values = vals.map((val) => import_value(val)) for (const [value,datatype] of values) { switch (datatype) { - case "list": + case "list": { const list = context.insertObject(objectId, index, []) const proxyList = listProxy(context, list, [ ... path, index ], readonly); proxyList.splice(0,0,...value) break; - case "text": + } + case "text": { const text = context.insertObject(objectId, index, "", "text") const proxyText = textProxy(context, text, [ ... path, index ], readonly); proxyText.splice(0,0,...value) break; - case "map": + } + case "map": { const map = context.insertObject(objectId, index, {}) const proxyMap : any = mapProxy(context, map, [ ... 
path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] } break; + } default: context.insert(objectId, index, value, datatype) } @@ -563,13 +570,13 @@ function listMethods(target) { 'slice', 'some', 'toLocaleString', 'toString']) { methods[method] = (...args) => { const list : any = [] - while (true) { - const value = valueAt(target, list.length) - if (value == undefined) { - break + let value + do { + value = valueAt(target, list.length) + if (value !== undefined) { + list.push(value) } - list.push(value) - } + } while (value !== undefined) return list[method](...args) } @@ -579,7 +586,7 @@ function listMethods(target) { } function textMethods(target) : any { - const {context, objectId, path, readonly, frozen, heads } = target + const {context, objectId, heads } = target const methods : any = { set (index, value) { return this[index] = value diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index e31f979c..2d568e1c 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,4 +1,4 @@ -import { Value } from "./low_level_api" +import { Value } from "./low_level" export class Text { elems: Value[] From d2fba6bf048169d2757fd786675ddfe3eac11234 Mon Sep 17 00:00:00 2001 From: Scott Trinh Date: Thu, 19 May 2022 09:13:56 -0400 Subject: [PATCH 09/17] Use an `UnknownObject` type alias --- automerge-js/src/common.ts | 8 +++++--- automerge-js/src/types.ts | 2 ++ 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 automerge-js/src/types.ts diff --git a/automerge-js/src/common.ts b/automerge-js/src/common.ts index f8abe8ea..6fc45c7c 100644 --- a/automerge-js/src/common.ts +++ b/automerge-js/src/common.ts @@ -1,4 +1,6 @@ -export function isObject(obj: any) : boolean { +import { UnknownObject } from './types'; + +export function isObject(obj: unknown) : obj is UnknownObject { return typeof obj === 'object' && obj !== null } @@ -6,9 +8,9 @@ export function isObject(obj: any) : boolean { * Returns a shallow copy of the object `obj`. Faster than `Object.assign({}, obj)`. 
 * https://jsperf.com/cloning-large-objects/1
 */
-export function copyObject(obj: any) : any {
+export function copyObject<T>(obj: T) : T {
   if (!isObject(obj)) return {}
-  const copy : any = {}
+  const copy = {}
   for (const key of Object.keys(obj)) {
     copy[key] = obj[key]
   }
diff --git a/automerge-js/src/types.ts b/automerge-js/src/types.ts
new file mode 100644
index 00000000..37443332
--- /dev/null
+++ b/automerge-js/src/types.ts
@@ -0,0 +1,2 @@
+export type UnknownObject = Record<string, unknown>;
+export type Dictionary<T> = Record<string, T>;

From bd35361354deedbca245120a64534eeb5da69539 Mon Sep 17 00:00:00 2001
From: Orion Henry
Date: Sun, 22 May 2022 13:28:41 -0400
Subject: [PATCH 10/17] fixed typescript errors, pull wasm dep (mostly) out

---
 automerge-js/package.json                    |   6 +-
 automerge-js/src/columnar.ts                 |  19 +-
 automerge-js/src/common.ts                   |   6 +-
 automerge-js/src/counter.ts                  |   2 +-
 automerge-js/src/encoding.ts                 |   1 +
 automerge-js/src/index.ts                    |  56 +++---
 automerge-js/src/low_level.ts                | 179 +------------------
 automerge-js/src/proxies.ts                  |  54 +++---
 automerge-js/src/text.ts                     |  10 +-
 automerge-js/src/types.ts                    |  16 ++
 automerge-js/tsconfig.json                   |   2 +-
 automerge-wasm/examples/webpack/package.json |   6 +-
 automerge-wasm/examples/webpack/src/index.js |   7 +-
 automerge-wasm/index.d.ts                    |  65 +++++--
 automerge-wasm/nodejs-index.js               |   2 +-
 automerge-wasm/web-index.js                  |  34 +++-
 16 files changed, 197 insertions(+), 268 deletions(-)

diff --git a/automerge-js/package.json b/automerge-js/package.json
index ac6c5c5a..30dc689a 100644
--- a/automerge-js/package.json
+++ b/automerge-js/package.json
@@ -43,7 +43,9 @@
   "license": "MIT",
   "scripts": {
     "lint": "eslint src",
-    "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/types.json",
+    "build": "yarn build-cjs",
+    "build-cjs": "tsc -p config/cjs.json && tsc -p config/types.json",
+    "build-mjs": "tsc -p config/mjs.json && tsc -p config/types.json",
     "test": "ts-mocha -p tsconfig.json test/**/*.ts"
   },
   "devDependencies": {
@@ -55,10 +57,10 @@
     "eslint": "^8.15.0",
     "mocha": "^10.0.0",
     "ts-mocha": "^10.0.0",
-    "automerge-wasm": "^0.1.3",
     "typescript": "^4.6.4"
   },
   "dependencies": {
+    "automerge-wasm": "file:../automerge-wasm",
     "fast-sha256": "^1.3.0",
     "pako": "^2.0.4",
     "uuid": "^8.3"
diff --git a/automerge-js/src/columnar.ts b/automerge-js/src/columnar.ts
index 2560380b..b1776910 100644
--- a/automerge-js/src/columnar.ts
+++ b/automerge-js/src/columnar.ts
@@ -1,10 +1,20 @@
 import * as pako from 'pako'
-import { copyObject, parseOpId, equalBytes } from './common'
+import { parseOpId, equalBytes } from './common'
 import {
   utf8ToString, hexStringToBytes, bytesToHexString,
   Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder
 } from './encoding'
+
+interface Op {
+  id: string;
+  action: string;
+  obj: string;
+  elemId?: string;
+  key?: string;
+  pred: string[];
+}
+
 // Maybe we should be using the platform's built-in hash implementation?
 // Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have
 // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest
@@ -133,11 +143,11 @@ function compareParsedOpIds(id1, id2) {
 function parseAllOpIds(changes, single) {
   const actors : any = {}, newChanges : any = []
   for (let change of changes) {
-    change = copyObject(change)
+    change = { ... change }
     actors[change.actor] = true
     change.ops = expandMultiOps(change.ops, change.startOp, change.actor)
     change.ops = change.ops.map(op => {
-      op = copyObject(op)
+      op = { ...
op } if (op.obj !== '_root') op.obj = parseOpId(op.obj) if (op.elemId && op.elemId !== '_head') op.elemId = parseOpId(op.elemId) if (op.child) op.child = parseOpId(op.child) @@ -962,7 +972,7 @@ function groupChangeOps(changes, ops) { changesByActor[change.actor].push(change) } - const opsById = {} + const opsById : { [key:string]: Op } = {} for (const op of ops) { if (op.action === 'del') throw new RangeError('document should not contain del operations') op.pred = opsById[op.id] ? opsById[op.id].pred : [] @@ -981,7 +991,6 @@ function groupChangeOps(changes, ops) { delete op.succ } for (const op of Object.values(opsById)) { - // @ts-ignore if (op.action === 'del') ops.push(op) } diff --git a/automerge-js/src/common.ts b/automerge-js/src/common.ts index 6fc45c7c..9b5a7299 100644 --- a/automerge-js/src/common.ts +++ b/automerge-js/src/common.ts @@ -8,14 +8,16 @@ export function isObject(obj: unknown) : obj is UnknownObject { * Returns a shallow copy of the object `obj`. Faster than `Object.assign({}, obj)`. * https://jsperf.com/cloning-large-objects/1 */ +/* export function copyObject(obj: T) : T { - if (!isObject(obj)) return {} - const copy = {} + if (!isObject(obj)) throw RangeError(`Cannot copy object '${obj}'`) //return {} + const copy : UnknownObject = {} for (const key of Object.keys(obj)) { copy[key] = obj[key] } return copy } +*/ /** * Takes a string in the form that is used to identify operations (a counter concatenated diff --git a/automerge-js/src/counter.ts b/automerge-js/src/counter.ts index 34ce211b..97372381 100644 --- a/automerge-js/src/counter.ts +++ b/automerge-js/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "./low_level" +import { Automerge, ObjID, Prop } from "./types" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. Since addition of integers is commutative, diff --git a/automerge-js/src/encoding.ts b/automerge-js/src/encoding.ts index 773c3288..dac447ec 100644 --- a/automerge-js/src/encoding.ts +++ b/automerge-js/src/encoding.ts @@ -287,6 +287,7 @@ export class Encoder { * the buffer constructed by this Encoder. 
*/ finish() { + return } } diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index e4fc5e4b..e20f32a2 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -1,24 +1,20 @@ -import { uuid } from './uuid' - export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" -import { Counter } from "./counter" -import { Text } from "./text" -import { Int, Uint, Float64 } from "./numbers" + import { isObject } from "./common" -export { Text } from "./text" -export { Counter } from "./counter" -export { Int, Uint, Float64 } from "./numbers" +import { Text, Counter } from "./types" +export { Text, Counter, Int, Uint, Float64 } from "./types" import { ApiHandler, LowLevelApi, UseApi } from "./low_level" -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./low_level" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "./low_level" -export type ChangeOptions = { message?: string, time?: number } +import { ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./types" +import { SyncState, SyncMessage, DecodedSyncMessage, AutomergeValue } from "./types" + +export type ChangeOptions = { message?: string, time?: number } export type Doc = { readonly [P in keyof T]: Doc } @@ -78,7 +74,7 @@ export function from(initialState: T | Doc, actor?: ActorId): Doc { return change(init(actor), (d) => Object.assign(d, initialState)) } -export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { +export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { if (typeof options === 'function') { return _change(doc, {}, options) } else if (typeof callback === 'function') { @@ -91,7 +87,7 @@ export function change(doc: Doc, options: string | ChangeOptions | Chan } } -function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { +function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { if (typeof callback !== "function") { @@ -134,7 +130,7 @@ function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn(doc: Doc, options: ChangeOptions) { +export function emptyChange(doc: Doc, options: ChangeOptions) { if (options === undefined) { options = {} } @@ -190,22 +186,20 @@ function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { if (values.length <= 1) { return } - const result = {} - for (const conflict of values) { - const datatype = conflict[0] - const value = conflict[1] - switch (datatype) { + const result : { [key: ObjID]: AutomergeValue } = {} + for (const fullVal of values) { + //const datatype = fullVal[0] + //const value = fullVal[1] + //switch (datatype) { + switch (fullVal[0]) { case "map": - //@ts-ignore - result[value] = mapProxy(context, value, [ prop ], true) + result[fullVal[1]] = mapProxy(context, fullVal[1], [ prop ], true) break; case "list": - //@ts-ignore - result[value] = listProxy(context, value, [ prop ], true) + result[fullVal[1]] = listProxy(context, fullVal[1], [ prop ], true) break; case "text": - //@ts-ignore - result[value] = textProxy(context, value, [ prop ], true) + result[fullVal[1]] = textProxy(context, fullVal[1], [ prop ], true) break; //case "table": //case "cursor": @@ -216,19 +210,16 @@ function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { case "boolean": case "bytes": case "null": - //@ts-ignore 
- result[conflict[2]] = value + result[fullVal[2]] = fullVal[1] break; case "counter": - //@ts-ignore - result[conflict[2]] = new Counter(value) + result[fullVal[2]] = new Counter(fullVal[1]) break; case "timestamp": - //@ts-ignore - result[conflict[2]] = new Date(value) + result[fullVal[2]] = new Date(fullVal[1]) break; default: - throw RangeError(`datatype ${datatype} unimplemented`) + throw RangeError(`datatype ${fullVal[0]} unimplemented`) } } return result @@ -394,7 +385,6 @@ export function toJS(doc: any) : any { return doc.map((a) => toJS(a)) } if (doc instanceof Text) { - //@ts-ignore return doc.map((a: any) => toJS(a)) } const tmp : any = {} diff --git a/automerge-js/src/low_level.ts b/automerge-js/src/low_level.ts index 27c18c56..5a1277fd 100644 --- a/automerge-js/src/low_level.ts +++ b/automerge-js/src/low_level.ts @@ -1,103 +1,7 @@ -export type Actor = string; -export type ObjID = string; -export type Change = Uint8Array; -export type SyncMessage = Uint8Array; -export type Prop = string | number; -export type Hash = string; -export type Heads = Hash[]; -export type Value = string | number | boolean | null | Date | Uint8Array -export type ObjType = string | Array | Object -export type FullValue = - ["str", string] | - ["int", number] | - ["uint", number] | - ["f64", number] | - ["boolean", boolean] | - ["timestamp", Date] | - ["counter", number] | - ["bytes", Uint8Array] | - ["null", Uint8Array] | - ["map", ObjID] | - ["list", ObjID] | - ["text", ObjID] | - ["table", ObjID] - -export enum ObjTypeName { - list = "list", - map = "map", - table = "table", - text = "text", -} - -export type Datatype = - "boolean" | - "str" | - "int" | - "uint" | - "f64" | - "null" | - "timestamp" | - "counter" | - "bytes" | - "map" | - "text" | - "list"; - -export type SyncHave = { - lastSync: Heads, - bloom: Uint8Array, -} - -export type DecodedSyncMessage = { - heads: Heads, - need: Heads, - have: SyncHave[] - changes: Change[] -} - -export type DecodedChange = { - actor: Actor, - seq: number - startOp: number, - time: number, - message: string | null, - deps: Heads, - hash: Hash, - ops: Op[] -} - -export type Op = { - action: string, - obj: ObjID, - key: string, - value?: string | number | boolean, - datatype?: string, - pred: string[], -} - -export type Patch = { - obj: ObjID - action: 'assign' | 'insert' | 'delete' - key: Prop - value: Value - datatype: Datatype - conflict: boolean -} - -export interface LowLevelApi { - create(actor?: Actor): Automerge; - load(data: Uint8Array, actor?: Actor): Automerge; - encodeChange(change: DecodedChange): Change; - decodeChange(change: Change): DecodedChange; - initSyncState(): SyncState; - encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; - decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; - encodeSyncState(state: SyncState): Uint8Array; - decodeSyncState(data: Uint8Array): SyncState; - exportSyncState(state: SyncState): JsSyncState; - importSyncState(state: JsSyncState): SyncState; -} +import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "automerge-wasm" +import { API as LowLevelApi } from "automerge-wasm" +export { API as LowLevelApi } from "automerge-wasm" export function UseApi(api: LowLevelApi) { for (const k in api) { @@ -105,6 +9,7 @@ export function UseApi(api: LowLevelApi) { } } +/* eslint-disable */ export const ApiHandler : LowLevelApi = { create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, load(data: Uint8Array, actor?: Actor): 
Automerge { throw new RangeError("Automerge.use() not called") }, @@ -118,78 +23,4 @@ export const ApiHandler : LowLevelApi = { exportSyncState(state: SyncState): JsSyncState { throw new RangeError("Automerge.use() not called") }, importSyncState(state: JsSyncState): SyncState { throw new RangeError("Automerge.use() not called") }, } - -export interface Automerge { - // change state - put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; - putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; - insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined; - insertObject(obj: ObjID, index: number, value: ObjType): ObjID; - push(obj: ObjID, value: Value, datatype?: Datatype): undefined; - pushObject(obj: ObjID, value: ObjType): ObjID; - splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; - increment(obj: ObjID, prop: Prop, value: number): void; - delete(obj: ObjID, prop: Prop): void; - - // returns a single value - if there is a conflict return the winner - get(obj: ObjID, prop: any, heads?: Heads): FullValue | null; - // return all values in case of a conflict - getAll(obj: ObjID, arg: any, heads?: Heads): FullValue[]; - keys(obj: ObjID, heads?: Heads): string[]; - text(obj: ObjID, heads?: Heads): string; - length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heads?: Heads): any; - - // transactions - commit(message?: string, time?: number): Hash; - merge(other: Automerge): Heads; - getActorId(): Actor; - pendingOps(): number; - rollback(): number; - - // patches - enablePatches(enable: boolean): void; - popPatches(): Patch[]; - - // save and load to local store - save(): Uint8Array; - saveIncremental(): Uint8Array; - loadIncremental(data: Uint8Array): number; - - // sync over network - receiveSyncMessage(state: SyncState, message: SyncMessage): void; - generateSyncMessage(state: SyncState): SyncMessage | null; - - // low level change functions - applyChanges(changes: Change[]): void; - getChanges(have_deps: Heads): Change[]; - getChangeByHash(hash: Hash): Change | null; - getChangesAdded(other: Automerge): Change[]; - getHeads(): Heads; - getLastLocalChange(): Change; - getMissingDeps(heads?: Heads): Heads; - - // memory management - free(): void; - clone(actor?: string): Automerge; - fork(actor?: string): Automerge; - forkAt(heads: Heads, actor?: string): Automerge; - - // dump internal state to console.log - dump(): void; - - // dump internal state to a JS object - toJS(): any; -} - -export interface JsSyncState { - lastSentHeads: any; - sentHashes: any; - readonly sharedHeads: any; -} - -export interface SyncState extends JsSyncState { - free(): void; - clone(): SyncState; -} - +/* eslint-enable */ diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index e936af64..05ac2873 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -1,9 +1,10 @@ -import { Automerge, Heads, ObjID } from "./low_level" +import { Automerge, Heads, ObjID } from "./types" import { Int, Uint, Float64 } from "./numbers" import { Counter, getWriteableCounter } from "./counter" import { Text } from "./text" import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } from "./constants" +import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue, Prop } from "./types" function parseListIndex(key) { if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) @@ -17,7 +18,7 @@ function parseListIndex(key) { return key } -function valueAt(target, prop) 
: any { +function valueAt(target, prop: Prop) : AutomergeValue | undefined { const { context, objectId, path, readonly, heads} = target const value = context.get(objectId, prop, heads) if (value === undefined) { @@ -97,7 +98,7 @@ function import_value(value) { } const MapHandler = { - get (target, key) : any { + get (target, key) : AutomergeValue { const { context, objectId, readonly, frozen, heads, cache } = target if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } if (key === OBJECT_ID) return objectId @@ -151,7 +152,7 @@ const MapHandler = { } case "map": { const map = context.putObject(objectId, key, {}) - const proxyMap : any = mapProxy(context, map, [ ... path, key ], readonly ); + const proxyMap = mapProxy(context, map, [ ... path, key ], readonly ); for (const key in value) { proxyMap[key] = value[key] } @@ -280,7 +281,7 @@ const ListHandler = { } else { map = context.putObject(objectId, index, {}) } - const proxyMap : any = mapProxy(context, map, [ ... path, index ], readonly); + const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] } @@ -328,7 +329,7 @@ const ListHandler = { }, getPrototypeOf(target) { return Object.getPrototypeOf(target) }, - ownKeys (target) : string[] { + ownKeys (/*target*/) : string[] { const keys : string[] = [] // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array // but not uncommenting it causes for (i in list) {} to not enumerate values properly @@ -369,29 +370,30 @@ const TextHandler = Object.assign({}, ListHandler, { return textMethods(target)[index] || listMethods(target)[index] } }, - getPrototypeOf(target) { + getPrototypeOf(/*target*/) { return Object.getPrototypeOf(new Text()) }, }) -export function mapProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : T { +export function mapProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : MapValue { return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) } -export function listProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : Array { +export function listProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : ListValue { const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, ListHandler) } -export function textProxy(context: Automerge, objectId: ObjID, path?: string[], readonly?: boolean, heads?: Heads) : Array { +export function textProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : TextValue { const target = [] Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) return new Proxy(target, TextHandler) } export function rootProxy(context: Automerge, readonly?: boolean) : T { - return mapProxy(context, "_root", [], !!readonly) + /* eslint-disable-next-line */ + return mapProxy(context, "_root", [], !!readonly) } function listMethods(target) { @@ -406,7 +408,7 @@ function listMethods(target) { return this }, - fill(val: any, start: number, end: number) { + fill(val: ScalarValue, start: number, end: number) { // FIXME needs tests const [value, datatype] = import_value(val) start = parseListIndex(start || 0) @@ -417,7 +419,7 @@ function listMethods(target) { return this }, - 
indexOf(o, start = 0) { + indexOf(/*o, start = 0*/) { // FIXME /* const id = o[OBJECT_ID] @@ -477,10 +479,12 @@ function listMethods(target) { if (readonly) { throw new RangeError("Sequence object cannot be modified outside of a change block") } - const result : any = [] + const result : AutomergeValue[] = [] for (let i = 0; i < del; i++) { const value = valueAt(target, index) - result.push(value) + if (value !== undefined) { + result.push(value) + } context.delete(objectId, index) } const values = vals.map((val) => import_value(val)) @@ -500,7 +504,7 @@ function listMethods(target) { } case "map": { const map = context.insertObject(objectId, index, {}) - const proxyMap : any = mapProxy(context, map, [ ... path, index ], readonly); + const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); for (const key in value) { proxyMap[key] = value[key] } @@ -569,7 +573,7 @@ function listMethods(target) { 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', 'slice', 'some', 'toLocaleString', 'toString']) { methods[method] = (...args) => { - const list : any = [] + const list : AutomergeValue = [] let value do { value = valueAt(target, list.length) @@ -585,22 +589,22 @@ function listMethods(target) { return methods } -function textMethods(target) : any { +function textMethods(target) { const {context, objectId, heads } = target - const methods : any = { + const methods = { set (index, value) { return this[index] = value }, - get (index) { + get (index) : AutomergeValue { return this[index] }, - toString () { + toString () : string { return context.text(objectId, heads).replace(//g,'') }, - toSpans () : any[] { - const spans : any[] = [] + toSpans () : AutomergeValue[] { + const spans : AutomergeValue[] = [] let chars = '' - const length = this.length + const length = context.length(objectId) for (let i = 0; i < length; i++) { const value = this[i] if (typeof value === 'string') { @@ -618,7 +622,7 @@ function textMethods(target) : any { } return spans }, - toJSON () { + toJSON () : string { return this.toString() } } diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index 2d568e1c..c58c1efa 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -1,4 +1,4 @@ -import { Value } from "./low_level" +import { Value } from "./types" export class Text { elems: Value[] @@ -114,11 +114,17 @@ export class Text { deleteAt(index, numDelete = 1) { this.elems.splice(index, numDelete) } + + map(callback, thisArg?) 
{
+    return this.elems.map(callback, thisArg)
+  }
+
+
 }

 // Read-only methods that can delegate to the JavaScript built-in array
 for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
-                      'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
+                      'indexOf', 'join', 'lastIndexOf', 'reduce', 'reduceRight',
                       'slice', 'some', 'toLocaleString']) {
   Text.prototype[method] = function (...args) {
     const array = [...this]
diff --git a/automerge-js/src/types.ts b/automerge-js/src/types.ts
index 37443332..609c71e7 100644
--- a/automerge-js/src/types.ts
+++ b/automerge-js/src/types.ts
@@ -1,2 +1,18 @@
+
+export { Actor as ActorId, Value, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm"
+export { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm"
+
+export { Text } from "./text"
+export { Counter } from "./counter"
+export { Int, Uint, Float64 } from "./numbers"
+
 export type UnknownObject = Record<string, unknown>;
 export type Dictionary<T> = Record<string, T>;
+
+import { Counter } from "./counter"
+
+export type AutomergeValue = ScalarValue | { [key: string]: AutomergeValue } | Array<AutomergeValue>
+export type MapValue = { [key: string]: AutomergeValue }
+export type ListValue = Array<AutomergeValue>
+export type TextValue = Array<AutomergeValue>
+export type ScalarValue = string | number | null | boolean | Date | Counter | Uint8Array
diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json
index b0e2620c..26fa7e8f 100644
--- a/automerge-js/tsconfig.json
+++ b/automerge-js/tsconfig.json
@@ -2,7 +2,7 @@
   "compilerOptions": {
     "target": "es2016",
     "sourceMap": false,
-    "declaration": false,
+    "declaration": true,
     "resolveJsonModule": true,
     "module": "commonjs",
     "moduleResolution": "node",
diff --git a/automerge-wasm/examples/webpack/package.json b/automerge-wasm/examples/webpack/package.json
index 5c90319c..2ba64736 100644
--- a/automerge-wasm/examples/webpack/package.json
+++ b/automerge-wasm/examples/webpack/package.json
@@ -10,12 +10,12 @@
   },
   "author": "",
   "dependencies": {
-    "automerge-wasm": "^0.1.2"
+    "automerge-wasm": "file:automerge-wasm-0.1.3.tgz"
   },
   "devDependencies": {
+    "serve": "^13.0.2",
     "webpack": "^5.72.1",
     "webpack-cli": "^4.9.2",
-    "webpack-node-externals": "^3.0.0",
-    "serve": "^13.0.2"
+    "webpack-node-externals": "^3.0.0"
   }
 }
diff --git a/automerge-wasm/examples/webpack/src/index.js b/automerge-wasm/examples/webpack/src/index.js
index 8394af50..bab417f5 100644
--- a/automerge-wasm/examples/webpack/src/index.js
+++ b/automerge-wasm/examples/webpack/src/index.js
@@ -2,10 +2,13 @@ import init, { create } from "automerge-wasm"

 // hello world code that will run correctly on web or node

-init().then(_ => {
-  const doc = create()
+init().then((Automerge) => {
+  console.log("Automerge=", Automerge)
+  console.log("create=", create)
+  const doc = Automerge.create()
   doc.put("/", "hello", "world")
   const result = doc.materialize("/")
+  //const result = xxx

   if (typeof document !== 'undefined') {
     // browser
diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts
index e4701a62..ba5cf07d 100644
--- a/automerge-wasm/index.d.ts
+++ b/automerge-wasm/index.d.ts
@@ -7,7 +7,8 @@ export type Prop = string | number;
 export type Hash = string;
 export type Heads = Hash[];
 export type Value = string | number | boolean | null | Date | Uint8Array
-export type ObjType = string | Array | Object
+export type MaterializeValue = Record<string, MaterializeValue> | Array<MaterializeValue> | Value
+export type ObjType = string | Array | Record
 export type FullValue =
   ["str", string] |
   ["int", number] |
@@ -17,12 +18,27 @@ export type FullValue =
   ["timestamp", Date] |
   ["counter", number] |
   ["bytes", Uint8Array] |
-  ["null", Uint8Array] |
+  ["null", null] |
   ["map", ObjID] |
   ["list", ObjID] |
   ["text", ObjID] |
   ["table", ObjID]

+export type FullValueWithId =
+  ["str", string, ObjID ] |
+  ["int", number, ObjID ] |
+  ["uint", number, ObjID ] |
+  ["f64", number, ObjID ] |
+  ["boolean", boolean, ObjID ] |
+  ["timestamp", Date, ObjID ] |
+  ["counter", number, ObjID ] |
+  ["bytes", Uint8Array, ObjID ] |
+  ["null", null, ObjID ] |
+  ["map", ObjID ] |
+  ["list", ObjID] |
+  ["text", ObjID] |
+  ["table", ObjID]
+
 export enum ObjTypeName {
   list = "list",
   map = "map",
@@ -44,7 +60,7 @@ export type Datatype =
   "text" |
   "list";

-export type SyncHave {
+export type SyncHave = {
   lastSync: Heads,
   bloom: Uint8Array,
 }
@@ -97,26 +113,40 @@ export function decodeSyncState(data: Uint8Array): SyncState;
 export function exportSyncState(state: SyncState): JsSyncState;
 export function importSyncState(state: JsSyncState): SyncState;

+export class API {
+  create(actor?: Actor): Automerge;
+  load(data: Uint8Array, actor?: Actor): Automerge;
+  encodeChange(change: DecodedChange): Change;
+  decodeChange(change: Change): DecodedChange;
+  initSyncState(): SyncState;
+  encodeSyncMessage(message: DecodedSyncMessage): SyncMessage;
+  decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage;
+  encodeSyncState(state: SyncState): Uint8Array;
+  decodeSyncState(data: Uint8Array): SyncState;
+  exportSyncState(state: SyncState): JsSyncState;
+  importSyncState(state: JsSyncState): SyncState;
+}
+
 export class Automerge {
   // change state
-  put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined;
+  put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): void;
   putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID;
-  insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined;
+  insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): void;
   insertObject(obj: ObjID, index: number, value: ObjType): ObjID;
-  push(obj: ObjID, value: Value, datatype?: Datatype): undefined;
+  push(obj: ObjID, value: Value, datatype?: Datatype): void;
   pushObject(obj: ObjID, value: ObjType): ObjID;
   splice(obj: ObjID, start: number, delete_count: number, text?: string | Array<Value>): ObjID[] | undefined;
   increment(obj: ObjID, prop: Prop, value: number): void;
   delete(obj: ObjID, prop: Prop): void;

   // returns a single value - if there is a conflict return the winner
-  get(obj: ObjID, prop: any, heads?: Heads): FullValue | null;
+  get(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null;
   // return all values in case of a conflict
-  getAll(obj: ObjID, arg: any, heads?: Heads): FullValue[];
+  getAll(obj: ObjID, arg: Prop, heads?: Heads): FullValueWithId[];
   keys(obj: ObjID, heads?: Heads): string[];
   text(obj: ObjID, heads?: Heads): string;
   length(obj: ObjID, heads?: Heads): number;
-  materialize(obj?: ObjID, heads?: Heads): any;
+  materialize(obj?: ObjID, heads?: Heads): MaterializeValue;

   // transactions
   commit(message?: string, time?: number): Hash;
@@ -155,20 +185,23 @@ export class Automerge {

   // dump internal state to console.log
   dump(): void;
-
-  // dump internal state to a JS object
-  toJS(): any;
 }

 export class JsSyncState {
+  sharedHeads: Heads;
+  lastSentHeads: Heads;
+  theirHeads: Heads | undefined;
+  theirNeed: Heads | undefined;
+  theirHave: SyncHave[] | undefined;
+  sentHashes: Heads;
 }

 export class SyncState {
   free(): void;
   clone(): SyncState;
-  lastSentHeads: any;
-  sentHashes: any;
-  readonly sharedHeads: any;
+
lastSentHeads: Heads; + sentHashes: Heads; + readonly sharedHeads: Heads; } -export default function init (): Promise; +export default function init (): Promise; diff --git a/automerge-wasm/nodejs-index.js b/automerge-wasm/nodejs-index.js index a8b9b1cd..58eddd76 100644 --- a/automerge-wasm/nodejs-index.js +++ b/automerge-wasm/nodejs-index.js @@ -3,4 +3,4 @@ module.exports = wasm module.exports.load = module.exports.loadDoc delete module.exports.loadDoc Object.defineProperty(module.exports, "__esModule", { value: true }); -module.exports.default = () => (new Promise((resolve,reject) => { resolve() })) +module.exports.default = () => (new Promise((resolve,reject) => { resolve(module.exports) })) diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js index 80057798..1ce280b3 100644 --- a/automerge-wasm/web-index.js +++ b/automerge-wasm/web-index.js @@ -11,5 +11,37 @@ export { exportSyncState, importSyncState, } from "./bindgen.js" +import { + loadDoc as load, + create, + encodeChange, + decodeChange, + initSyncState, + encodeSyncMessage, + decodeSyncMessage, + encodeSyncState, + decodeSyncState, + exportSyncState, + importSyncState, +} from "./bindgen.js" + +let api = { + load, + create, + encodeChange, + decodeChange, + initSyncState, + encodeSyncMessage, + decodeSyncMessage, + encodeSyncState, + decodeSyncState, + exportSyncState, + importSyncState +} + import init from "./bindgen.js" -export default init; +export default function() { + return new Promise((resolve,reject) => init().then(() => { + resolve({ ... api, load, create, foo: "bar" }) + })) +} From d638a41a6c960a43c2568fb884ae1dd449d2a69c Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 22 May 2022 13:41:01 -0400 Subject: [PATCH 11/17] record type --- automerge-wasm/index.d.ts | 2 +- automerge-wasm/package.json | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index ba5cf07d..ff94d279 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -8,7 +8,7 @@ export type Hash = string; export type Heads = Hash[]; export type Value = string | number | boolean | null | Date | Uint8Array export type MaterializeValue = Record | Array | Value -export type ObjType = string | Array | Record +export type ObjType = string | Array | Record export type FullValue = ["str", string] | ["int", number] | diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index 7029688c..b214fa81 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -26,16 +26,20 @@ "module": "./web/index.js", "main": "./nodejs/index.js", "scripts": { + "lint": "eslint test", "build": "cross-env PROFILE=dev TARGET=nodejs yarn target", "release": "cross-env PROFILE=release yarn buildall", "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target", "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET && cp $TARGET-index.js $TARGET/index.js", "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, - "dependencies": {}, + "dependencies": { + }, "devDependencies": { "@types/expect": "^24.3.0", "@types/jest": "^27.4.0", + "@typescript-eslint/eslint-plugin": "^5.25.0", + "@typescript-eslint/parser": "^5.25.0", "@types/mocha": "^9.1.0", "@types/node": "^17.0.13", "cross-env": "^7.0.3", @@ -44,6 +48,7 @@ "pako": "^2.0.4", "rimraf": "^3.0.2", "ts-mocha": "^9.0.2", - "typescript": "^4.5.5" + "typescript": "^4.5.5", + "eslint": "^8.15.0" 
} } From 07f5678a2bc578e10f7c6e506742a8fdb8c8b090 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sun, 22 May 2022 13:54:59 -0400 Subject: [PATCH 12/17] linting in wasm --- automerge-wasm/.eslintignore | 3 + automerge-wasm/.eslintrc.cjs | 11 + automerge-wasm/index.d.ts | 4 +- automerge-wasm/package.json | 12 +- automerge-wasm/test/readme.ts | 80 ++++---- automerge-wasm/test/test.ts | 373 +++++++++++++++++----------------- automerge-wasm/tsconfig.json | 3 +- 7 files changed, 251 insertions(+), 235 deletions(-) create mode 100644 automerge-wasm/.eslintignore create mode 100644 automerge-wasm/.eslintrc.cjs diff --git a/automerge-wasm/.eslintignore b/automerge-wasm/.eslintignore new file mode 100644 index 00000000..7cd573e3 --- /dev/null +++ b/automerge-wasm/.eslintignore @@ -0,0 +1,3 @@ +web +nodejs +examples diff --git a/automerge-wasm/.eslintrc.cjs b/automerge-wasm/.eslintrc.cjs new file mode 100644 index 00000000..80e08d55 --- /dev/null +++ b/automerge-wasm/.eslintrc.cjs @@ -0,0 +1,11 @@ +module.exports = { + root: true, + parser: '@typescript-eslint/parser', + plugins: [ + '@typescript-eslint', + ], + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + ], +}; diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts index ff94d279..cfecd081 100644 --- a/automerge-wasm/index.d.ts +++ b/automerge-wasm/index.d.ts @@ -7,8 +7,8 @@ export type Prop = string | number; export type Hash = string; export type Heads = Hash[]; export type Value = string | number | boolean | null | Date | Uint8Array -export type MaterializeValue = Record<string, MaterializeValue> | Array<MaterializeValue> | Value -export type ObjType = string | Array<ObjType | Value> | Record<string, ObjType | Value> +export type MaterializeValue = { [key:string]: MaterializeValue } | Array<MaterializeValue> | Value +export type ObjType = string | Array<ObjType | Value> | { [key: string]: ObjType | Value } export type FullValue = ["str", string] | ["int", number] | diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json index b214fa81..f1077fe2 100644 --- a/automerge-wasm/package.json +++ b/automerge-wasm/package.json @@ -26,29 +26,27 @@ "module": "./web/index.js", "main": "./nodejs/index.js", "scripts": { - "lint": "eslint test", + "lint": "eslint test/*.ts", "build": "cross-env PROFILE=dev TARGET=nodejs yarn target", "release": "cross-env PROFILE=release yarn buildall", "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target", "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET && cp $TARGET-index.js $TARGET/index.js", "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" }, - "dependencies": { - }, "devDependencies": { "@types/expect": "^24.3.0", "@types/jest": "^27.4.0", - "@typescript-eslint/eslint-plugin": "^5.25.0", - "@typescript-eslint/parser": "^5.25.0", "@types/mocha": "^9.1.0", "@types/node": "^17.0.13", + "@typescript-eslint/eslint-plugin": "^5.25.0", + "@typescript-eslint/parser": "^5.25.0", "cross-env": "^7.0.3", + "eslint": "^8.16.0", "fast-sha256": "^1.3.0", "mocha": "^9.1.3", "pako": "^2.0.4", "rimraf": "^3.0.2", "ts-mocha": "^9.0.2", - "typescript": "^4.5.5", - "eslint": "^8.15.0" + "typescript": "^4.6.4" } } diff --git a/automerge-wasm/test/readme.ts b/automerge-wasm/test/readme.ts index 5b7ddaf2..d06df0fb 100644 --- a/automerge-wasm/test/readme.ts +++ b/automerge-wasm/test/readme.ts @@ -7,18 +7,18 @@ import init, { create, load } from '..'
describe('Automerge', () => { describe('Readme Examples', () => { it('Using the Library and Creating a Document (1)', () => { - let doc = create() + const doc = create() doc.free() }) it('Using the Library and Creating a Document (2)', (done) => { init().then((_:any) => { - let doc = create() + const doc = create() doc.free() done() }) }) it('Automerge Scalar Types (1)', () => { - let doc = create() + const doc = create() doc.put("/", "prop1", 100) // int doc.put("/", "prop2", 3.14) // f64 doc.put("/", "prop3", "hello world") @@ -40,7 +40,7 @@ describe('Automerge', () => { doc.free() }) it('Automerge Scalar Types (2)', () => { - let doc = create() + const doc = create() doc.put("/", "prop1", 100, "int") doc.put("/", "prop2", 100, "uint") doc.put("/", "prop3", 100.5, "f64") @@ -54,37 +54,37 @@ describe('Automerge', () => { doc.free() }) it('Automerge Object Types (1)', () => { - let doc = create() + const doc = create() // you can create an object by passing in the inital state - if blank pass in `{}` // the return value is the Object Id // these functions all return an object id - let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) - let token = doc.putObject("/", "tokens", {}) + const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + const token = doc.putObject("/", "tokens", {}) // lists can be made with javascript arrays - let birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"]) - let bots = doc.putObject("/", "bots", []) + const birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"]) + const bots = doc.putObject("/", "bots", []) // text is initialized with a string - let notes = doc.putObject("/", "notes", "Hello world!") + const notes = doc.putObject("/", "notes", "Hello world!") doc.free() }) it('Automerge Object Types (2)', () => { - let doc = create() + const doc = create() - let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) + const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) doc.put(config, "align", "right") // Anywhere Object Ids are being used a path can also be used. 
// The following two statements are equivalent: - let id = doc.get("/", "config") + const id = doc.get("/", "config") if (id && id[0] === 'map') { doc.put(id[1], "align", "right") } @@ -98,14 +98,14 @@ describe('Automerge', () => { doc.free() }) it('Maps (1)', () => { - let doc = create() - let mymap = doc.putObject("_root", "mymap", { foo: "bar"}) + const doc = create() + const mymap = doc.putObject("_root", "mymap", { foo: "bar"}) // make a new map with the foo key doc.put(mymap, "bytes", new Uint8Array([1,2,3])) // assign a byte array to key `bytes` of the mymap object - let submap = doc.putObject(mymap, "sub", {}) + const submap = doc.putObject(mymap, "sub", {}) // make a new empty object and assign it to the key `sub` of mymap assert.deepEqual(doc.keys(mymap),["bytes","foo","sub"]) @@ -114,8 +114,8 @@ describe('Automerge', () => { doc.free() }) it('Lists (1)', () => { - let doc = create() - let items = doc.putObject("_root", "items", [10,"box"]) + const doc = create() + const items = doc.putObject("_root", "items", [10,"box"]) // init a new list with two elements doc.push(items, true) // push `true` to the end of the list doc.putObject(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value @@ -130,13 +130,13 @@ describe('Automerge', () => { doc.free() }) it('Text (1)', () => { - let doc = create("aaaaaa") - let notes = doc.putObject("_root", "notes", "Hello world") + const doc = create("aaaaaa") + const notes = doc.putObject("_root", "notes", "Hello world") doc.splice(notes, 6, 5, "everyone") assert.deepEqual(doc.text(notes), "Hello everyone") - let obj = doc.insertObject(notes, 6, { hi: "there" }) + const obj = doc.insertObject(notes, 6, { hi: "there" }) assert.deepEqual(doc.text(notes), "Hello \ufffceveryone") assert.deepEqual(doc.get(notes, 6), ["map", obj]) @@ -145,15 +145,15 @@ describe('Automerge', () => { doc.free() }) it('Querying Data (1)', () => { - let doc1 = create("aabbcc") + const doc1 = create("aabbcc") doc1.put("_root", "key1", "val1") - let key2 = doc1.putObject("_root", "key2", []) + const key2 = doc1.putObject("_root", "key2", []) assert.deepEqual(doc1.get("_root", "key1"), ["str", "val1"]) assert.deepEqual(doc1.get("_root", "key2"), ["list", "2@aabbcc"]) assert.deepEqual(doc1.keys("_root"), ["key1", "key2"]) - let doc2 = doc1.fork("ffaaff") + const doc2 = doc1.fork("ffaaff") // set a value concurrently doc1.put("_root","key3","doc1val") @@ -167,11 +167,11 @@ describe('Automerge', () => { doc1.free(); doc2.free() }) it('Counters (1)', () => { - let doc1 = create("aaaaaa") + const doc1 = create("aaaaaa") doc1.put("_root", "number", 0) doc1.put("_root", "total", 0, "counter") - let doc2 = doc1.fork("bbbbbb") + const doc2 = doc1.fork("bbbbbb") doc2.put("_root", "number", 10) doc2.increment("_root", "total", 11) @@ -185,7 +185,7 @@ describe('Automerge', () => { doc1.free(); doc2.free() }) it('Transactions (1)', () => { - let doc = create() + const doc = create() doc.put("_root", "key", "val1") @@ -209,13 +209,13 @@ describe('Automerge', () => { doc.free() }) it('Viewing Old Versions of the Document (1)', () => { - let doc = create() + const doc = create() doc.put("_root", "key", "val1") - let heads1 = doc.getHeads() + const heads1 = doc.getHeads() doc.put("_root", "key", "val2") - let heads2 = doc.getHeads() + const heads2 = doc.getHeads() doc.put("_root", "key", "val3") @@ -227,10 +227,10 @@ describe('Automerge', () => { doc.free() }) it('Forking And Merging (1)', () => { - let doc1 = create() + const doc1 = create() 
doc1.put("_root", "key1", "val1") - let doc2 = doc1.fork() + const doc2 = doc1.fork() doc1.put("_root", "key2", "val2") doc2.put("_root", "key3", "val3") @@ -243,31 +243,31 @@ describe('Automerge', () => { doc1.free(); doc2.free() }) it('Saving And Loading (1)', () => { - let doc1 = create() + const doc1 = create() doc1.put("_root", "key1", "value1") - let save1 = doc1.save() + const save1 = doc1.save() - let doc2 = load(save1) + const doc2 = load(save1) doc2.materialize("_root") // returns { key1: "value1" } doc1.put("_root", "key2", "value2") - let saveIncremental = doc1.saveIncremental() + const saveIncremental = doc1.saveIncremental() - let save2 = doc1.save() + const save2 = doc1.save() - let save3 = new Uint8Array([... save1, ... saveIncremental]) + const save3 = new Uint8Array([... save1, ... saveIncremental]) // save2 has fewer bytes than save3 but contains the same ops doc2.loadIncremental(saveIncremental) - let doc3 = load(save2) + const doc3 = load(save2) - let doc4 = load(save3) + const doc4 = load(save3) assert.deepEqual(doc1.materialize("_root"), { key1: "value1", key2: "value2" }) assert.deepEqual(doc2.materialize("_root"), { key1: "value1", key2: "value2" }) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts index e02dde26..ce04d930 100644 --- a/automerge-wasm/test/test.ts +++ b/automerge-wasm/test/test.ts @@ -33,29 +33,29 @@ describe('Automerge', () => { }) it('should create, clone and free', () => { - let doc1 = create() - let doc2 = doc1.clone() + const doc1 = create() + const doc2 = doc1.clone() doc1.free() doc2.free() }) it('should be able to start and commit', () => { - let doc = create() + const doc = create() doc.commit() doc.free() }) it('getting a nonexistant prop does not throw an error', () => { - let doc = create() - let root = "_root" - let result = doc.get(root,"hello") + const doc = create() + const root = "_root" + const result = doc.get(root,"hello") assert.deepEqual(result,undefined) doc.free() }) it('should be able to set and get a simple value', () => { - let doc : Automerge = create("aabbcc") - let root = "_root" + const doc : Automerge = create("aabbcc") + const root = "_root" let result doc.put(root, "hello", "world") @@ -112,22 +112,22 @@ describe('Automerge', () => { }) it('should be able to use bytes', () => { - let doc = create() + const doc = create() doc.put("_root","data1", new Uint8Array([10,11,12])); doc.put("_root","data2", new Uint8Array([13,14,15]), "bytes"); - let value1 = doc.get("_root", "data1") + const value1 = doc.get("_root", "data1") assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); - let value2 = doc.get("_root", "data2") + const value2 = doc.get("_root", "data2") assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); doc.free() }) it('should be able to make sub objects', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" let result - let submap = doc.putObject(root, "submap", {}) + const submap = doc.putObject(root, "submap", {}) doc.put(submap, "number", 6, "uint") assert.strictEqual(doc.pendingOps(),2) @@ -140,10 +140,10 @@ describe('Automerge', () => { }) it('should be able to make lists', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" - let submap = doc.putObject(root, "numbers", []) + const submap = doc.putObject(root, "numbers", []) doc.insert(submap, 0, "a"); doc.insert(submap, 1, "b"); doc.insert(submap, 2, "c"); @@ -163,15 +163,15 @@ describe('Automerge', () => { }) it('lists have 
insert, set, splice, and push ops', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" - let submap = doc.putObject(root, "letters", []) + const submap = doc.putObject(root, "letters", []) doc.insert(submap, 0, "a"); doc.insert(submap, 0, "b"); assert.deepEqual(doc.materialize(), { letters: ["b", "a" ] }) doc.push(submap, "c"); - let heads = doc.getHeads() + const heads = doc.getHeads() assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c" ] }) doc.push(submap, 3, "timestamp"); assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3) ] }) @@ -187,17 +187,17 @@ describe('Automerge', () => { }) it('should be able delete non-existant props', () => { - let doc = create() + const doc = create() doc.put("_root", "foo","bar") doc.put("_root", "bip","bap") - let hash1 = doc.commit() + const hash1 = doc.commit() assert.deepEqual(doc.keys("_root"),["bip","foo"]) doc.delete("_root", "foo") doc.delete("_root", "baz") - let hash2 = doc.commit() + const hash2 = doc.commit() assert.deepEqual(doc.keys("_root"),["bip"]) assert.deepEqual(doc.keys("_root", [hash1]),["bip", "foo"]) @@ -206,8 +206,8 @@ describe('Automerge', () => { }) it('should be able to del', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" doc.put(root, "xxx", "xxx"); assert.deepEqual(doc.get(root, "xxx"),["str","xxx"]) @@ -217,8 +217,8 @@ describe('Automerge', () => { }) it('should be able to use counters', () => { - let doc = create() - let root = "_root" + const doc = create() + const root = "_root" doc.put(root, "counter", 10, "counter"); assert.deepEqual(doc.get(root, "counter"),["counter",10]) @@ -230,10 +230,10 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - let doc = create() - let root = "_root"; + const doc = create() + const root = "_root"; - let text = doc.putObject(root, "text", ""); + const text = doc.putObject(root, "text", ""); doc.splice(text, 0, 0, "hello ") doc.splice(text, 6, 0, ["w","o","r","l","d"]) doc.splice(text, 11, 0, ["!","?"]) @@ -247,39 +247,39 @@ describe('Automerge', () => { }) it('should be able to insert objects into text', () => { - let doc = create() - let text = doc.putObject("/", "text", "Hello world"); - let obj = doc.insertObject(text, 6, { hello: "world" }); + const doc = create() + const text = doc.putObject("/", "text", "Hello world"); + const obj = doc.insertObject(text, 6, { hello: "world" }); assert.deepEqual(doc.text(text), "Hello \ufffcworld"); assert.deepEqual(doc.get(text, 6), ["map", obj]); assert.deepEqual(doc.get(obj, "hello"), ["str", "world"]); }) it('should be able save all or incrementally', () => { - let doc = create() + const doc = create() doc.put("_root", "foo", 1) - let save1 = doc.save() + const save1 = doc.save() doc.put("_root", "bar", 2) - let saveMidway = doc.clone().save(); + const saveMidway = doc.clone().save(); - let save2 = doc.saveIncremental(); + const save2 = doc.saveIncremental(); doc.put("_root", "baz", 3); - let save3 = doc.saveIncremental(); + const save3 = doc.saveIncremental(); - let saveA = doc.save(); - let saveB = new Uint8Array([... save1, ...save2, ...save3]); + const saveA = doc.save(); + const saveB = new Uint8Array([... 
save1, ...save2, ...save3]); assert.notDeepEqual(saveA, saveB); - let docA = load(saveA); - let docB = load(saveB); - let docC = load(saveMidway) + const docA = load(saveA); + const docB = load(saveB); + const docC = load(saveMidway) docC.loadIncremental(save3) assert.deepEqual(docA.keys("_root"), docB.keys("_root")); @@ -292,12 +292,12 @@ describe('Automerge', () => { }) it('should be able to splice text', () => { - let doc = create() - let text = doc.putObject("_root", "text", ""); + const doc = create() + const text = doc.putObject("_root", "text", ""); doc.splice(text, 0, 0, "hello world"); - let hash1 = doc.commit(); + const hash1 = doc.commit(); doc.splice(text, 6, 0, "big bad "); - let hash2 = doc.commit(); + const hash2 = doc.commit(); assert.strictEqual(doc.text(text), "hello big bad world") assert.strictEqual(doc.length(text), 19) assert.strictEqual(doc.text(text, [ hash1 ]), "hello world") @@ -308,10 +308,10 @@ describe('Automerge', () => { }) it('local inc increments all visible counters in a map', () => { - let doc1 = create("aaaa") + const doc1 = create("aaaa") doc1.put("_root", "hello", "world") - let doc2 = load(doc1.save(), "bbbb"); - let doc3 = load(doc1.save(), "cccc"); + const doc2 = load(doc1.save(), "bbbb"); + const doc3 = load(doc1.save(), "cccc"); let heads = doc1.getHeads() doc1.put("_root", "cnt", 20) doc2.put("_root", "cnt", 0, "counter") @@ -331,8 +331,8 @@ describe('Automerge', () => { [ 'counter', 15, '2@cccc' ], ]) - let save1 = doc1.save() - let doc4 = load(save1) + const save1 = doc1.save() + const doc4 = load(save1) assert.deepEqual(doc4.save(), save1); doc1.free() doc2.free() @@ -341,11 +341,11 @@ describe('Automerge', () => { }) it('local inc increments all visible counters in a sequence', () => { - let doc1 = create("aaaa") - let seq = doc1.putObject("_root", "seq", []) + const doc1 = create("aaaa") + const seq = doc1.putObject("_root", "seq", []) doc1.insert(seq, 0, "hello") - let doc2 = load(doc1.save(), "bbbb"); - let doc3 = load(doc1.save(), "cccc"); + const doc2 = load(doc1.save(), "bbbb"); + const doc3 = load(doc1.save(), "cccc"); let heads = doc1.getHeads() doc1.put(seq, 0, 20) doc2.put(seq, 0, 0, "counter") @@ -365,8 +365,8 @@ describe('Automerge', () => { [ 'counter', 15, '3@cccc' ], ]) - let save = doc1.save() - let doc4 = load(save) + const save = doc1.save() + const doc4 = load(save) assert.deepEqual(doc4.save(), save); doc1.free() doc2.free() @@ -375,7 +375,7 @@ describe('Automerge', () => { }) it('paths can be used instead of objids', () => { - let doc = create("aaaa") + const doc = create("aaaa") doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar"}, [1,2,3]] }) assert.deepEqual(doc.materialize("/list"), [{ foo: "bar"}, [1,2,3]]) @@ -383,26 +383,26 @@ describe('Automerge', () => { }) it('should be able to fetch changes by hash', () => { - let doc1 = create("aaaa") - let doc2 = create("bbbb") + const doc1 = create("aaaa") + const doc2 = create("bbbb") doc1.put("/","a","b") doc2.put("/","b","c") - let head1 = doc1.getHeads() - let head2 = doc2.getHeads() - let change1 = doc1.getChangeByHash(head1[0]) - let change2 = doc1.getChangeByHash(head2[0]) + const head1 = doc1.getHeads() + const head2 = doc2.getHeads() + const change1 = doc1.getChangeByHash(head1[0]) + const change2 = doc1.getChangeByHash(head2[0]) assert.deepEqual(change2, null) if (change1 === null) { throw new RangeError("change1 should not be null") } assert.deepEqual(decodeChange(change1).hash, head1[0]) }) 
it('recursive sets are possible', () => { - let doc = create("aaaa") - let l1 = doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) - let l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) - let l3 = doc.putObject("_root","info1","hello world") // 'text' object + const doc = create("aaaa") + const l1 = doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]]) + const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) + const l3 = doc.putObject("_root","info1","hello world") // 'text' object doc.put("_root","info2","hello world") // 'str' - let l4 = doc.putObject("_root","info3","hello world") + const l4 = doc.putObject("_root","info3","hello world") assert.deepEqual(doc.materialize(), { "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], "info1": "hello world", @@ -416,15 +416,15 @@ describe('Automerge', () => { }) it('only returns an object id when objects are created', () => { - let doc = create("aaaa") - let r1 = doc.put("_root","foo","bar") - let r2 = doc.putObject("_root","list",[]) - let r3 = doc.put("_root","counter",10, "counter") - let r4 = doc.increment("_root","counter",1) - let r5 = doc.delete("_root","counter") - let r6 = doc.insert(r2,0,10); - let r7 = doc.insertObject(r2,0,{}); - let r8 = doc.splice(r2,1,0,["a","b","c"]); + const doc = create("aaaa") + const r1 = doc.put("_root","foo","bar") + const r2 = doc.putObject("_root","list",[]) + const r3 = doc.put("_root","counter",10, "counter") + const r4 = doc.increment("_root","counter",1) + const r5 = doc.delete("_root","counter") + const r6 = doc.insert(r2,0,10); + const r7 = doc.insertObject(r2,0,{}); + const r8 = doc.splice(r2,1,0,["a","b","c"]); //let r9 = doc.splice(r2,1,0,["a",[],{},"d"]); assert.deepEqual(r1,null); assert.deepEqual(r2,"2@aaaa"); @@ -439,13 +439,13 @@ describe('Automerge', () => { }) it('objects without properties are preserved', () => { - let doc1 = create("aaaa") - let a = doc1.putObject("_root","a",{}); - let b = doc1.putObject("_root","b",{}); - let c = doc1.putObject("_root","c",{}); - let d = doc1.put(c,"d","dd"); - let saved = doc1.save(); - let doc2 = load(saved); + const doc1 = create("aaaa") + const a = doc1.putObject("_root","a",{}); + const b = doc1.putObject("_root","b",{}); + const c = doc1.putObject("_root","c",{}); + const d = doc1.put(c,"d","dd"); + const saved = doc1.save(); + const doc2 = load(saved); assert.deepEqual(doc2.get("_root","a"),["map",a]) assert.deepEqual(doc2.keys(a),[]) assert.deepEqual(doc2.get("_root","b"),["map",b]) @@ -458,26 +458,26 @@ describe('Automerge', () => { }) it('should allow you to forkAt a heads', () => { - let A = create("aaaaaa") + const A = create("aaaaaa") A.put("/", "key1","val1"); A.put("/", "key2","val2"); - let heads1 = A.getHeads(); - let B = A.fork("bbbbbb") + const heads1 = A.getHeads(); + const B = A.fork("bbbbbb") A.put("/", "key3","val3"); B.put("/", "key4","val4"); A.merge(B) - let heads2 = A.getHeads(); + const heads2 = A.getHeads(); A.put("/", "key5","val5"); assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/",heads1)) assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/",heads2)) }) it('should handle merging text conflicts then saving & loading', () => { - let A = create("aabbcc") - let At = A.putObject('_root', 'text', "") + const A = create("aabbcc") + const At = A.putObject('_root', 'text', "") A.splice(At, 0, 0, 'hello') - let B = A.fork() + const B = A.fork() assert.deepEqual(B.get("_root","text"), [ "text", At]) @@ -488,9 +488,9 @@ describe('Automerge', () => { A.merge(B) - let binary = A.save() 
+ const binary = A.save() - let C = load(binary) + const C = load(binary) assert.deepEqual(C.get('_root', 'text'), ['text', '1@aabbcc']) assert.deepEqual(C.text(At), 'hell! world') @@ -499,7 +499,7 @@ describe('Automerge', () => { describe('patch generation', () => { it('should include root object key updates', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.put('_root', 'hello', 'world') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -511,7 +511,7 @@ describe('Automerge', () => { }) it('should include nested object creation', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', {friday: {robins: 3}}) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -525,7 +525,7 @@ describe('Automerge', () => { }) it('should delete map keys', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.put('_root', 'favouriteBird', 'Robin') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -540,7 +540,7 @@ describe('Automerge', () => { }) it('should include list element insertion', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -554,7 +554,7 @@ describe('Automerge', () => { }) it('should insert nested maps into a list', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', []) doc2.loadIncremental(doc1.saveIncremental()) doc1.insertObject('1@aaaa', 0, {species: 'Goldfinch', count: 3}) @@ -570,7 +570,7 @@ describe('Automerge', () => { }) it('should calculate list indexes based on visible elements', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.loadIncremental(doc1.saveIncremental()) doc1.delete('1@aaaa', 0) @@ -588,9 +588,9 @@ describe('Automerge', () => { }) it('should handle concurrent insertions at the head of a list', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.putObject('_root', 'values', []) - let change1 = doc1.saveIncremental() + const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) @@ -598,7 +598,7 @@ describe('Automerge', () => { doc1.insert('1@aaaa', 1, 'd') doc2.insert('1@aaaa', 0, 'a') doc2.insert('1@aaaa', 1, 'b') - let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) @@ -621,9 +621,9 @@ describe('Automerge', () => { }) it('should handle concurrent insertions beyond the head', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.putObject('_root', 'values', ['a', 'b']) - let change1 = 
doc1.saveIncremental() + const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) @@ -631,7 +631,7 @@ describe('Automerge', () => { doc1.insert('1@aaaa', 3, 'f') doc2.insert('1@aaaa', 2, 'c') doc2.insert('1@aaaa', 3, 'd') - let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) @@ -654,10 +654,10 @@ describe('Automerge', () => { }) it('should handle conflicts on root object keys', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Goldfinch') - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) doc3.loadIncremental(change1); doc3.loadIncremental(change2) @@ -678,11 +678,11 @@ describe('Automerge', () => { }) it('should handle three-way conflicts', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Chaffinch') doc3.put('_root', 'bird', 'Goldfinch') - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental(), change3 = doc3.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental(), change3 = doc3.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) doc3.enablePatches(true) @@ -717,11 +717,11 @@ describe('Automerge', () => { }) it('should allow a conflict to be resolved', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Chaffinch') doc3.enablePatches(true) - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.loadIncremental(change2); doc3.loadIncremental(change1) doc2.loadIncremental(change1); doc3.loadIncremental(change2) doc1.put('_root', 'bird', 'Goldfinch') @@ -736,12 +736,12 @@ describe('Automerge', () => { }) it('should handle a concurrent map key overwrite and delete', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.put('_root', 'bird', 'Greenfinch') doc2.loadIncremental(doc1.saveIncremental()) doc1.put('_root', 'bird', 'Goldfinch') doc2.delete('_root', 'bird') - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) doc1.loadIncremental(change2) @@ -760,15 +760,15 @@ describe('Automerge', () => { }) it('should handle a conflict on a list element', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.putObject('_root', 'birds', ['Thrush', 'Magpie']) - let change1 = doc1.saveIncremental() + const 
change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) doc1.put('1@aaaa', 0, 'Song Thrush') doc2.put('1@aaaa', 0, 'Redwing') - let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) @@ -789,9 +789,9 @@ describe('Automerge', () => { }) it('should handle a concurrent list element overwrite and delete', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') doc1.putObject('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) - let change1 = doc1.saveIncremental() + const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) doc3.loadIncremental(change1) doc4.loadIncremental(change1) @@ -799,7 +799,7 @@ describe('Automerge', () => { doc1.put('1@aaaa', 1, 'Song Thrush') doc2.put('1@aaaa', 0, 'Ring-necked parakeet') doc2.put('1@aaaa', 2, 'Redwing') - let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() + const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() doc3.enablePatches(true) doc4.enablePatches(true) doc3.loadIncremental(change2); doc3.loadIncremental(change3) @@ -824,12 +824,12 @@ describe('Automerge', () => { }) it('should handle deletion of a conflict value', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') doc1.put('_root', 'bird', 'Robin') doc2.put('_root', 'bird', 'Wren') - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc2.delete('_root', 'bird') - let change3 = doc2.saveIncremental() + const change3 = doc2.saveIncremental() doc3.enablePatches(true) doc3.loadIncremental(change1) doc3.loadIncremental(change2) @@ -848,10 +848,10 @@ describe('Automerge', () => { }) it('should handle conflicting nested objects', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc1.putObject('_root', 'birds', ['Parakeet']) doc2.putObject('_root', 'birds', {'Sparrowhawk': 1}) - let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() + const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() doc1.enablePatches(true) doc2.enablePatches(true) doc1.loadIncremental(change2) @@ -871,7 +871,7 @@ describe('Automerge', () => { it('should support date objects', () => { // FIXME: either use Date objects or use numbers consistently - let doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date() + const doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date() doc1.put('_root', 'createdAt', now.getTime(), 'timestamp') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) @@ -883,7 +883,7 @@ describe('Automerge', () => { }) it('should capture local put ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) doc1.put('_root', 'key1', 1) doc1.put('_root', 'key1', 2) @@ -902,7 +902,7 @@ describe('Automerge', () => { }) it('should capture local insert ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) 
doc1.insert(list, 0, 1) @@ -923,7 +923,7 @@ describe('Automerge', () => { }) it('should capture local push ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.push(list, 1) @@ -940,7 +940,7 @@ describe('Automerge', () => { }) it('should capture local splice ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.splice(list, 0, 0, [1,2,3,4]) @@ -959,7 +959,7 @@ describe('Automerge', () => { }) it('should capture local increment ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) doc1.put('_root', 'counter', 2, 'counter') doc1.increment('_root', 'counter', 4) @@ -973,7 +973,7 @@ describe('Automerge', () => { it('should capture local delete ops', () => { - let doc1 = create('aaaa') + const doc1 = create('aaaa') doc1.enablePatches(true) doc1.put('_root', 'key1', 1) doc1.put('_root', 'key2', 2) @@ -989,7 +989,7 @@ describe('Automerge', () => { }) it('should support counters in a map', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc2.enablePatches(true) doc1.put('_root', 'starlings', 2, 'counter') doc2.loadIncremental(doc1.saveIncremental()) @@ -1004,7 +1004,7 @@ describe('Automerge', () => { }) it('should support counters in a list', () => { - let doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create('aaaa'), doc2 = create('bbbb') doc2.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc2.loadIncremental(doc1.saveIncremental()) @@ -1029,9 +1029,9 @@ describe('Automerge', () => { describe('sync', () => { it('should send a sync message implying no local data', () => { - let doc = create() - let s1 = initSyncState() - let m1 = doc.generateSyncMessage(s1) + const doc = create() + const s1 = initSyncState() + const m1 = doc.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } const message: DecodedSyncMessage = decodeSyncMessage(m1) assert.deepStrictEqual(message.heads, []) @@ -1043,21 +1043,21 @@ describe('Automerge', () => { }) it('should not reply if we have no data as well', () => { - let n1 = create(), n2 = create() - let s1 = initSyncState(), s2 = initSyncState() - let m1 = n1.generateSyncMessage(s1) + const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() + const m1 = n1.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } n2.receiveSyncMessage(s2, m1) - let m2 = n2.generateSyncMessage(s2) + const m2 = n2.generateSyncMessage(s2) assert.deepStrictEqual(m2, null) }) it('repos with equal heads do not need a reply message', () => { - let n1 = create(), n2 = create() - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes - let list = n1.putObject("_root","n", []) + const list = n1.putObject("_root","n", []) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -1067,21 +1067,21 @@ describe('Automerge', () => { assert.deepStrictEqual(n1.materialize(), n2.materialize()) // generate a naive sync message - let m1 = n1.generateSyncMessage(s1) + const m1 = n1.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) // heads 
are equal so this message should be null n2.receiveSyncMessage(s2, m1) - let m2 = n2.generateSyncMessage(s2) + const m2 = n2.generateSyncMessage(s2) assert.strictEqual(m2, null) }) it('n1 should offer all changes to n2 when starting from nothing', () => { - let n1 = create(), n2 = create() + const n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.putObject("_root","n",[]) + const list = n1.putObject("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list, i, i) @@ -1094,10 +1094,10 @@ describe('Automerge', () => { }) it('should sync peers where one has commits the other does not', () => { - let n1 = create(), n2 = create() + const n1 = create(), n2 = create() // make changes for n1 that n2 should request - let list = n1.putObject("_root","n",[]) + const list = n1.putObject("_root","n",[]) n1.commit("",0) for (let i = 0; i < 10; i++) { n1.insert(list,i,i) @@ -1111,8 +1111,8 @@ describe('Automerge', () => { it('should work with prior sync state', () => { // create & synchronize two nodes - let n1 = create(), n2 = create() - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { n1.put("_root","x",i) @@ -1134,8 +1134,8 @@ describe('Automerge', () => { it('should not generate messages once synced', () => { // create & synchronize two nodes - let n1 = create('abc123'), n2 = create('def456') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('abc123'), n2 = create('def456') + const s1 = initSyncState(), s2 = initSyncState() let message, patch for (let i = 0; i < 5; i++) { @@ -1182,8 +1182,8 @@ describe('Automerge', () => { it('should allow simultaneous messages during synchronization', () => { // create & synchronize two nodes - let n1 = create('abc123'), n2 = create('def456') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('abc123'), n2 = create('def456') + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { n1.put("_root", "x", i) @@ -1261,10 +1261,11 @@ describe('Automerge', () => { }) it('should assume sent changes were recieved until we hear otherwise', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState(), message = null + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() + let message = null - let items = n1.putObject("_root", "items", []) + const items = n1.putObject("_root", "items", []) n1.commit("",0) sync(n1, n2, s1, s2) @@ -1291,8 +1292,8 @@ describe('Automerge', () => { it('should work regardless of who initiates the exchange', () => { // create & synchronize two nodes - let n1 = create(), n2 = create() - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create(), n2 = create() + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { n1.put("_root", "x", i) @@ -1319,8 +1320,8 @@ describe('Automerge', () => { // lastSync is undefined. // create two peers both with divergent commits - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { n1.put("_root","x",i) @@ -1352,7 +1353,7 @@ describe('Automerge', () => { // lastSync is c9. 
// create two peers both with divergent commits - let n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1381,8 +1382,8 @@ describe('Automerge', () => { }) it('should ensure non-empty state after sync', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 3; i++) { n1.put("_root","x",i) @@ -1400,8 +1401,9 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState() + const s2 = initSyncState() // n1 makes three changes, which we sync to n2 for (let i = 0; i < 3; i++) { @@ -1412,7 +1414,8 @@ describe('Automerge', () => { sync(n1, n2, s1, s2) // save a copy of n2 as "r" to simulate recovering from crash - let r, rSyncState + let r + let rSyncState ;[r, rSyncState] = [n2.clone(), s2.clone()] // sync another few commits @@ -1446,8 +1449,8 @@ describe('Automerge', () => { }) it('should resync after one node experiences data loss without disconnecting', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 for (let i = 0; i < 3; i++) { @@ -1460,7 +1463,7 @@ describe('Automerge', () => { assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) assert.deepStrictEqual(n1.materialize(), n2.materialize()) - let n2AfterDataLoss = create('89abcdef') + const n2AfterDataLoss = create('89abcdef') // "n2" now has no data, but n1 still thinks it does. 
Note we don't do // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting @@ -1470,8 +1473,8 @@ describe('Automerge', () => { }) it('should handle changes concurrent to the last sync heads', () => { - let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') - let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() // Change 1 is known to all three nodes //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) @@ -1505,7 +1508,7 @@ describe('Automerge', () => { }) it('should handle histories with lots of branching and merging', () => { - let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') n1.put("_root","x",0); n1.commit("",0) n2.applyChanges([n1.getLastLocalChange()]) n3.applyChanges([n1.getLastLocalChange()]) @@ -1526,7 +1529,7 @@ describe('Automerge', () => { n2.applyChanges([change1]) } - let s1 = initSyncState(), s2 = initSyncState() + const s1 = initSyncState(), s2 = initSyncState() sync(n1, n2, s1, s2) // Having n3's last change concurrent to the last sync heads forces us into the slower code path @@ -1652,7 +1655,7 @@ describe('Automerge', () => { assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent // n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it - let n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + const n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() sync(n1, n3, s13, s31) assert.deepStrictEqual(n1.getHeads(), [n1hash2]) assert.deepStrictEqual(n3.getHeads(), [n1hash2]) @@ -1819,7 +1822,7 @@ describe('Automerge', () => { // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. 
- let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') + const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() let message1, message2, message3 @@ -1889,8 +1892,8 @@ describe('Automerge', () => { }) it('should allow any change to be requested', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() let message = null for (let i = 0; i < 3; i++) { @@ -1917,8 +1920,8 @@ describe('Automerge', () => { }) it('should ignore requests for a nonexistent change', () => { - let n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + const n1 = create('01234567'), n2 = create('89abcdef') + const s1 = initSyncState(), s2 = initSyncState() let message = null for (let i = 0; i < 3; i++) { @@ -1940,7 +1943,7 @@ describe('Automerge', () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') + const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') let s1 = initSyncState(), s2 = initSyncState() let msg, decodedMsg diff --git a/automerge-wasm/tsconfig.json b/automerge-wasm/tsconfig.json index 69ca846b..2627c69b 100644 --- a/automerge-wasm/tsconfig.json +++ b/automerge-wasm/tsconfig.json @@ -13,5 +13,6 @@ "target": "es2016", "typeRoots": ["./index.d.ts"] }, - "exclude": ["dist/**/*"] + "include": ["test/**/*.ts"], + "exclude": ["dist/**/*", "examples/**/*"] } From 3a44ccd52dd7ae08701adb8b02a886ef20439394 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Mon, 23 May 2022 18:49:29 +0200 Subject: [PATCH 13/17] clean up lint, simplify package, hand write an index.d.ts --- automerge-js/README.md | 4 +- automerge-js/config/cjs.json | 8 - automerge-js/config/types.json | 10 - automerge-js/examples/webpack/src/index.js | 6 +- automerge-js/package.json | 43 +- automerge-js/src/bloom.ts | 124 ---- automerge-js/src/index.ts | 61 +- automerge-js/src/proxies.ts | 4 +- automerge-js/src/text.ts | 10 +- automerge-js/src/types.ts | 2 +- automerge-js/src/uuid.ts | 4 +- automerge-js/test/helpers.ts | 2 +- .../columnar.ts => test/legacy/columnar.js} | 665 ++++-------------- .../{src/common.ts => test/legacy/common.js} | 36 +- .../encoding.ts => test/legacy/encoding.js} | 80 +-- automerge-js/test/legacy/sync.js | 480 +++++++++++++ automerge-js/test/legacy_tests.ts | 2 +- automerge-js/test/sync_test.ts | 4 +- automerge-js/test/text_test.ts | 3 +- automerge-js/tsconfig.json | 4 +- 20 files changed, 736 insertions(+), 816 deletions(-) delete mode 100644 automerge-js/config/cjs.json delete mode 100644 automerge-js/config/types.json delete mode 100644 automerge-js/src/bloom.ts rename automerge-js/{src/columnar.ts => test/legacy/columnar.js} (62%) rename automerge-js/{src/common.ts => test/legacy/common.js} (66%) rename automerge-js/{src/encoding.ts => test/legacy/encoding.js} (96%) create mode 100644 automerge-js/test/legacy/sync.js diff --git a/automerge-js/README.md b/automerge-js/README.md index 3875e2b1..3c5cde33 100644 --- a/automerge-js/README.md +++ b/automerge-js/README.md @@ -2,7 +2,5 @@ ## Todo 1. write a readme -1. final name for package - to distinguish it from the old one -1. 
get a index.d.ts you like 1. publish package - +1. make sure the example code works with published packages diff --git a/automerge-js/config/cjs.json b/automerge-js/config/cjs.json deleted file mode 100644 index 890a0422..00000000 --- a/automerge-js/config/cjs.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "target": "es2016", - "module": "commonjs", - "outDir": "../dist/cjs" - } -} diff --git a/automerge-js/config/types.json b/automerge-js/config/types.json deleted file mode 100644 index 3e7cde18..00000000 --- a/automerge-js/config/types.json +++ /dev/null @@ -1,10 +0,0 @@ - -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "declaration": true, - "emitDeclarationOnly": true, - "outFile": "../index.d.ts" - }, - "include": [ "../src/index.ts" ] -} diff --git a/automerge-js/examples/webpack/src/index.js b/automerge-js/examples/webpack/src/index.js index 7d0b8371..876c1940 100644 --- a/automerge-js/examples/webpack/src/index.js +++ b/automerge-js/examples/webpack/src/index.js @@ -1,8 +1,10 @@ -import init, * as Automerge from "automerge-js" +import * as Automerge from "automerge-js" +import init from "automerge-wasm" // hello world code that will run correctly on web or node -init().then(_ => { +init().then((api) => { + Automerge.use(api) let doc = Automerge.init() doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") const result = JSON.stringify(doc) diff --git a/automerge-js/package.json b/automerge-js/package.json index 30dc689a..728ff970 100644 --- a/automerge-js/package.json +++ b/automerge-js/package.json @@ -13,40 +13,23 @@ "LICENSE", "package.json", "index.d.ts", - "package.json", - "index.d.ts", - "dist/mjs/constants.js", - "dist/mjs/numbers.js", - "dist/mjs/sync.js", - "dist/mjs/index.js", - "dist/mjs/encoding.js", - "dist/mjs/columnar.js", - "dist/mjs/uuid.js", - "dist/mjs/counter.js", - "dist/mjs/common.js", - "dist/mjs/text.js", - "dist/mjs/proxies.js", - "dist/cjs/constants.js", - "dist/cjs/numbers.js", - "dist/cjs/sync.js", - "dist/cjs/index.js", - "dist/cjs/encoding.js", - "dist/cjs/columnar.js", - "dist/cjs/uuid.js", - "dist/cjs/counter.js", - "dist/cjs/common.js", - "dist/cjs/text.js", - "dist/cjs/proxies.js" + "dist/constants.js", + "dist/types.js", + "dist/numbers.js", + "dist/index.js", + "dist/uuid.js", + "dist/counter.js", + "dist/low_level.js", + "dist/text.js", + "dist/proxies.js" ], - "module": "./dist/mjs/index.js", - "main": "./dist/cjs/index.js", + "types": "index.d.ts", + "main": "./dist/index.js", "license": "MIT", "scripts": { "lint": "eslint src", - "build": "yarn build-cjs", - "build-cjs": "tsc -p config/cjs.json && tsc -p config/types.json", - "build-mjs": "tsc -p config/mjs.json && tsc -p config/types.json", - "test": "ts-mocha -p tsconfig.json test/**/*.ts" + "build": "tsc", + "test": "ts-mocha test/*.ts" }, "devDependencies": { "@types/expect": "^24.3.0", diff --git a/automerge-js/src/bloom.ts b/automerge-js/src/bloom.ts deleted file mode 100644 index cb66466a..00000000 --- a/automerge-js/src/bloom.ts +++ /dev/null @@ -1,124 +0,0 @@ -/** - * Implementation of the data synchronisation protocol that brings a local and a remote document - * into the same state. This is typically used when two nodes have been disconnected for some time, - * and need to exchange any changes that happened while they were disconnected. The two nodes that - * are syncing could be client and server, or server and client, or two peers with symmetric roles. 
- * - * The protocol is based on this paper: Martin Kleppmann and Heidi Howard. Byzantine Eventual - * Consistency and the Fundamental Limits of Peer-to-Peer Databases. https://arxiv.org/abs/2012.00472 - * - * The protocol assumes that every time a node successfully syncs with another node, it remembers - * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The - * next time we try to sync with the same node, we start from the assumption that the other node's - * document version is no older than the outcome of the last sync, so we only need to exchange any - * changes that are more recent than the last sync. This assumption may not be true if the other - * node did not correctly persist its state (perhaps it crashed before writing the result of the - * last sync to disk), and we fall back to sending the entire document in this case. - */ - -import { hexStringToBytes, Encoder, Decoder } from './encoding' - -// These constants correspond to a 1% false positive rate. The values can be changed without -// breaking compatibility of the network protocol, since the parameters used for a particular -// Bloom filter are encoded in the wire format. -const BITS_PER_ENTRY = 10, NUM_PROBES = 7 - -/** - * A Bloom filter implementation that can be serialised to a byte array for transmission - * over a network. The entries that are added are assumed to already be SHA-256 hashes, - * so this implementation does not perform its own hashing. - */ -export class BloomFilter { - numEntries: number; - numBitsPerEntry: number; - numProbes: number; - bits: Uint8Array; - - constructor (arg) { - if (Array.isArray(arg)) { - // arg is an array of SHA256 hashes in hexadecimal encoding - this.numEntries = arg.length - this.numBitsPerEntry = BITS_PER_ENTRY - this.numProbes = NUM_PROBES - this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) - for (const hash of arg) this.addHash(hash) - } else if (arg instanceof Uint8Array) { - if (arg.byteLength === 0) { - this.numEntries = 0 - this.numBitsPerEntry = 0 - this.numProbes = 0 - this.bits = arg - } else { - const decoder = new Decoder(arg) - this.numEntries = decoder.readUint32() - this.numBitsPerEntry = decoder.readUint32() - this.numProbes = decoder.readUint32() - this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) - } - } else { - throw new TypeError('invalid argument') - } - } - - /** - * Returns the Bloom filter state, encoded as a byte array. - */ - get bytes() { - if (this.numEntries === 0) return new Uint8Array(0) - const encoder = new Encoder() - encoder.appendUint32(this.numEntries) - encoder.appendUint32(this.numBitsPerEntry) - encoder.appendUint32(this.numProbes) - encoder.appendRawBytes(this.bits) - return encoder.buffer - } - - /** - * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits - * in the Bloom filter need to be tested or set for this particular entry. We do this by - * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers, - * and then using triple hashing to compute the probe indexes. The algorithm comes from: - * - * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification. - * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004. 
- * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf - */ - getProbes(hash) { - const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength - if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) - // on the next three lines, the right shift means interpret value as unsigned - let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo - let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo - const z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo - const probes = [x] - for (let i = 1; i < this.numProbes; i++) { - x = (x + y) % modulo - y = (y + z) % modulo - probes.push(x) - } - return probes - } - - /** - * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). - */ - addHash(hash) { - for (const probe of this.getProbes(hash)) { - this.bits[probe >>> 3] |= 1 << (probe & 7) - } - } - - /** - * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter. - */ - containsHash(hash) { - if (this.numEntries === 0) return false - for (const probe of this.getProbes(hash)) { - if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { - return false - } - } - return true - } -} - diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts index e20f32a2..02f864b1 100644 --- a/automerge-js/src/index.ts +++ b/automerge-js/src/index.ts @@ -4,14 +4,12 @@ export { uuid } from './uuid' import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" -import { isObject } from "./common" - -import { Text, Counter } from "./types" +import { Counter } from "./types" export { Text, Counter, Int, Uint, Float64 } from "./types" import { ApiHandler, LowLevelApi, UseApi } from "./low_level" -import { ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "./types" +import { ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "./types" import { SyncState, SyncMessage, DecodedSyncMessage, AutomergeValue } from "./types" export type ChangeOptions = { message?: string, time?: number } @@ -30,7 +28,7 @@ export function use(api: LowLevelApi) { } function _state<T>(doc: Doc<T>) : Automerge { - const state = (doc)[STATE] + const state = Reflect.get(doc,STATE) if (state == undefined) { throw new RangeError("must be the document root") } @@ -38,19 +36,19 @@ function _state<T>(doc: Doc<T>) : Automerge { } function _frozen<T>(doc: Doc<T>) : boolean { - return (doc)[FROZEN] === true + return Reflect.get(doc,FROZEN) === true } function _heads<T>(doc: Doc<T>) : Heads | undefined { - return (doc)[HEADS] + return Reflect.get(doc,HEADS) } function _obj<T>(doc: Doc<T>) : ObjID { - return (doc)[OBJECT_ID] + return Reflect.get(doc,OBJECT_ID) } function _readonly<T>(doc: Doc<T>) : boolean { - return (doc)[READ_ONLY] === true + return Reflect.get(doc,READ_ONLY) === true } export function init<T>(actor?: ActorId) : Doc<T>{ @@ -181,16 +179,15 @@ export function getActorId<T>(doc: Doc<T>) : ActorId { return state.getActorId() } -function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { +type Conflicts = { [key: string]: AutomergeValue } + +function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : Conflicts | undefined { const values = context.getAll(objectId, prop) if (values.length <= 1) { return } - const result : { [key: ObjID]:
AutomergeValue } = {} + const result : Conflicts = {} for (const fullVal of values) { - //const datatype = fullVal[0] - //const value = fullVal[1] - //switch (datatype) { switch (fullVal[0]) { case "map": result[fullVal[1]] = mapProxy(context, fullVal[1], [ prop ], true) @@ -225,7 +222,7 @@ function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : any { return result } -export function getConflicts(doc: Doc, prop: Prop) : any { +export function getConflicts(doc: Doc, prop: Prop) : Conflicts | undefined { const state = _state(doc) const objectId = _obj(doc) return conflictAt(state, objectId, prop) @@ -274,7 +271,6 @@ export function applyChanges(doc: Doc, changes: Change[]) : [Doc] { } export function getHistory(doc: Doc) : State[] { - const actor = getActorId(doc) const history = getAllChanges(doc) return history.map((change, index) => ({ get change () { @@ -289,7 +285,7 @@ export function getHistory(doc: Doc) : State[] { } // FIXME : no tests -export function equals(val1: any, val2: any) : boolean { +export function equals(val1: unknown, val2: unknown) : boolean { if (!isObject(val1) || !isObject(val2)) return val1 === val2 const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() if (keys1.length !== keys2.length) return false @@ -373,27 +369,14 @@ export function dump(doc: Doc) { state.dump() } -export function toJS(doc: any) : any { - if (typeof doc === "object") { - if (doc instanceof Uint8Array) { - return doc - } - if (doc === null) { - return doc - } - if (doc instanceof Array) { - return doc.map((a) => toJS(a)) - } - if (doc instanceof Text) { - return doc.map((a: any) => toJS(a)) - } - const tmp : any = {} - for (const index in doc) { - tmp[index] = toJS(doc[index]) - } - return tmp - } else { - return doc - } +// FIXME - return T? +export function toJS(doc: Doc) : MaterializeValue { + let state = _state(doc) + let heads = _heads(doc) + return state.materialize("_root", heads) } + +function isObject(obj: unknown) : obj is Record { + return typeof obj === 'object' && obj !== null +} diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts index 05ac2873..fbb044a6 100644 --- a/automerge-js/src/proxies.ts +++ b/automerge-js/src/proxies.ts @@ -592,10 +592,10 @@ function listMethods(target) { function textMethods(target) { const {context, objectId, heads } = target const methods = { - set (index, value) { + set (index: number, value) { return this[index] = value }, - get (index) : AutomergeValue { + get (index: number) : AutomergeValue { return this[index] }, toString () : string { diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts index c58c1efa..26f4a861 100644 --- a/automerge-js/src/text.ts +++ b/automerge-js/src/text.ts @@ -20,7 +20,7 @@ export class Text { return this.elems.length } - get (index) : Value { + get (index: number) : Value { return this.elems[index] } @@ -103,7 +103,7 @@ export class Text { /** * Inserts new list items `values` starting at position `index`. */ - insertAt(index: number, ...values) { + insertAt(index: number, ...values: Value[]) { this.elems.splice(index, 0, ... values) } @@ -111,12 +111,12 @@ export class Text { * Deletes `numDelete` list items starting at position `index`. * if `numDelete` is not given, one item is deleted. */ - deleteAt(index, numDelete = 1) { + deleteAt(index: number, numDelete = 1) { this.elems.splice(index, numDelete) } - map(callback, thisArg?) 
{ - this.elems.map(callback, thisArg) + map(callback: (e: Value) => T) { + this.elems.map(callback) } diff --git a/automerge-js/src/types.ts b/automerge-js/src/types.ts index 609c71e7..5fb63abd 100644 --- a/automerge-js/src/types.ts +++ b/automerge-js/src/types.ts @@ -1,5 +1,5 @@ -export { Actor as ActorId, Value, Prop, ObjID, Change, DecodedChange, Heads, Automerge } from "automerge-wasm" +export { Actor as ActorId, Value, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-wasm" export { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm" export { Text } from "./text" diff --git a/automerge-js/src/uuid.ts b/automerge-js/src/uuid.ts index 549b0fc5..5ddb5ae6 100644 --- a/automerge-js/src/uuid.ts +++ b/automerge-js/src/uuid.ts @@ -7,8 +7,8 @@ function defaultFactory() { let factory = defaultFactory interface UUIDFactory extends Function { - setFactory(f: typeof factory); - reset(); + setFactory(f: typeof factory): void; + reset(): void; } export const uuid : UUIDFactory = () => { diff --git a/automerge-js/test/helpers.ts b/automerge-js/test/helpers.ts index 76cae7d6..d5292130 100644 --- a/automerge-js/test/helpers.ts +++ b/automerge-js/test/helpers.ts @@ -1,5 +1,5 @@ import * as assert from 'assert' -import { Encoder } from '../src/encoding' +import { Encoder } from './legacy/encoding' // Assertion that succeeds if the first argument deepStrictEquals at least one of the // subsequent arguments (but we don't care which one) diff --git a/automerge-js/src/columnar.ts b/automerge-js/test/legacy/columnar.js similarity index 62% rename from automerge-js/src/columnar.ts rename to automerge-js/test/legacy/columnar.js index b1776910..b97e6275 100644 --- a/automerge-js/src/columnar.ts +++ b/automerge-js/test/legacy/columnar.js @@ -1,19 +1,9 @@ -import * as pako from 'pako' -import { parseOpId, equalBytes } from './common' -import { +const pako = require('pako') +const { copyObject, parseOpId, equalBytes } = require('./common') +const { utf8ToString, hexStringToBytes, bytesToHexString, Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder -} from './encoding' - - -interface Op { - id: string; - action: string; - obj: string; - elemId?: string; - key?: string; - pred: string[]; -} +} = require('./encoding') // Maybe we should be using the platform's built-in hash implementation? // Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have @@ -28,7 +18,7 @@ interface Op { // - It does not need a secure source of random bits and does not need to be // constant-time; // - I have reviewed the source code and it seems pretty reasonable. -import { Hash } from 'fast-sha256' +const { Hash } = require('fast-sha256') // These bytes don't mean anything, they were generated randomly const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83]) @@ -42,7 +32,7 @@ const CHUNK_TYPE_DEFLATE = 2 // like CHUNK_TYPE_CHANGE but with DEFLATE compress const DEFLATE_MIN_SIZE = 256 // The least-significant 3 bits of a columnId indicate its datatype -export const COLUMN_TYPE = { +const COLUMN_TYPE = { GROUP_CARD: 0, ACTOR_ID: 1, INT_RLE: 2, INT_DELTA: 3, BOOLEAN: 4, STRING_RLE: 5, VALUE_LEN: 6, VALUE_RAW: 7 } @@ -53,15 +43,15 @@ const COLUMN_TYPE_DEFLATE = 8 // In the values in a column of type VALUE_LEN, the bottom four bits indicate the type of the value, // one of the following types in VALUE_TYPE. 
The higher bits indicate the length of the value in the // associated VALUE_RAW column (in bytes). -export const VALUE_TYPE = { +const VALUE_TYPE = { NULL: 0, FALSE: 1, TRUE: 2, LEB128_UINT: 3, LEB128_INT: 4, IEEE754: 5, UTF8: 6, BYTES: 7, COUNTER: 8, TIMESTAMP: 9, MIN_UNKNOWN: 10, MAX_UNKNOWN: 15 } // make* actions must be at even-numbered indexes in this list -export const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] +const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] -export const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} +const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} const COMMON_COLUMNS = [ {columnName: 'objActor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, @@ -79,13 +69,13 @@ const COMMON_COLUMNS = [ {columnName: 'chldCtr', columnId: 6 << 4 | COLUMN_TYPE.INT_DELTA} ] -export const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ +const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ {columnName: 'predNum', columnId: 7 << 4 | COLUMN_TYPE.GROUP_CARD}, {columnName: 'predActor', columnId: 7 << 4 | COLUMN_TYPE.ACTOR_ID}, {columnName: 'predCtr', columnId: 7 << 4 | COLUMN_TYPE.INT_DELTA} ]) -export const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ +const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ {columnName: 'succNum', columnId: 8 << 4 | COLUMN_TYPE.GROUP_CARD}, {columnName: 'succActor', columnId: 8 << 4 | COLUMN_TYPE.ACTOR_ID}, {columnName: 'succCtr', columnId: 8 << 4 | COLUMN_TYPE.INT_DELTA} @@ -141,13 +131,13 @@ function compareParsedOpIds(id1, id2) { * false. */ function parseAllOpIds(changes, single) { - const actors : any = {}, newChanges : any = [] + const actors = {}, newChanges = [] for (let change of changes) { - change = { ... change } + change = copyObject(change) actors[change.actor] = true change.ops = expandMultiOps(change.ops, change.startOp, change.actor) change.ops = change.ops.map(op => { - op = { ... op } + op = copyObject(op) if (op.obj !== '_root') op.obj = parseOpId(op.obj) if (op.elemId && op.elemId !== '_head') op.elemId = parseOpId(op.elemId) if (op.child) op.child = parseOpId(op.child) @@ -155,7 +145,7 @@ function parseAllOpIds(changes, single) { if (op.obj.actorId) actors[op.obj.actorId] = true if (op.elemId && op.elemId.actorId) actors[op.elemId.actorId] = true if (op.child && op.child.actorId) actors[op.child.actorId] = true - for (const pred of op.pred) actors[pred.actorId] = true + for (let pred of op.pred) actors[pred.actorId] = true return op }) newChanges.push(change) @@ -165,10 +155,10 @@ function parseAllOpIds(changes, single) { if (single) { actorIds = [changes[0].actor].concat(actorIds.filter(actor => actor !== changes[0].actor)) } - for (const change of newChanges) { + for (let change of newChanges) { change.actorNum = actorIds.indexOf(change.actor) for (let i = 0; i < change.ops.length; i++) { - const op = change.ops[i] + let op = change.ops[i] op.id = {counter: change.startOp + i, actorNum: change.actorNum, actorId: change.actor} op.obj = actorIdToActorNum(op.obj, actorIds) op.elemId = actorIdToActorNum(op.elemId, actorIds) @@ -232,21 +222,34 @@ function encodeOperationAction(op, columns) { } /** - * Encodes the integer `value` into the two columns `valLen` and `valRaw`, - * with the datatype tag set to `typeTag`. If `typeTag` is zero, it is set - * automatically to signed or unsigned depending on the sign of the value. - * Values with non-zero type tags are always encoded as signed integers. 
+ * Given the datatype for a number, determine the typeTag and the value to encode + * otherwise guess */ -function encodeInteger(value, typeTag, columns) { - let numBytes - if (value < 0 || typeTag > 0) { - numBytes = columns.valRaw.appendInt53(value) - if (!typeTag) typeTag = VALUE_TYPE.LEB128_INT - } else { - numBytes = columns.valRaw.appendUint53(value) - typeTag = VALUE_TYPE.LEB128_UINT +function getNumberTypeAndValue(op) { + switch (op.datatype) { + case "counter": + return [ VALUE_TYPE.COUNTER, op.value ] + case "timestamp": + return [ VALUE_TYPE.TIMESTAMP, op.value ] + case "uint": + return [ VALUE_TYPE.LEB128_UINT, op.value ] + case "int": + return [ VALUE_TYPE.LEB128_INT, op.value ] + case "float64": { + const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + view64.setFloat64(0, op.value, true) + return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ] + } + default: + // increment operators get resolved here ... + if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) { + return [ VALUE_TYPE.LEB128_INT, op.value ] + } else { + const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + view64.setFloat64(0, op.value, true) + return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ] + } } - columns.valLen.appendValue(numBytes << 4 | typeTag) } /** @@ -266,33 +269,23 @@ function encodeValue(op, columns) { } else if (ArrayBuffer.isView(op.value)) { const numBytes = columns.valRaw.appendRawBytes(new Uint8Array(op.value.buffer)) columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.BYTES) - } else if (op.datatype === 'counter' && typeof op.value === 'number') { - encodeInteger(op.value, VALUE_TYPE.COUNTER, columns) - } else if (op.datatype === 'timestamp' && typeof op.value === 'number') { - encodeInteger(op.value, VALUE_TYPE.TIMESTAMP, columns) + } else if (typeof op.value === 'number') { + let [typeTag, value] = getNumberTypeAndValue(op) + let numBytes + if (typeTag === VALUE_TYPE.LEB128_UINT) { + numBytes = columns.valRaw.appendUint53(value) + } else if (typeTag === VALUE_TYPE.IEEE754) { + numBytes = columns.valRaw.appendRawBytes(value) + } else { + numBytes = columns.valRaw.appendInt53(value) + } + columns.valLen.appendValue(numBytes << 4 | typeTag) } else if (typeof op.datatype === 'number' && op.datatype >= VALUE_TYPE.MIN_UNKNOWN && op.datatype <= VALUE_TYPE.MAX_UNKNOWN && op.value instanceof Uint8Array) { const numBytes = columns.valRaw.appendRawBytes(op.value) columns.valLen.appendValue(numBytes << 4 | op.datatype) } else if (op.datatype) { throw new RangeError(`Unknown datatype ${op.datatype} for value ${op.value}`) - } else if (typeof op.value === 'number') { - if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) { - encodeInteger(op.value, 0, columns) - } else { - // Encode number in 32-bit float if this can be done without loss of precision - const buf32 = new ArrayBuffer(4), view32 = new DataView(buf32) - view32.setFloat32(0, op.value, true) // true means little-endian - if (view32.getFloat32(0, true) === op.value) { - columns.valRaw.appendRawBytes(new Uint8Array(buf32)) - columns.valLen.appendValue(4 << 4 | VALUE_TYPE.IEEE754) - } else { - const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) - view64.setFloat64(0, op.value, true) // true means little-endian - columns.valRaw.appendRawBytes(new Uint8Array(buf64)) - columns.valLen.appendValue(8 << 4 | VALUE_TYPE.IEEE754) - } - } } else { throw new RangeError(`Unsupported value in operation: 
${op.value}`) } @@ -304,7 +297,7 @@ function encodeValue(op, columns) { * form `{value: value, datatype: datatypeTag}` where `value` is a JavaScript primitive datatype * corresponding to the value, and `datatypeTag` is a datatype annotation such as 'counter'. */ -export function decodeValue(sizeTag, bytes) { +function decodeValue(sizeTag, bytes) { if (sizeTag === VALUE_TYPE.NULL) { return {value: null} } else if (sizeTag === VALUE_TYPE.FALSE) { @@ -315,15 +308,13 @@ export function decodeValue(sizeTag, bytes) { return {value: utf8ToString(bytes)} } else { if (sizeTag % 16 === VALUE_TYPE.LEB128_UINT) { - return {value: new Decoder(bytes).readUint53()} + return {value: new Decoder(bytes).readUint53(), datatype: "uint"} } else if (sizeTag % 16 === VALUE_TYPE.LEB128_INT) { - return {value: new Decoder(bytes).readInt53()} + return {value: new Decoder(bytes).readInt53(), datatype: "int"} } else if (sizeTag % 16 === VALUE_TYPE.IEEE754) { const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength) - if (bytes.byteLength === 4) { - return {value: view.getFloat32(0, true)} // true means little-endian - } else if (bytes.byteLength === 8) { - return {value: view.getFloat64(0, true)} + if (bytes.byteLength === 8) { + return {value: view.getFloat64(0, true), datatype: "float64"} } else { throw new RangeError(`Invalid length for floating point number: ${bytes.byteLength}`) } @@ -373,11 +364,11 @@ function decodeValueColumns(columns, colIndex, actorIds, result) { * Encodes an array of operations in a set of columns. The operations need to * be parsed with `parseAllOpIds()` beforehand. If `forDocument` is true, we use * the column structure of a whole document, otherwise we use the column - * structure for an individual change. Returns an array of `{id, name, encoder}` - * objects. + * structure for an individual change. Returns an array of + * `{columnId, columnName, encoder}` objects. */ function encodeOps(ops, forDocument) { - const columns : any = { + const columns = { objActor : new RLEEncoder('uint'), objCtr : new RLEEncoder('uint'), keyActor : new RLEEncoder('uint'), @@ -403,7 +394,7 @@ function encodeOps(ops, forDocument) { columns.predActor = new RLEEncoder('uint') } - for (const op of ops) { + for (let op of ops) { encodeObjectId(op, columns) encodeOperationKey(op, columns) columns.insert.appendValue(!!op.insert) @@ -437,22 +428,32 @@ function encodeOps(ops, forDocument) { } } - const columnList : any = [] - for (const {columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) { - if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]}) + let columnList = [] + for (let {columnName, columnId} of forDocument ? 
DOC_OPS_COLUMNS : CHANGE_COLUMNS) { + if (columns[columnName]) columnList.push({columnId, columnName, encoder: columns[columnName]}) + } + return columnList.sort((a, b) => a.columnId - b.columnId) +} + +function validDatatype(value, datatype) { + if (datatype === undefined) { + return (typeof value === 'string' || typeof value === 'boolean' || value === null) + } else { + return typeof value === 'number' } - return columnList.sort((a, b) => a.id - b.id) } function expandMultiOps(ops, startOp, actor) { let opNum = startOp - const expandedOps : any = [] + let expandedOps = [] for (const op of ops) { if (op.action === 'set' && op.values && op.insert) { if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') let lastElemId = op.elemId + const datatype = op.datatype for (const value of op.values) { - expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, value, pred: [], insert: true}) + if (!validDatatype(value, datatype)) throw new RangeError(`Decode failed: bad value/datatype association (${value},${datatype})`) + expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, datatype, value, pred: [], insert: true}) lastElemId = `${opNum}@${actor}` opNum += 1 } @@ -480,12 +481,12 @@ function expandMultiOps(ops, startOp, actor) { * individual change. */ function decodeOps(ops, forDocument) { - const newOps : any = [] - for (const op of ops) { + const newOps = [] + for (let op of ops) { const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) const action = ACTIONS[op.action] || op.action - const newOp : any = elemId ? {obj, elemId, action} : {obj, key: op.keyStr, action} + const newOp = elemId ? {obj, elemId, action} : {obj, key: op.keyStr, action} newOp.insert = !!op.insert if (ACTIONS[op.action] === 'set' || ACTIONS[op.action] === 'inc') { newOp.value = op.valLen @@ -513,7 +514,7 @@ function decodeOps(ops, forDocument) { */ function checkSortedOpIds(opIds) { let last = null - for (const opId of opIds) { + for (let opId of opIds) { if (last && compareParsedOpIds(last, opId) !== -1) { throw new RangeError('operation IDs are not in ascending order') } @@ -521,7 +522,7 @@ function checkSortedOpIds(opIds) { } } -export function encoderByColumnId(columnId) { +function encoderByColumnId(columnId) { if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { return new DeltaEncoder() } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { @@ -535,7 +536,7 @@ export function encoderByColumnId(columnId) { } } -export function decoderByColumnId(columnId, buffer) { +function decoderByColumnId(columnId, buffer) { if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { return new DeltaDecoder(buffer) } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { @@ -549,10 +550,9 @@ export function decoderByColumnId(columnId, buffer) { } } -export function makeDecoders(columns, columnSpec) { +function makeDecoders(columns, columnSpec) { const emptyBuf = new Uint8Array(0) - const decoders : any = [] - let columnIndex = 0, specIndex = 0 + let decoders = [], columnIndex = 0, specIndex = 0 while (columnIndex < columns.length || specIndex < columnSpec.length) { if (columnIndex === columns.length || @@ -576,22 +576,20 @@ export function makeDecoders(columns, columnSpec) { function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) - const parsedRows : any = [] + let parsedRows = [] while (columns.some(col => !col.decoder.done)) { - const row = {} - 
let col = 0 + let row = {}, col = 0 while (col < columns.length) { const columnId = columns[col].columnId - const groupId = columnId >> 4 - let groupCols = 1 + let groupId = columnId >> 4, groupCols = 1 while (col + groupCols < columns.length && columns[col + groupCols].columnId >> 4 === groupId) { groupCols++ } if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { - const values : any = [], count = columns[col].decoder.readValue() + const values = [], count = columns[col].decoder.readValue() for (let i = 0; i < count; i++) { - const value = {} + let value = {} for (let colOffset = 1; colOffset < groupCols; colOffset++) { decodeValueColumns(columns, col + colOffset, actorIds, value) } @@ -613,8 +611,7 @@ function decodeColumnInfo(decoder) { // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 - let lastColumnId = -1 - const columns : any = [], numColumns = decoder.readUint53() + let lastColumnId = -1, columns = [], numColumns = decoder.readUint53() for (let i = 0; i < numColumns; i++) { const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { @@ -629,18 +626,18 @@ function decodeColumnInfo(decoder) { function encodeColumnInfo(encoder, columns) { const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0) encoder.appendUint53(nonEmptyColumns.length) - for (const column of nonEmptyColumns) { - encoder.appendUint53(column.id) + for (let column of nonEmptyColumns) { + encoder.appendUint53(column.columnId) encoder.appendUint53(column.encoder.buffer.byteLength) } } function decodeChangeHeader(decoder) { - const numDeps = decoder.readUint53(), deps : any = [] + const numDeps = decoder.readUint53(), deps = [] for (let i = 0; i < numDeps; i++) { deps.push(bytesToHexString(decoder.readRawBytes(32))) } - const change : any = { + let change = { actor: decoder.readHexString(), seq: decoder.readUint53(), startOp: decoder.readUint53(), @@ -696,7 +693,7 @@ function decodeContainerHeader(decoder, computeHash) { const hashStartOffset = decoder.offset const chunkType = decoder.readByte() const chunkLength = decoder.readUint53() - const header : any = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} + const header = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} if (computeHash) { const sha256 = new Hash() @@ -710,25 +707,14 @@ function decodeContainerHeader(decoder, computeHash) { return header } -/** - * Returns the checksum of a change (bytes 4 to 7) as a 32-bit unsigned integer. 
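Together with decodeContainerHeader above, this pins down the container envelope: bytes 0-3 are the magic bytes 85 6f 4a 83, bytes 4-7 are a checksum (the leading four bytes of the chunk's SHA-256, if I am reading the hash check in decodeContainerHeader correctly), and byte 8 is the chunk type. A hedged sketch of reading just that fixed-offset prefix:

const MAGIC_BYTES = [0x85, 0x6f, 0x4a, 0x83]

// Peek at the fixed-offset prefix of an encoded container; the LEB128 chunk
// length and the chunk data follow byte 8.
function peekContainer(buf: Uint8Array): { checksum: number, chunkType: number } {
  for (let i = 0; i < 4; i++) {
    if (buf[i] !== MAGIC_BYTES[i]) throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83')
  }
  // The checksum is big-endian; >>> 0 reinterprets it as an unsigned 32-bit int
  const checksum = ((buf[4] << 24) | (buf[5] << 16) | (buf[6] << 8) | buf[7]) >>> 0
  return { checksum, chunkType: buf[8] } // 0 = document, 1 = change, 2 = deflated change
}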
- */ -export function getChangeChecksum(change) { - if (change[0] !== MAGIC_BYTES[0] || change[1] !== MAGIC_BYTES[1] || - change[2] !== MAGIC_BYTES[2] || change[3] !== MAGIC_BYTES[3]) { - throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') - } - return ((change[4] << 24) | (change[5] << 16) | (change[6] << 8) | change[7]) >>> 0 -} - -export function encodeChange(changeObj) { +function encodeChange(changeObj) { const { changes, actorIds } = parseAllOpIds([changeObj], true) - const change : any = changes[0] + const change = changes[0] const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') encoder.appendUint53(change.deps.length) - for (const hash of change.deps.slice().sort()) { + for (let hash of change.deps.slice().sort()) { encoder.appendRawBytes(hexStringToBytes(hash)) } encoder.appendHexString(change.actor) @@ -737,11 +723,11 @@ export function encodeChange(changeObj) { encoder.appendInt53(change.time) encoder.appendPrefixedString(change.message || '') encoder.appendUint53(actorIds.length - 1) - for (const actor of actorIds.slice(1)) encoder.appendHexString(actor) + for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) - const columns : any = encodeOps(change.ops, false) + const columns = encodeOps(change.ops, false) encodeColumnInfo(encoder, columns) - for (const column of columns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of columns) encoder.appendRawBytes(column.encoder.buffer) if (change.extraBytes) encoder.appendRawBytes(change.extraBytes) }) @@ -752,16 +738,16 @@ export function encodeChange(changeObj) { return (bytes.byteLength >= DEFLATE_MIN_SIZE) ? deflateChange(bytes) : bytes } -export function decodeChangeColumns(buffer) { +function decodeChangeColumns(buffer) { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) const decoder = new Decoder(buffer) - const header : any = decodeContainerHeader(decoder, true) + const header = decodeContainerHeader(decoder, true) const chunkDecoder = new Decoder(header.chunkData) if (!decoder.done) throw new RangeError('Encoded change has trailing data') if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const change : any = decodeChangeHeader(chunkDecoder) - const columns : any = decodeColumnInfo(chunkDecoder) + const change = decodeChangeHeader(chunkDecoder) + const columns = decodeColumnInfo(chunkDecoder) for (let i = 0; i < columns.length; i++) { if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) { throw new RangeError('change must not contain deflated columns') @@ -781,8 +767,8 @@ export function decodeChangeColumns(buffer) { /** * Decodes one change in binary format into its JS object representation. */ -export function decodeChange(buffer) { - const change : any = decodeChangeColumns(buffer) +function decodeChange(buffer) { + const change = decodeChangeColumns(buffer) change.ops = decodeOps(decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), false) delete change.actorIds delete change.columns @@ -794,13 +780,13 @@ export function decodeChange(buffer) { * the operations. Saves work when we only need to inspect the headers. Only * computes the hash of the change if `computeHash` is true. 
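A usage sketch for this trio of functions, with every field value invented for illustration (the module is consumed via require, as in the tests):

const { encodeChange, decodeChange } = require('./columnar')

// Round-trip one change through the binary format
const change = {
  actor: '1234abcd',   // actor IDs are lowercase hex strings
  seq: 1,              // first change by this actor
  startOp: 1,
  time: 0,
  message: '',
  deps: [],            // hashes of causally preceding changes; none here
  ops: [
    { action: 'set', obj: '_root', key: 'title', insert: false, value: 'hello', pred: [] }
  ]
}

const encoded = encodeChange(change)  // Uint8Array: magic bytes, checksum, change chunk
const decoded = decodeChange(encoded) // a plain object again, now with its hash filled in
console.log(decoded.message === change.message) // true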
*/ -export function decodeChangeMeta(buffer, computeHash) : any { +function decodeChangeMeta(buffer, computeHash) { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) - const header : any = decodeContainerHeader(new Decoder(buffer), computeHash) + const header = decodeContainerHeader(new Decoder(buffer), computeHash) if (header.chunkType !== CHUNK_TYPE_CHANGE) { throw new RangeError('Buffer chunk type is not a change') } - const meta : any = decodeChangeHeader(new Decoder(header.chunkData)) + const meta = decodeChangeHeader(new Decoder(header.chunkData)) meta.change = buffer if (computeHash) meta.hash = header.hash return meta @@ -840,9 +826,8 @@ function inflateChange(buffer) { * Takes an Uint8Array that may contain multiple concatenated changes, and * returns an array of subarrays, each subarray containing one change. */ -export function splitContainers(buffer) { - const decoder = new Decoder(buffer), chunks : any = [] - let startOffset = 0 +function splitContainers(buffer) { + let decoder = new Decoder(buffer), chunks = [], startOffset = 0 while (!decoder.done) { decodeContainerHeader(decoder, false) chunks.push(buffer.subarray(startOffset, decoder.offset)) @@ -855,10 +840,10 @@ export function splitContainers(buffer) { * Decodes a list of changes from the binary format into JS objects. * `binaryChanges` is an array of `Uint8Array` objects. */ -export function decodeChanges(binaryChanges) { - let decoded : any = [] - for (const binaryChange of binaryChanges) { - for (const chunk of splitContainers(binaryChange)) { +function decodeChanges(binaryChanges) { + let decoded = [] + for (let binaryChange of binaryChanges) { + for (let chunk of splitContainers(binaryChange)) { if (chunk[8] === CHUNK_TYPE_DOCUMENT) { decoded = decoded.concat(decodeDocument(chunk)) } else if (chunk[8] === CHUNK_TYPE_CHANGE || chunk[8] === CHUNK_TYPE_DEFLATE) { @@ -883,84 +868,14 @@ function sortOpIds(a, b) { return 0 } -function groupDocumentOps(changes) { - const byObjectId = {}, byReference = {}, objectType = {} - for (const change of changes) { - for (let i = 0; i < change.ops.length; i++) { - const op = change.ops[i], opId = `${op.id.counter}@${op.id.actorId}` - const objectId = (op.obj === '_root') ? '_root' : `${op.obj.counter}@${op.obj.actorId}` - if (op.action.startsWith('make')) { - objectType[opId] = op.action - if (op.action === 'makeList' || op.action === 'makeText') { - byReference[opId] = {'_head': []} - } - } - - let key - if (objectId === '_root' || objectType[objectId] === 'makeMap' || objectType[objectId] === 'makeTable') { - key = op.key - } else if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { - if (op.insert) { - key = opId - const ref = (op.elemId === '_head') ? 
'_head' : `${op.elemId.counter}@${op.elemId.actorId}` - byReference[objectId][ref].push(opId) - byReference[objectId][opId] = [] - } else { - key = `${op.elemId.counter}@${op.elemId.actorId}` - } - } else { - throw new RangeError(`Unknown object type for object ${objectId}`) - } - - if (!byObjectId[objectId]) byObjectId[objectId] = {} - if (!byObjectId[objectId][key]) byObjectId[objectId][key] = {} - byObjectId[objectId][key][opId] = op - op.succ = [] - - for (const pred of op.pred) { - const predId = `${pred.counter}@${pred.actorId}` - if (!byObjectId[objectId][key][predId]) { - throw new RangeError(`No predecessor operation ${predId}`) - } - byObjectId[objectId][key][predId].succ.push(op.id) - } - } - } - - const ops : any[] = [] - for (const objectId of Object.keys(byObjectId).sort(sortOpIds)) { - let keys : string[] = [] - if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { - const stack = ['_head'] - while (stack.length > 0) { - const key : any = stack.pop() - if (key !== '_head') keys.push(key) - for (const opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) - } - } else { - // FIXME JavaScript sorts based on UTF-16 encoding. We should change this to use the UTF-8 - // encoding instead (the sort order will be different beyond the basic multilingual plane) - keys = Object.keys(byObjectId[objectId]).sort() - } - - for (const key of keys) { - for (const opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { - const op : any = byObjectId[objectId][key][opId] - if (op.action !== 'del') ops.push(op) - } - } - } - return ops -} - /** * Takes a set of operations `ops` loaded from an encoded document, and * reconstructs the changes that they originally came from. * Does not return anything, only mutates `changes`. */ function groupChangeOps(changes, ops) { - const changesByActor = {} // map from actorId to array of changes by that actor - for (const change of changes) { + let changesByActor = {} // map from actorId to array of changes by that actor + for (let change of changes) { change.ops = [] if (!changesByActor[change.actor]) changesByActor[change.actor] = [] if (change.seq !== changesByActor[change.actor].length + 1) { @@ -972,12 +887,12 @@ function groupChangeOps(changes, ops) { changesByActor[change.actor].push(change) } - const opsById : { [key:string]: Op } = {} - for (const op of ops) { + let opsById = {} + for (let op of ops) { if (op.action === 'del') throw new RangeError('document should not contain del operations') op.pred = opsById[op.id] ? opsById[op.id].pred : [] opsById[op.id] = op - for (const succ of op.succ) { + for (let succ of op.succ) { if (!opsById[succ]) { if (op.elemId) { const elemId = op.insert ? 
op.id : op.elemId @@ -990,11 +905,11 @@ function groupChangeOps(changes, ops) { } delete op.succ } - for (const op of Object.values(opsById)) { + for (let op of Object.values(opsById)) { if (op.action === 'del') ops.push(op) } - for (const op of ops) { + for (let op of ops) { const { counter, actorId } = parseOpId(op.id) const actorChanges = changesByActor[actorId] // Binary search to find the change that should contain this operation @@ -1013,7 +928,7 @@ function groupChangeOps(changes, ops) { actorChanges[left].ops.push(op) } - for (const change of changes) { + for (let change of changes) { change.ops.sort((op1, op2) => sortOpIds(op1.id, op2.id)) change.startOp = change.maxOp - change.ops.length + 1 delete change.maxOp @@ -1027,63 +942,12 @@ function groupChangeOps(changes, ops) { } } -function encodeDocumentChanges(changes) { - const columns = { // see DOCUMENT_COLUMNS - actor : new RLEEncoder('uint'), - seq : new DeltaEncoder(), - maxOp : new DeltaEncoder(), - time : new DeltaEncoder(), - message : new RLEEncoder('utf8'), - depsNum : new RLEEncoder('uint'), - depsIndex : new DeltaEncoder(), - extraLen : new RLEEncoder('uint'), - extraRaw : new Encoder() - } - const indexByHash = {} // map from change hash to its index in the changes array - const heads = {} // change hashes that are not a dependency of any other change - - for (let i = 0; i < changes.length; i++) { - const change = changes[i] - indexByHash[change.hash] = i - heads[change.hash] = true - - columns.actor.appendValue(change.actorNum) - columns.seq.appendValue(change.seq) - columns.maxOp.appendValue(change.startOp + change.ops.length - 1) - columns.time.appendValue(change.time) - columns.message.appendValue(change.message) - columns.depsNum.appendValue(change.deps.length) - - for (const dep of change.deps) { - if (typeof indexByHash[dep] !== 'number') { - throw new RangeError(`Unknown dependency hash: ${dep}`) - } - columns.depsIndex.appendValue(indexByHash[dep]) - if (heads[dep]) delete heads[dep] - } - - if (change.extraBytes) { - columns.extraLen.appendValue(change.extraBytes.byteLength << 4 | VALUE_TYPE.BYTES) - columns.extraRaw.appendRawBytes(change.extraBytes) - } else { - columns.extraLen.appendValue(VALUE_TYPE.BYTES) // zero-length byte array - } - } - - const changesColumns : any = [] - for (const {columnName, columnId} of DOCUMENT_COLUMNS) { - changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]}) - } - changesColumns.sort((a, b) => a.id - b.id) - return { changesColumns, heads: Object.keys(heads).sort() } -} - function decodeDocumentChanges(changes, expectedHeads) { - const heads = {} // change hashes that are not a dependency of any other change + let heads = {} // change hashes that are not a dependency of any other change for (let i = 0; i < changes.length; i++) { - const change = changes[i] + let change = changes[i] change.deps = [] - for (const index of change.depsNum.map(d => d.depsIndex)) { + for (let index of change.depsNum.map(d => d.depsIndex)) { if (!changes[index] || !changes[index].hash) { throw new RangeError(`No hash for index ${index} while processing index ${i}`) } @@ -1116,52 +980,47 @@ function decodeDocumentChanges(changes, expectedHeads) { } } -/** - * Transforms a list of changes into a binary representation of the document state. 
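The binary search in groupChangeOps above routes each op to the change whose op range contains it, using the per-change maxOp values as sorted keys. The exact loop is abridged in the hunk above, so treat this as a minimal reconstruction of the idea rather than the verbatim code:

// Given one actor's changes sorted by maxOp, find the index of the first
// change whose maxOp is >= the op's counter, i.e. the change containing it.
function findChangeIndex(maxOps: number[], counter: number): number {
  let left = 0, right = maxOps.length - 1
  while (left < right) {
    const mid = (left + right) >>> 1
    if (maxOps[mid] < counter) left = mid + 1
    else right = mid
  }
  return left
}

console.log(findChangeIndex([3, 7, 12], 5)) // 1: counters 4..7 sit in the second change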
- */ -export function encodeDocument(binaryChanges) { - const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false) - const { changesColumns, heads } = encodeDocumentChanges(changes) - const opsColumns = encodeOps(groupDocumentOps(changes), true) - for (const column of changesColumns) deflateColumn(column) - for (const column of opsColumns) deflateColumn(column) +function encodeDocumentHeader(doc) { + const { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes } = doc + for (let column of changesColumns) deflateColumn(column) + for (let column of opsColumns) deflateColumn(column) return encodeContainer(CHUNK_TYPE_DOCUMENT, encoder => { encoder.appendUint53(actorIds.length) - for (const actor of actorIds) { + for (let actor of actorIds) { encoder.appendHexString(actor) } encoder.appendUint53(heads.length) - for (const head of heads.sort()) { + for (let head of heads.sort()) { encoder.appendRawBytes(hexStringToBytes(head)) } encodeColumnInfo(encoder, changesColumns) encodeColumnInfo(encoder, opsColumns) - // @ts-ignore - for (const column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) - // @ts-ignore - for (const column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let index of headsIndexes) encoder.appendUint53(index) + if (extraBytes) encoder.appendRawBytes(extraBytes) }).bytes } -export function decodeDocumentHeader(buffer) { +function decodeDocumentHeader(buffer) { const documentDecoder = new Decoder(buffer) const header = decodeContainerHeader(documentDecoder, true) const decoder = new Decoder(header.chunkData) if (!documentDecoder.done) throw new RangeError('Encoded document has trailing data') if (header.chunkType !== CHUNK_TYPE_DOCUMENT) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const actorIds : string[] = [], numActors = decoder.readUint53() + const actorIds = [], numActors = decoder.readUint53() for (let i = 0; i < numActors; i++) { actorIds.push(decoder.readHexString()) } - const heads : string[] = [], numHeads = decoder.readUint53() + const heads = [], headsIndexes = [], numHeads = decoder.readUint53() for (let i = 0; i < numHeads; i++) { heads.push(bytesToHexString(decoder.readRawBytes(32))) } - const changesColumns : any = decodeColumnInfo(decoder) - const opsColumns : any = decodeColumnInfo(decoder) + const changesColumns = decodeColumnInfo(decoder) + const opsColumns = decodeColumnInfo(decoder) for (let i = 0; i < changesColumns.length; i++) { changesColumns[i].buffer = decoder.readRawBytes(changesColumns[i].bufferLen) inflateColumn(changesColumns[i]) @@ -1170,12 +1029,15 @@ export function decodeDocumentHeader(buffer) { opsColumns[i].buffer = decoder.readRawBytes(opsColumns[i].bufferLen) inflateColumn(opsColumns[i]) } + if (!decoder.done) { + for (let i = 0; i < numHeads; i++) headsIndexes.push(decoder.readUint53()) + } const extraBytes = decoder.readRawBytes(decoder.buf.byteLength - decoder.offset) - return { changesColumns, opsColumns, actorIds, heads, extraBytes } + return { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes } } -export function decodeDocument(buffer) { +function decodeDocument(buffer) { const { changesColumns, opsColumns, actorIds, heads } = decodeDocumentHeader(buffer) const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS) const ops = 
decodeOps(decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), true) @@ -1190,7 +1052,7 @@ export function decodeDocument(buffer) { function deflateColumn(column) { if (column.encoder.buffer.byteLength >= DEFLATE_MIN_SIZE) { column.encoder = {buffer: pako.deflateRaw(column.encoder.buffer)} - column.id |= COLUMN_TYPE_DEFLATE + column.columnId |= COLUMN_TYPE_DEFLATE } } @@ -1204,230 +1066,9 @@ function inflateColumn(column) { } } -/** - * Takes all the operations for the same property (i.e. the same key in a map, or the same list - * element) and mutates the object patch to reflect the current value(s) of that property. There - * might be multiple values in the case of a conflict. `objects` is a map from objectId to the - * patch for that object. `property` contains `objId`, `key`, a list of `ops`, and `index` (the - * current list index if the object is a list). Returns true if one or more values are present, - * or false if the property has been deleted. - */ -function addPatchProperty(objects, property) { - const values : any = {} - let counter : any = null - for (const op of property.ops) { - // Apply counters and their increments regardless of the number of successor operations - if (op.actionName === 'set' && op.value.datatype === 'counter') { - if (!counter) counter = {opId: op.opId, value: 0, succ: {}} - counter.value += op.value.value - for (const succId of op.succ) counter.succ[succId] = true - } else if (op.actionName === 'inc') { - if (!counter) throw new RangeError(`inc operation ${op.opId} without a counter`) - counter.value += op.value.value - delete counter.succ[op.opId] - for (const succId of op.succ) counter.succ[succId] = true - - } else if (op.succ.length === 0) { // Ignore any ops that have been overwritten - if (op.actionName.startsWith('make')) { - values[op.opId] = objects[op.opId] - } else if (op.actionName === 'set') { - values[op.opId] = {value: op.value.value, type: 'value'} - if (op.value.datatype) { - values[op.opId].datatype = op.value.datatype - } - } else if (op.actionName === 'link') { - // NB. This assumes that the ID of the child object is greater than the ID of the current - // object. This is true as long as link operations are only used to redo undone make* - // operations, but it will cease to be true once subtree moves are allowed. - if (!op.childId) throw new RangeError(`link operation ${op.opId} without a childId`) - values[op.opId] = objects[op.childId] - } else { - throw new RangeError(`Unexpected action type: ${op.actionName}`) - } - } - } - - // If the counter had any successor operation that was not an increment, that means the counter - // must have been deleted, so we omit it from the patch. - if (counter && Object.keys(counter.succ).length === 0) { - values[counter.opId] = {type: 'value', value: counter.value, datatype: 'counter'} - } - - if (Object.keys(values).length > 0) { - const obj = objects[property.objId] - if (obj.type === 'map' || obj.type === 'table') { - obj.props[property.key] = values - } else if (obj.type === 'list' || obj.type === 'text') { - makeListEdits(obj, values, property.key, property.index) - } - return true - } else { - return false - } -} - -/** - * When constructing a patch to instantiate a loaded document, this function adds the edits to - * insert one list element. Usually there is one value, but in the case of a conflict there may be - * several values. `elemId` is the ID of the list element, and `index` is the list index at which - * the value(s) should be placed. 
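deflateColumn and inflateColumn above are the whole column-compression story: a column whose encoded buffer reaches DEFLATE_MIN_SIZE (256 bytes) is run through raw DEFLATE, and the deflate bit of its columnId is set so readers know to reverse it. A standalone sketch of the same rule using pako:

import * as pako from 'pako'

const DEFLATE_MIN_SIZE = 256
const COLUMN_TYPE_DEFLATE = 8

// Compress a column buffer only once it is big enough to be worth it, and
// flag the columnId so that the reader knows to inflate it again.
function maybeDeflate(columnId: number, buffer: Uint8Array): { columnId: number, buffer: Uint8Array } {
  if (buffer.byteLength < DEFLATE_MIN_SIZE) return { columnId, buffer }
  return { columnId: columnId | COLUMN_TYPE_DEFLATE, buffer: pako.deflateRaw(buffer) }
}

function inflateIfNeeded(columnId: number, buffer: Uint8Array): Uint8Array {
  return (columnId & COLUMN_TYPE_DEFLATE) !== 0 ? pako.inflateRaw(buffer) : buffer
}

// Round trip: 1000 identical bytes compress well and inflate back unchanged
const { columnId, buffer } = maybeDeflate(2, new Uint8Array(1000))
console.log(inflateIfNeeded(columnId, buffer).byteLength) // 1000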
- */ -function makeListEdits(list, values, elemId, index) { - let firstValue = true - const opIds = Object.keys(values).sort((id1, id2) => compareParsedOpIds(parseOpId(id1), parseOpId(id2))) - for (const opId of opIds) { - if (firstValue) { - list.edits.push({action: 'insert', value: values[opId], elemId, opId, index}) - } else { - list.edits.push({action: 'update', value: values[opId], opId, index}) - } - firstValue = false - } -} - -/** - * Recursively walks the patch tree, calling appendEdit on every list edit in order to consense - * consecutive sequences of insertions into multi-inserts. - */ -function condenseEdits(diff) { - if (diff.type === 'list' || diff.type === 'text') { - diff.edits.forEach(e => condenseEdits(e.value)) - const newEdits = diff.edits - diff.edits = [] - for (const edit of newEdits) appendEdit(diff.edits, edit) - } else if (diff.type === 'map' || diff.type === 'table') { - for (const prop of Object.keys(diff.props)) { - for (const opId of Object.keys(diff.props[prop])) { - condenseEdits(diff.props[prop][opId]) - } - } - } -} - -/** - * Appends a list edit operation (insert, update, remove) to an array of existing operations. If the - * last existing operation can be extended (as a multi-op), we do that. - */ -export function appendEdit(existingEdits, nextEdit) { - if (existingEdits.length === 0) { - existingEdits.push(nextEdit) - return - } - - const lastEdit = existingEdits[existingEdits.length - 1] - if (lastEdit.action === 'insert' && nextEdit.action === 'insert' && - lastEdit.index === nextEdit.index - 1 && - lastEdit.value.type === 'value' && nextEdit.value.type === 'value' && - lastEdit.elemId === lastEdit.opId && nextEdit.elemId === nextEdit.opId && - opIdDelta(lastEdit.elemId, nextEdit.elemId, 1)) { - lastEdit.action = 'multi-insert' - lastEdit.values = [lastEdit.value.value, nextEdit.value.value] - delete lastEdit.value - delete lastEdit.opId - - } else if (lastEdit.action === 'multi-insert' && nextEdit.action === 'insert' && - lastEdit.index + lastEdit.values.length === nextEdit.index && - nextEdit.value.type === 'value' && nextEdit.elemId === nextEdit.opId && - opIdDelta(lastEdit.elemId, nextEdit.elemId, lastEdit.values.length)) { - lastEdit.values.push(nextEdit.value.value) - - } else if (lastEdit.action === 'remove' && nextEdit.action === 'remove' && - lastEdit.index === nextEdit.index) { - lastEdit.count += nextEdit.count - - } else { - existingEdits.push(nextEdit) - } -} - -/** - * Returns true if the two given operation IDs have the same actor ID, and the counter of `id2` is - * exactly `delta` greater than the counter of `id1`. - */ -function opIdDelta(id1, id2, delta = 1) { - const parsed1 = parseOpId(id1), parsed2 = parseOpId(id2) - return parsed1.actorId === parsed2.actorId && parsed1.counter + delta === parsed2.counter -} - -/** - * Parses the document (in compressed binary format) given as `documentBuffer` - * and returns a patch that can be sent to the frontend to instantiate the - * current state of that document. 
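appendEdit's coalescing is easiest to see with two adjacent insertions by the same actor; a short usage sketch with invented opIds:

// Two consecutive single-element insertions collapse into one multi-insert;
// '1@abc' and '2@abc' are made-up opIds by the same actor.
const edits: any[] = []
appendEdit(edits, { action: 'insert', index: 0, elemId: '1@abc', opId: '1@abc',
                    value: { type: 'value', value: 'h' } })
appendEdit(edits, { action: 'insert', index: 1, elemId: '2@abc', opId: '2@abc',
                    value: { type: 'value', value: 'i' } })
console.log(edits)
// [ { action: 'multi-insert', index: 0, elemId: '1@abc', values: [ 'h', 'i' ] } ]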
- */ -export function constructPatch(documentBuffer) { - const { opsColumns, actorIds } = decodeDocumentHeader(documentBuffer) - const col : any = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( - (acc, col: any) => Object.assign(acc, {[col.columnName]: col.decoder}), {}) - - const objects = {_root: {objectId: '_root', type: 'map', props: {}}} - let property : any = null - - while (!col.idActor.done) { - const opId = `${col.idCtr.readValue()}@${actorIds[col.idActor.readValue()]}` - const action = col.action.readValue(), actionName = ACTIONS[action] - if (action % 2 === 0) { // even-numbered actions are object creation - const type = OBJECT_TYPE[actionName] || 'unknown' - if (type === 'list' || type === 'text') { - objects[opId] = {objectId: opId, type, edits: []} - } else { - objects[opId] = {objectId: opId, type, props: {}} - } - } - - const objActor = col.objActor.readValue(), objCtr = col.objCtr.readValue() - const objId = objActor === null ? '_root' : `${objCtr}@${actorIds[objActor]}` - const obj = objects[objId] - if (!obj) throw new RangeError(`Operation for nonexistent object: ${objId}`) - - const keyActor = col.keyActor.readValue(), keyCtr = col.keyCtr.readValue() - const keyStr = col.keyStr.readValue(), insert = !!col.insert.readValue() - const chldActor = col.chldActor.readValue(), chldCtr = col.chldCtr.readValue() - const childId = chldActor === null ? null : `${chldCtr}@${actorIds[chldActor]}` - const sizeTag = col.valLen.readValue() - const rawValue = col.valRaw.readRawBytes(sizeTag >> 4) - const value = decodeValue(sizeTag, rawValue) - const succNum = col.succNum.readValue() - const succ : string[] = [] - for (let i = 0; i < succNum; i++) { - succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`) - } - - if (!actionName || obj.type === 'unknown') continue - - let key - if (obj.type === 'list' || obj.type === 'text') { - if (keyCtr === null || (keyCtr === 0 && !insert)) { - throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`) - } - key = insert ? 
opId : `${keyCtr}@${actorIds[keyActor]}` - } else { - if (keyStr === null) { - throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`) - } - key = keyStr - } - - if (!property || property.objId !== objId || property.key !== key) { - let index = 0 - if (property) { - index = property.index - if (addPatchProperty(objects, property)) index += 1 - if (property.objId !== objId) index = 0 - } - property = {objId, key, index, ops: []} - } - property.ops.push({opId, actionName, value, childId, succ}) - } - - if (property) addPatchProperty(objects, property) - condenseEdits(objects._root) - return objects._root -} - module.exports = { - COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS, + COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS, DOCUMENT_COLUMNS, encoderByColumnId, decoderByColumnId, makeDecoders, decodeValue, splitContainers, encodeChange, decodeChangeColumns, decodeChange, decodeChangeMeta, decodeChanges, - decodeDocumentHeader, encodeDocument, decodeDocument, - getChangeChecksum, appendEdit, constructPatch + encodeDocumentHeader, decodeDocumentHeader, decodeDocument } diff --git a/automerge-js/src/common.ts b/automerge-js/test/legacy/common.js similarity index 66% rename from automerge-js/src/common.ts rename to automerge-js/test/legacy/common.js index 9b5a7299..02e91392 100644 --- a/automerge-js/src/common.ts +++ b/automerge-js/test/legacy/common.js @@ -1,6 +1,4 @@ -import { UnknownObject } from './types'; - -export function isObject(obj: unknown) : obj is UnknownObject { +function isObject(obj) { return typeof obj === 'object' && obj !== null } @@ -8,28 +6,20 @@ export function isObject(obj: unknown) : obj is UnknownObject { * Returns a shallow copy of the object `obj`. Faster than `Object.assign({}, obj)`. * https://jsperf.com/cloning-large-objects/1 */ -/* -export function copyObject(obj: T) : T { - if (!isObject(obj)) throw RangeError(`Cannot copy object '${obj}'`) //return {} - const copy : UnknownObject = {} - for (const key of Object.keys(obj)) { +function copyObject(obj) { + if (!isObject(obj)) return {} + let copy = {} + for (let key of Object.keys(obj)) { copy[key] = obj[key] } return copy } -*/ /** * Takes a string in the form that is used to identify operations (a counter concatenated * with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`. */ - -interface OpIdObj { - counter: number, - actorId: string -} - -export function parseOpId(opId: string) : OpIdObj { +function parseOpId(opId) { const match = /^(\d+)@(.*)$/.exec(opId || '') if (!match) { throw new RangeError(`Not a valid opId: ${opId}`) @@ -40,7 +30,7 @@ export function parseOpId(opId: string) : OpIdObj { /** * Returns true if the two byte arrays contain the same data, false if not. */ -export function equalBytes(array1: Uint8Array, array2: Uint8Array) : boolean { +function equalBytes(array1, array2) { if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) { throw new TypeError('equalBytes can only compare Uint8Arrays') } @@ -51,3 +41,15 @@ export function equalBytes(array1: Uint8Array, array2: Uint8Array) : boolean { return true } +/** + * Creates an array containing the value `null` repeated `length` times. 
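Operation IDs throughout these files are strings of the form counter@actorId, and compareParsedOpIds orders them as Lamport timestamps: by counter first, with the actor ID as tiebreaker. A standalone sketch of that ordering applied directly to the string form:

// Lamport ordering of opIds ('counter@actorId'): numeric counter first, then
// actorId as an arbitrary but consistent tiebreaker.
function compareOpIds(a: string, b: string): number {
  const [ctrA, actorA] = a.split('@')
  const [ctrB, actorB] = b.split('@')
  if (Number(ctrA) !== Number(ctrB)) return Number(ctrA) - Number(ctrB)
  return actorA < actorB ? -1 : actorA > actorB ? 1 : 0
}

// A plain string sort would put '10@b' first; the counter must compare numerically
console.log(['10@b', '2@b', '2@a'].sort(compareOpIds)) // [ '2@a', '2@b', '10@b' ]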
+ */ +function createArrayOfNulls(length) { + const array = new Array(length) + for (let i = 0; i < length; i++) array[i] = null + return array +} + +module.exports = { + isObject, copyObject, parseOpId, equalBytes, createArrayOfNulls +} diff --git a/automerge-js/src/encoding.ts b/automerge-js/test/legacy/encoding.js similarity index 96% rename from automerge-js/src/encoding.ts rename to automerge-js/test/legacy/encoding.js index dac447ec..92b62df6 100644 --- a/automerge-js/src/encoding.ts +++ b/automerge-js/test/legacy/encoding.js @@ -8,29 +8,28 @@ const utf8encoder = new TextEncoder() const utf8decoder = new TextDecoder('utf-8') -export function stringToUtf8(s: string) : BufferSource { - return utf8encoder.encode(s) +function stringToUtf8(string) { + return utf8encoder.encode(string) } -export function utf8ToString(buffer: BufferSource) : string { +function utf8ToString(buffer) { return utf8decoder.decode(buffer) } /** * Converts a string consisting of hexadecimal digits into an Uint8Array. */ -export function hexStringToBytes(value: string) : Uint8Array { +function hexStringToBytes(value) { if (typeof value !== 'string') { throw new TypeError('value is not a string') } if (!/^([0-9a-f][0-9a-f])*$/.test(value)) { throw new RangeError('value is not hexadecimal') } - const match = value.match(/../g) - if (match === null) { + if (value === '') { return new Uint8Array(0) } else { - return new Uint8Array(match.map(b => parseInt(b, 16))) + return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) } } @@ -43,9 +42,8 @@ for (let i = 0; i < 256; i++) { /** * Converts a Uint8Array into the equivalent hexadecimal string. */ -export function bytesToHexString(bytes: Uint8Array) : string { - let hex = '' - const len = bytes.byteLength +function bytesToHexString(bytes) { + let hex = '', len = bytes.byteLength for (let i = 0; i < len; i++) { hex += BYTE_TO_HEX[bytes[i]] } @@ -56,10 +54,7 @@ export function bytesToHexString(bytes: Uint8Array) : string { * Wrapper around an Uint8Array that allows values to be appended to the buffer, * and that automatically grows the buffer when space runs out. */ -export class Encoder { - buf: Uint8Array; - offset: number; - +class Encoder { constructor() { this.buf = new Uint8Array(16) this.offset = 0 @@ -287,7 +282,6 @@ export class Encoder { * the buffer constructed by this Encoder. */ finish() { - return } } @@ -296,10 +290,7 @@ export class Encoder { * the current decoding position, and allows values to be incrementally read by * decoding the bytes at the current position. */ -export class Decoder { - buf: Uint8Array; - offset: number; - +class Decoder { constructor(buffer) { if (!(buffer instanceof Uint8Array)) { throw new TypeError(`Not a byte array: ${buffer}`) @@ -564,13 +555,7 @@ export class Decoder { * After one of these three has completed, the process repeats, starting again * with a repetition count, until we reach the end of the buffer. */ -export class RLEEncoder extends Encoder { - type: any - state: string - lastValue: any - count: number - literal: any - +class RLEEncoder extends Encoder { constructor(type) { super() this.type = type @@ -679,7 +664,7 @@ export class RLEEncoder extends Encoder { * Returns an object of the form `{nonNullValues, sum}` where `nonNullValues` is the number of * non-null values copied, and `sum` is the sum (only if the `sumValues` option is set). 
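Concretely, the encoder's output is a sequence of runs, each introduced by a signed LEB128 count n: n > 0 means one value repeated n times, n < 0 means |n| literal values follow, and n == 0 means an unsigned count of nulls follows. A decoded-by-hand sketch of that shape:

// Expand RLE runs by hand to show the three run kinds. Each run is written
// here as [count, ...payload], mirroring what the encoder emits.
const runs: number[][] = [
  [3, 5],        // count  3 -> the value 5, repeated three times
  [0, 2],        // count  0 -> followed by a null count: two nulls
  [-3, 1, 2, 3], // count -3 -> three literal values
]

function expandRuns(rs: number[][]): (number | null)[] {
  const out: (number | null)[] = []
  for (const [count, ...payload] of rs) {
    if (count > 0) for (let i = 0; i < count; i++) out.push(payload[0])
    else if (count === 0) for (let i = 0; i < payload[0]; i++) out.push(null)
    else out.push(...payload)
  }
  return out
}

console.log(expandRuns(runs)) // [ 5, 5, 5, null, null, 1, 2, 3 ]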
*/ - copyFrom(decoder, options: any = {}) : any { + copyFrom(decoder, options = {}) { const { count, sumValues, sumShift } = options if (!(decoder instanceof RLEDecoder) || (decoder.type !== this.type)) { throw new TypeError('incompatible type of decoder') @@ -722,7 +707,7 @@ export class RLEEncoder extends Encoder { nonNullValues += numValues for (let i = 0; i < numValues; i++) { if (decoder.done) throw new RangeError('incomplete literal') - const value : any = decoder.readRawValue() + const value = decoder.readRawValue() if (value === decoder.lastValue) throw new RangeError('Repetition of values is not allowed in literal') decoder.lastValue = value this._appendValue(value) @@ -763,7 +748,7 @@ export class RLEEncoder extends Encoder { this.appendRawValue(this.lastValue) } else if (this.state === 'literal') { this.appendInt53(-this.literal.length) - for (const v of this.literal) this.appendRawValue(v) + for (let v of this.literal) this.appendRawValue(v) } else if (this.state === 'nulls') { this.appendInt32(0) this.appendUint53(this.count) @@ -801,12 +786,7 @@ export class RLEEncoder extends Encoder { * Counterpart to RLEEncoder: reads values from an RLE-compressed sequence, * returning nulls and repeated values as required. */ -export class RLEDecoder extends Decoder { - type: any; - lastValue: any; - count: number; - state: any; - +class RLEDecoder extends Decoder { constructor(type, buffer) { super(buffer) this.type = type @@ -949,9 +929,7 @@ export class RLEDecoder extends Decoder { * * Null values are also allowed, as with RLEEncoder. */ -export class DeltaEncoder extends RLEEncoder { - absoluteValue: number - +class DeltaEncoder extends RLEEncoder { constructor() { super('int') this.absoluteValue = 0 @@ -977,7 +955,7 @@ export class DeltaEncoder extends RLEEncoder { * contain the key `count`, indicating the number of values to copy. If not specified, copies * all remaining values in the decoder. */ - copyFrom(decoder, options: any = {}) : any { + copyFrom(decoder, options = {}) { if (options.sumValues) { throw new RangeError('unsupported options for DeltaEncoder.copyFrom()') } @@ -991,8 +969,7 @@ export class DeltaEncoder extends RLEEncoder { // Copy any null values, and the first non-null value, so that appendValue() computes the // difference between the encoder's last value and the decoder's first (absolute) value. - const value = decoder.readValue() - let nulls = 0 + let value = decoder.readValue(), nulls = 0 this.appendValue(value) if (value === null) { nulls = decoder.count + 1 @@ -1024,9 +1001,7 @@ export class DeltaEncoder extends RLEEncoder { * Counterpart to DeltaEncoder: reads values from a delta-compressed sequence of * numbers (may include null values). */ -export class DeltaDecoder extends RLEDecoder { - absoluteValue : number; - +class DeltaDecoder extends RLEDecoder { constructor(buffer) { super('int', buffer) this.absoluteValue = 0 @@ -1083,10 +1058,7 @@ export class DeltaDecoder extends RLEDecoder { * only encode the repetition count but not the actual value, since the values * just alternate between false and true (starting with false). */ -export class BooleanEncoder extends Encoder { - lastValue: boolean; - count: number; - +class BooleanEncoder extends Encoder { constructor() { super() this.lastValue = false @@ -1116,7 +1088,7 @@ export class BooleanEncoder extends Encoder { * contain the key `count`, indicating the number of values to copy. If not specified, copies * all remaining values in the decoder. 
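DeltaEncoder above is simply the RLE encoder applied to successive differences, which turns monotonically increasing sequences, such as opId counters, into long runs. A minimal sketch of the transform:

// Delta-transform a sequence before run-length encoding it: each value is
// stored as its difference from the previous one.
function toDeltas(values: number[]): number[] {
  let previous = 0
  return values.map(v => {
    const delta = v - previous
    previous = v
    return delta
  })
}

console.log(toDeltas([10, 11, 12, 13])) // [ 10, 1, 1, 1 ] -- an RLE-friendly run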
*/ - copyFrom(decoder, options: any = {}) : any { + copyFrom(decoder, options = {}) { if (!(decoder instanceof BooleanDecoder)) { throw new TypeError('incompatible type of decoder') } @@ -1166,11 +1138,7 @@ export class BooleanEncoder extends Encoder { * Counterpart to BooleanEncoder: reads boolean values from a runlength-encoded * sequence. */ -export class BooleanDecoder extends Decoder { - lastValue: boolean; - firstRun: boolean; - count: number; - +class BooleanDecoder extends Decoder { constructor(buffer) { super(buffer) this.lastValue = true // is negated the first time we read a count @@ -1235,3 +1203,7 @@ export class BooleanDecoder extends Decoder { } } +module.exports = { + stringToUtf8, utf8ToString, hexStringToBytes, bytesToHexString, + Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder +} diff --git a/automerge-js/test/legacy/sync.js b/automerge-js/test/legacy/sync.js new file mode 100644 index 00000000..3bb1571d --- /dev/null +++ b/automerge-js/test/legacy/sync.js @@ -0,0 +1,480 @@ +/** + * Implementation of the data synchronisation protocol that brings a local and a remote document + * into the same state. This is typically used when two nodes have been disconnected for some time, + * and need to exchange any changes that happened while they were disconnected. The two nodes that + * are syncing could be client and server, or server and client, or two peers with symmetric roles. + * + * The protocol is based on this paper: Martin Kleppmann and Heidi Howard. Byzantine Eventual + * Consistency and the Fundamental Limits of Peer-to-Peer Databases. https://arxiv.org/abs/2012.00472 + * + * The protocol assumes that every time a node successfully syncs with another node, it remembers + * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The + * next time we try to sync with the same node, we start from the assumption that the other node's + * document version is no older than the outcome of the last sync, so we only need to exchange any + * changes that are more recent than the last sync. This assumption may not be true if the other + * node did not correctly persist its state (perhaps it crashed before writing the result of the + * last sync to disk), and we fall back to sending the entire document in this case. + */ + +const Backend = null //require('./backend') +const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding') +const { decodeChangeMeta } = require('./columnar') +const { copyObject } = require('./common') + +const HASH_SIZE = 32 // 256 bits = 32 bytes +const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification +const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identification + +// These constants correspond to a 1% false positive rate. The values can be changed without +// breaking compatibility of the network protocol, since the parameters used for a particular +// Bloom filter are encoded in the wire format. +const BITS_PER_ENTRY = 10, NUM_PROBES = 7 + +/** + * A Bloom filter implementation that can be serialised to a byte array for transmission + * over a network. The entries that are added are assumed to already be SHA-256 hashes, + * so this implementation does not perform its own hashing. 
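+ *
+ * With the constants above (BITS_PER_ENTRY = 10, NUM_PROBES = 7), the usual Bloom filter
+ * estimate (1 - e^(-NUM_PROBES / BITS_PER_ENTRY))^NUM_PROBES = (1 - e^(-0.7))^7 ≈ 0.008
+ * works out to the roughly 1% false positive rate mentioned above.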
+ */ +class BloomFilter { + constructor (arg) { + if (Array.isArray(arg)) { + // arg is an array of SHA256 hashes in hexadecimal encoding + this.numEntries = arg.length + this.numBitsPerEntry = BITS_PER_ENTRY + this.numProbes = NUM_PROBES + this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + for (let hash of arg) this.addHash(hash) + } else if (arg instanceof Uint8Array) { + if (arg.byteLength === 0) { + this.numEntries = 0 + this.numBitsPerEntry = 0 + this.numProbes = 0 + this.bits = arg + } else { + const decoder = new Decoder(arg) + this.numEntries = decoder.readUint32() + this.numBitsPerEntry = decoder.readUint32() + this.numProbes = decoder.readUint32() + this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + } + } else { + throw new TypeError('invalid argument') + } + } + + /** + * Returns the Bloom filter state, encoded as a byte array. + */ + get bytes() { + if (this.numEntries === 0) return new Uint8Array(0) + const encoder = new Encoder() + encoder.appendUint32(this.numEntries) + encoder.appendUint32(this.numBitsPerEntry) + encoder.appendUint32(this.numProbes) + encoder.appendRawBytes(this.bits) + return encoder.buffer + } + + /** + * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits + * in the Bloom filter need to be tested or set for this particular entry. We do this by + * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers, + * and then using triple hashing to compute the probe indexes. The algorithm comes from: + * + * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification. + * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004. + * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf + */ + getProbes(hash) { + const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength + if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) + // on the next three lines, the right shift means interpret value as unsigned + let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo + let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo + let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo + const probes = [x] + for (let i = 1; i < this.numProbes; i++) { + x = (x + y) % modulo + y = (y + z) % modulo + probes.push(x) + } + return probes + } + + /** + * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string). + */ + addHash(hash) { + for (let probe of this.getProbes(hash)) { + this.bits[probe >>> 3] |= 1 << (probe & 7) + } + } + + /** + * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter. + */ + containsHash(hash) { + if (this.numEntries === 0) return false + for (let probe of this.getProbes(hash)) { + if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) { + return false + } + } + return true + } +} + +/** + * Encodes a sorted array of SHA-256 hashes (as hexadecimal strings) into a byte array. 
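+ * The encoding is a count (as written by `appendUint32`) followed by the raw 32-byte hashes
+ * concatenated in ascending order; `decodeHashes()` below relies on this layout.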
+ */ +function encodeHashes(encoder, hashes) { + if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array') + encoder.appendUint32(hashes.length) + for (let i = 0; i < hashes.length; i++) { + if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted') + const bytes = hexStringToBytes(hashes[i]) + if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits') + encoder.appendRawBytes(bytes) + } +} + +/** + * Decodes a byte array in the format returned by encodeHashes(), and returns its content as an + * array of hex strings. + */ +function decodeHashes(decoder) { + let length = decoder.readUint32(), hashes = [] + for (let i = 0; i < length; i++) { + hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) + } + return hashes +} + +/** + * Takes a sync message of the form `{heads, need, have, changes}` and encodes it as a byte array for + * transmission. + */ +function encodeSyncMessage(message) { + const encoder = new Encoder() + encoder.appendByte(MESSAGE_TYPE_SYNC) + encodeHashes(encoder, message.heads) + encodeHashes(encoder, message.need) + encoder.appendUint32(message.have.length) + for (let have of message.have) { + encodeHashes(encoder, have.lastSync) + encoder.appendPrefixedBytes(have.bloom) + } + encoder.appendUint32(message.changes.length) + for (let change of message.changes) { + encoder.appendPrefixedBytes(change) + } + return encoder.buffer +} + +/** + * Takes a binary-encoded sync message and decodes it into the form `{heads, need, have, changes}`. + */ +function decodeSyncMessage(bytes) { + const decoder = new Decoder(bytes) + const messageType = decoder.readByte() + if (messageType !== MESSAGE_TYPE_SYNC) { + throw new RangeError(`Unexpected message type: ${messageType}`) + } + const heads = decodeHashes(decoder) + const need = decodeHashes(decoder) + const haveCount = decoder.readUint32() + let message = {heads, need, have: [], changes: []} + for (let i = 0; i < haveCount; i++) { + const lastSync = decodeHashes(decoder) + const bloom = decoder.readPrefixedBytes(decoder) + message.have.push({lastSync, bloom}) + } + const changeCount = decoder.readUint32() + for (let i = 0; i < changeCount; i++) { + const change = decoder.readPrefixedBytes() + message.changes.push(change) + } + // Ignore any trailing bytes -- they can be used for extensions by future versions of the protocol + return message +} + +/** + * Takes a SyncState and encodes as a byte array those parts of the state that should persist across + * an application restart or disconnect and reconnect. The ephemeral parts of the state that should + * be cleared on reconnect are not encoded. + */ +function encodeSyncState(syncState) { + const encoder = new Encoder() + encoder.appendByte(PEER_STATE_TYPE) + encodeHashes(encoder, syncState.sharedHeads) + return encoder.buffer +} + +/** + * Takes a persisted peer state as encoded by `encodeSyncState` and decodes it into a SyncState + * object. The parts of the peer state that were not encoded are initialised with default values. + */ +function decodeSyncState(bytes) { + const decoder = new Decoder(bytes) + const recordType = decoder.readByte() + if (recordType !== PEER_STATE_TYPE) { + throw new RangeError(`Unexpected record type: ${recordType}`) + } + const sharedHeads = decodeHashes(decoder) + return Object.assign(initSyncState(), { sharedHeads }) +} + +/** + * Constructs a Bloom filter containing all changes that are not one of the hashes in + * `lastSync` or its transitive dependencies. 
In other words, the filter contains those + * changes that have been applied since the version identified by `lastSync`. Returns + * an object of the form `{lastSync, bloom}` as required for the `have` field of a sync + * message. + */ +function makeBloomFilter(backend, lastSync) { + const newChanges = Backend.getChanges(backend, lastSync) + const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash) + return {lastSync, bloom: new BloomFilter(hashes).bytes} +} + +/** + * Call this function when a sync message is received from another node. The `message` argument + * needs to already have been decoded using `decodeSyncMessage()`. This function determines the + * changes that we need to send to the other node in response. Returns an array of changes (as + * byte arrays). + */ +function getChangesToSend(backend, have, need) { + if (have.length === 0) { + return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) + } + + let lastSyncHashes = {}, bloomFilters = [] + for (let h of have) { + for (let hash of h.lastSync) lastSyncHashes[hash] = true + bloomFilters.push(new BloomFilter(h.bloom)) + } + + // Get all changes that were added since the last sync + const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) + .map(change => decodeChangeMeta(change, true)) + + let changeHashes = {}, dependents = {}, hashesToSend = {} + for (let change of changes) { + changeHashes[change.hash] = true + + // For each change, make a list of changes that depend on it + for (let dep of change.deps) { + if (!dependents[dep]) dependents[dep] = [] + dependents[dep].push(change.hash) + } + + // Exclude any change hashes contained in one or more Bloom filters + if (bloomFilters.every(bloom => !bloom.containsHash(change.hash))) { + hashesToSend[change.hash] = true + } + } + + // Include any changes that depend on a Bloom-negative change + let stack = Object.keys(hashesToSend) + while (stack.length > 0) { + const hash = stack.pop() + if (dependents[hash]) { + for (let dep of dependents[hash]) { + if (!hashesToSend[dep]) { + hashesToSend[dep] = true + stack.push(dep) + } + } + } + } + + // Include any explicitly requested changes + let changesToSend = [] + for (let hash of need) { + hashesToSend[hash] = true + if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? 
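+ // This can happen when a peer explicitly requests a change that predates the
+ // last sync, so fall back to looking it up by hash directly.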
+ const change = Backend.getChangeByHash(backend, hash)
+ if (change) changesToSend.push(change)
+ }
+ }
+
+ // Return changes in the order they were returned by getMissingChanges()
+ for (let change of changes) {
+ if (hashesToSend[change.hash]) changesToSend.push(change.change)
+ }
+ return changesToSend
+}
+
+function initSyncState() {
+ return {
+ sharedHeads: [],
+ lastSentHeads: [],
+ theirHeads: null,
+ theirNeed: null,
+ theirHave: null,
+ sentHashes: {},
+ }
+}
+
+function compareArrays(a, b) {
+ return (a.length === b.length) && a.every((v, i) => v === b[i])
+}
+
+/**
+ * Given a backend and what we believe to be the state of our peer, generate a message which tells
+ * them about what we have and includes any changes we believe they need
+ */
+function generateSyncMessage(backend, syncState) {
+ if (!backend) {
+ throw new Error("generateSyncMessage called with no Automerge document")
+ }
+ if (!syncState) {
+ throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()")
+ }
+
+ let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState
+ const ourHeads = Backend.getHeads(backend)
+
+ // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied
+ // changes, and any of the remote peer's heads that we don't know about
+ const ourNeed = Backend.getMissingDeps(backend, theirHeads || [])
+
+ // There are two reasons why ourNeed may be nonempty: 1. we might be missing dependencies due to
+ // Bloom filter false positives; 2. we might be missing heads that the other peer mentioned
+ // because they (intentionally) only sent us a subset of changes. In case 1, we leave the `have`
+ // field of the message empty because we just want to fill in the missing dependencies for now.
+ // In case 2, or if ourNeed is empty, we send a Bloom filter to request any unsent changes.
+ let ourHave = []
+ if (!theirHeads || ourNeed.every(hash => theirHeads.includes(hash))) {
+ ourHave = [makeBloomFilter(backend, sharedHeads)]
+ }
+
+ // Fall back to a full re-sync if the sender's last sync state includes hashes
+ // that we don't know. This could happen if we crashed after the last sync and
+ // failed to persist changes that the other node already sent us.
+ if (theirHave && theirHave.length > 0) {
+ const lastSync = theirHave[0].lastSync
+ if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) {
+ // we need to prompt them to send us a fresh sync message; the one they sent is unintelligible, so we don't know what they need
+ const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []}
+ return [syncState, encodeSyncMessage(resetMsg)]
+ }
+ }
+
+ // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size
+ // these changes should ideally be RLE encoded but we haven't implemented that yet.
+ let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : []
+
+ // If the heads are equal, we're in sync and don't need to do anything further
+ const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads)
+ const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads)
+ if (headsUnchanged && headsEqual && changesToSend.length === 0) {
+ // no need to send a sync message if we know we're synced!
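+ // (our heads are unchanged since we last advertised them, they match the
+ // other peer's heads, and we have no changes queued to send)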
+ return [syncState, null]
+ }
+
+ // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the
+ // unnecessary recomputation
+ changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash])
+
+ // Regular response to a sync message: send any changes that the other node
+ // doesn't have. We leave the "have" field empty because the previous message
+ // generated by `syncStart` already indicated what changes we have.
+ const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend}
+ if (changesToSend.length > 0) {
+ sentHashes = copyObject(sentHashes)
+ for (const change of changesToSend) {
+ sentHashes[decodeChangeMeta(change, true).hash] = true
+ }
+ }
+
+ syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes})
+ return [syncState, encodeSyncMessage(syncMessage)]
+}
+
+/**
+ * Computes the heads that we share with a peer after we have just received some changes from that
+ * peer and applied them. This may not be sufficient to bring our heads in sync with the other
+ * peer's heads, since they may have only sent us a subset of their outstanding changes.
+ *
+ * `myOldHeads` are the local heads before the most recent changes were applied, `myNewHeads` are
+ * the local heads after those changes were applied, and `ourOldSharedHeads` is the previous set of
+ * shared heads. Applying the changes will have replaced some heads with others, but some heads may
+ * have remained unchanged (because they are for branches on which no changes have been added). Any
+ * such unchanged heads remain in the sharedHeads. Any sharedHeads that were replaced by applying
+ * changes are also replaced as sharedHeads. This is safe because if we received some changes from
+ * another peer, that means that peer had those changes, and therefore we now both know about them.
+ */
+function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) {
+ const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head))
+ const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head))
+ const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort()
+ return advancedHeads
+}
+
+
+/**
+ * Given a backend, a sync message, and the state of our peer, apply any changes, update what
+ * we believe about the peer, and (if there were applied changes) produce a patch for the frontend
+ */
+function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
+ if (!backend) {
+ throw new Error("receiveSyncMessage called with no Automerge document")
+ }
+ if (!oldSyncState) {
+ throw new Error("receiveSyncMessage requires a syncState, which can be created with initSyncState()")
+ }
+
+ let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null
+ const message = decodeSyncMessage(binaryMessage)
+ const beforeHeads = Backend.getHeads(backend)
+
+ // If we received changes, we try to apply them to the document. There may still be missing
+ // dependencies due to Bloom filter false positives, in which case the backend will enqueue the
+ // changes without applying them. The set of changes may also be incomplete if the sender decided
+ // to break a large set of changes into chunks.
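+ // With the Bloom filter parameters used here, roughly 1 in 100 absent hashes slips
+ // through, so the occasional missing dependency is expected; it is requested
+ // explicitly via the `need` field on the next round trip.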
+ if (message.changes.length > 0) { + [backend, patch] = Backend.applyChanges(backend, message.changes) + sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads) + } + + // If heads are equal, indicate we don't need to send a response message + if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) { + lastSentHeads = message.heads + } + + // If all of the remote heads are known to us, that means either our heads are equal, or we are + // ahead of the remote peer. In this case, take the remote heads to be our shared heads. + const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head)) + if (knownHeads.length === message.heads.length) { + sharedHeads = message.heads + // If the remote peer has lost all its data, reset our state to perform a full resync + if (message.heads.length === 0) { + lastSentHeads = [] + sentHashes = [] + } + } else { + // If some remote heads are unknown to us, we add all the remote heads we know to + // sharedHeads, but don't remove anything from sharedHeads. This might cause sharedHeads to + // contain some redundant hashes (where one hash is actually a transitive dependency of + // another), but this will be cleared up as soon as we know all the remote heads. + sharedHeads = [...new Set(knownHeads.concat(sharedHeads))].sort() + } + + const syncState = { + sharedHeads, // what we have in common to generate an efficient bloom filter + lastSentHeads, + theirHave: message.have, // the information we need to calculate the changes they need + theirHeads: message.heads, + theirNeed: message.need, + sentHashes + } + return [backend, syncState, patch] +} + +module.exports = { + receiveSyncMessage, generateSyncMessage, + encodeSyncMessage, decodeSyncMessage, + initSyncState, encodeSyncState, decodeSyncState, + BloomFilter // BloomFilter is a private API, exported only for testing purposes +} diff --git a/automerge-js/test/legacy_tests.ts b/automerge-js/test/legacy_tests.ts index 044b7eef..50cecbc4 100644 --- a/automerge-js/test/legacy_tests.ts +++ b/automerge-js/test/legacy_tests.ts @@ -1,7 +1,7 @@ import * as assert from 'assert' import * as Automerge from '../src' import { assertEqualsOneOf } from './helpers' -import { decodeChange } from '../src/columnar' +import { decodeChange } from './legacy/columnar' import * as AutomergeWASM from "automerge-wasm" Automerge.use(AutomergeWASM) diff --git a/automerge-js/test/sync_test.ts b/automerge-js/test/sync_test.ts index 0118776c..7b1e52ef 100644 --- a/automerge-js/test/sync_test.ts +++ b/automerge-js/test/sync_test.ts @@ -1,7 +1,7 @@ import * as assert from 'assert' import * as Automerge from '../src' -import { BloomFilter } from '../src/bloom' -import { decodeChangeMeta } from '../src/columnar' +import { BloomFilter } from './legacy/sync' +import { decodeChangeMeta } from './legacy/columnar' import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" import * as AutomergeWASM from "automerge-wasm" diff --git a/automerge-js/test/text_test.ts b/automerge-js/test/text_test.ts index 51424c91..e55287ce 100644 --- a/automerge-js/test/text_test.ts +++ b/automerge-js/test/text_test.ts @@ -603,7 +603,8 @@ describe('Automerge.Text', () => { applyDeltaDocToAutomergeText(delta, doc) }) - assert.strictEqual(s2.text.join(''), 'Hello reader') + //assert.strictEqual(s2.text.join(''), 'Hello reader') + assert.strictEqual(s2.text.toString(), 'Hello reader') }) it('should apply an insert with control characters', () => { 
diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json
index 26fa7e8f..01500ed5 100644
--- a/automerge-js/tsconfig.json
+++ b/automerge-js/tsconfig.json
@@ -2,7 +2,7 @@
 "compilerOptions": {
 "target": "es2016",
 "sourceMap": false,
- "declaration": true,
+ "declaration": false,
 "resolveJsonModule": true,
 "module": "commonjs",
 "moduleResolution": "node",
@@ -12,7 +12,7 @@
 "strict": true,
 "noFallthroughCasesInSwitch": true,
 "skipLibCheck": true,
- "outDir": "./dist/cjs"
+ "outDir": "./dist"
 },
 "include": [ "src/**/*" ],
 "exclude": [

From df8cae8a2be9a5796f94b82d3d49d1c90a9e714c Mon Sep 17 00:00:00 2001
From: Orion Henry
Date: Mon, 23 May 2022 19:25:23 +0200
Subject: [PATCH 14/17] README

---
 automerge-js/README.md | 29 +++++++++++++++++++++++++----
 1 file changed, 25 insertions(+), 4 deletions(-)

diff --git a/automerge-js/README.md b/automerge-js/README.md
index 3c5cde33..7b8da950 100644
--- a/automerge-js/README.md
+++ b/automerge-js/README.md
@@ -1,6 +1,27 @@
-## Todo
+## Automerge JS
+
+This is a reimplementation of Automerge as a JavaScript wrapper around the "automerge-wasm" package.
+
+This package is in alpha and feedback is welcome.
+
+The primary differences between using this package and "automerge" are as follows:
+
+1. The low-level API needs to be plugged in via the `use` function. The only current implementation is "automerge-wasm", but another could be used in theory.
+
+```js
+import * as Automerge from "automerge-js"
+import * as wasm_api from "automerge-wasm"
+
+// browsers require an async wasm load - see automerge-wasm docs
+Automerge.use(wasm_api)
+```
+
+2. There is no front-end/back-end split, and no patch format or patch observer. These concepts don't make sense with the wasm implementation.
+
+3. The basic `Doc` object is now a Proxy object and will behave differently in a REPL environment.
+
+4. The `Text` class is currently very slow and needs to be reworked.
+
+Beyond this, please refer to the Automerge [README](http://github.com/automerge/automerge/) for further information.
-1. write a readme
-1. publish package
-1.
make sure the example code works with published packages From 9460d5948e653949905c3c7b7b8ab3b877446917 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Fri, 27 May 2022 08:52:36 -0700 Subject: [PATCH 15/17] patch2 wip --- automerge-wasm/src/interop.rs | 6 ++ automerge-wasm/src/lib.rs | 47 ++++++++---- automerge-wasm/test/patch.ts | 113 +++++++++++++++++++++++++++++ automerge/examples/watch.rs | 45 ++++++------ automerge/src/automerge.rs | 30 ++++---- automerge/src/automerge/tests.rs | 4 + automerge/src/lib.rs | 1 + automerge/src/op_observer.rs | 90 ++++++++++++++++++----- automerge/src/op_set.rs | 40 ++++++++-- automerge/src/path.rs | 21 ++++++ automerge/src/transaction/inner.rs | 15 +++- edit-trace/automerge-wasm.js | 5 ++ 12 files changed, 333 insertions(+), 84 deletions(-) create mode 100644 automerge-wasm/test/patch.ts create mode 100644 automerge/src/path.rs diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index bc17c018..17042501 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -346,6 +346,12 @@ pub(crate) fn to_objtype( } } +pub(crate) fn export_path(path: Vec, key: Prop) -> Array { + let path: Array = path.into_iter().map(|p| JsValue::from(p)).collect(); + path.push(&key.into()); + return path; +} + pub(crate) fn get_heads(heads: Option) -> Option> { let heads = heads?; let heads: Result, _> = heads.iter().map(|j| j.into_serde()).collect(); diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index db948704..870eaee4 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -43,8 +43,8 @@ mod sync; mod value; use interop::{ - get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, to_js_err, - to_objtype, to_prop, AR, JS, + export_path, get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, + to_js_err, to_objtype, to_prop, AR, JS, }; use sync::SyncState; use value::{datatype, ScalarValue}; @@ -451,17 +451,19 @@ impl Automerge { .map_or_else(Vec::new, |o| o.take_patches()); let result = Array::new(); for p in patches { - let patch = Object::new(); + let patch = Array::new(); match p { Patch::Put { obj, + path, key, value, conflict, } => { js_set(&patch, "action", "put")?; - js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "key", key)?; + //js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "path", export_path(path, key))?; + //js_set(&patch, "key", key)?; match value { (Value::Object(obj_type), obj_id) => { js_set(&patch, "datatype", obj_type.to_string())?; @@ -472,13 +474,19 @@ impl Automerge { js_set(&patch, "value", ScalarValue(value))?; } }; - js_set(&patch, "conflict", conflict)?; + //js_set(&patch, "conflict", conflict)?; } - Patch::Insert { obj, index, value } => { + Patch::Insert { + obj, + path, + index, + value, + } => { js_set(&patch, "action", "insert")?; - js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "key", index as f64)?; + //js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "path", export_path(path, index.into()))?; + //js_set(&patch, "key", index as f64)?; match value { (Value::Object(obj_type), obj_id) => { js_set(&patch, "datatype", obj_type.to_string())?; @@ -491,17 +499,24 @@ impl Automerge { }; } - Patch::Increment { obj, key, value } => { + Patch::Increment { + obj, + path, + key, + value, + } => { js_set(&patch, "action", "increment")?; - js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "key", key)?; - js_set(&patch, "value", value.0)?; + //js_set(&patch, "obj", obj.to_string())?; 
+ js_set(&patch, "path", export_path(path, key))?; + //js_set(&patch, "key", key)?; + js_set(&patch, "value", value.0 as f64)?; } - Patch::Delete { obj, key } => { + Patch::Delete { obj, path, key } => { js_set(&patch, "action", "delete")?; - js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "key", key)?; + //js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "path", export_path(path, key))?; + //js_set(&patch, "key", key)?; } } result.push(&patch); diff --git a/automerge-wasm/test/patch.ts b/automerge-wasm/test/patch.ts new file mode 100644 index 00000000..ad7edadf --- /dev/null +++ b/automerge-wasm/test/patch.ts @@ -0,0 +1,113 @@ +import { describe, it } from 'mocha'; +//@ts-ignore +import assert from 'assert' +//@ts-ignore +import init, { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import { Prop } from '..'; + +function patchValue(patch: any) : any { + switch (patch.datatype) { + case "map": + return {} + case "list": + case "text": + return [] + default: + return patch.value + } +} + +function applyPatch(obj: any, path: Prop[], patch: any) : any { + let prop = path.shift(); + if (typeof prop === 'number' && Array.isArray(obj)) { + return applyPatchToArray(obj, prop, path, patch) + } + if (typeof prop === 'string' && typeof obj === 'object') { + return applyPatchToObject(obj, prop, path, patch) + } + return obj +} + +type Obj = { [key:string]: any } + +function applyPatchToObject(obj: Obj, prop: string, path: Prop[], patch: any) : any { + if (path.length === 0) { + switch (patch.action) { + case "increment": + return { ... obj, [prop]: obj[prop] + patchValue(patch) } + case "put": + return { ... obj, [prop]: patchValue(patch) } + case "delete": + let tmp = { ... obj } + delete tmp[prop] + return tmp + default: + throw new RangeError(`Invalid patch ${patch}`) + } + } else { + return { ... obj, [prop]: applyPatch(obj[prop], path, patch) } + } +} + +function applyPatchToArray(obj: Array, prop: number, path: Prop[], patch: any) : any { + if (path.length === 0) { + switch (patch.action) { + case "increment": + return [ ... obj.slice(0,prop), obj[prop] + patchValue(patch), ... obj.slice(prop + 1) ] + case "put": + return [ ... obj.slice(0,prop), patchValue(patch), ... obj.slice(prop + 1) ] + case "insert": + return [ ... obj.slice(0,prop), patchValue(patch), ... obj.slice(prop) ] + case "delete": + console.log("obj=", obj) + let tmp = [... obj.slice(0,prop), ... obj.slice(prop + 1) ] + console.log("tmp=", tmp) + return [... obj.slice(0,prop), ... obj.slice(prop + 1) ] + default: + throw new RangeError(`Invalid patch ${patch}`) + } + } else { + return [ ... obj.slice(0,prop), applyPatch(obj[prop], path, patch), ... 
obj.slice(prop + 1) ] + } +} + +function applyPatches(obj: any, patches: any) { + for (let patch of patches) { + obj = applyPatch(obj, patch.path, patch) + } + return obj +} + +describe('Automerge', () => { + describe('patches', () => { + it.only('can apply nested patches', () => { + const doc1 = create() + doc1.enablePatches(true) + doc1.put("/", "str", "value") + doc1.put("/", "num", 0) + doc1.delete("/", "num") + doc1.put("/", "counter", 0, "counter") + doc1.increment("/", "counter", 100) + doc1.increment("/", "counter", 1) + doc1.put("/", "bin", new Uint8Array([1,2,3])) + doc1.put("/", "bool", true) + let sub = doc1.putObject("/", "sub", {}) + let list = doc1.putObject("/", "list", [1,2,3,4,5,6]) + doc1.push("/list", 100, "counter"); + doc1.increment("/list", 6, 10); + let sublist = doc1.putObject("/sub", "list", [1,2,3,4,[ 1,2,3,[4,{ five: "six" } ] ] ]) + doc1.put(sub, "str", "value") + //doc1.delete("/sub/list", 0) + doc1.put("/sub", "num", 0) + doc1.put("/sub", "bin", new Uint8Array([1,2,3])) + doc1.put("/sub", "bool", true) + let subsub = doc1.putObject("/sub", "sub", {}) + doc1.put("/sub/sub", "num", 0) + doc1.put("/sub/sub", "bin", new Uint8Array([1,2,3])) + doc1.put("/sub/sub", "bool", true) + let patches = doc1.popPatches() + let js = applyPatches({}, patches) + assert.deepEqual(js,doc1.materialize("/")) + }) + }) +}) diff --git a/automerge/examples/watch.rs b/automerge/examples/watch.rs index d9668497..9d67f73d 100644 --- a/automerge/examples/watch.rs +++ b/automerge/examples/watch.rs @@ -19,7 +19,7 @@ fn main() { }, ) .unwrap(); - get_changes(&doc, observer.take_patches()); + get_changes(observer.take_patches()); let mut tx = doc.transaction(); let map = tx @@ -37,50 +37,49 @@ fn main() { let m = tx.insert_object(&list, 2, automerge::ObjType::Map).unwrap(); tx.put(&m, "hi", 2).unwrap(); let _heads3 = tx.commit_with(CommitOptions::default().with_op_observer(&mut observer)); - get_changes(&doc, observer.take_patches()); + get_changes(observer.take_patches()); } -fn get_changes(doc: &Automerge, patches: Vec) { +fn get_changes(patches: Vec) { for patch in patches { match patch { Patch::Put { obj, + path, key, value, conflict: _, } => { println!( "put {:?} at {:?} in obj {:?}, object path {:?}", - value, - key, - obj, - doc.path_to_object(&obj) + value, key, obj, path, ) } - Patch::Insert { obj, index, value } => { + Patch::Insert { + obj, + index, + value, + path, + } => { println!( "insert {:?} at {:?} in obj {:?}, object path {:?}", - value, - index, - obj, - doc.path_to_object(&obj) + value, index, obj, path, ) } - Patch::Increment { obj, key, value } => { + Patch::Increment { + obj, + key, + value, + path, + } => { println!( "increment {:?} in obj {:?} by {:?}, object path {:?}", - key, - obj, - value, - doc.path_to_object(&obj) + key, obj, value, path, ) } - Patch::Delete { obj, key } => println!( - "delete {:?} in obj {:?}, object path {:?}", - key, - obj, - doc.path_to_object(&obj) - ), + Patch::Delete { obj, key, path } => { + println!("delete {:?} in obj {:?}, object path {:?}", key, obj, path) + } } } } diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index 41ac9579..f0a29acc 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -230,8 +230,8 @@ impl Automerge { None } else { self.ops - .parent_object(&obj) - .map(|(id, key)| (self.id_to_exid(id.0), self.export_key(id, key))) + .parent_prop(&obj) + .map(|(id, prop)| (self.id_to_exid(id.0), prop)) } } else { None @@ -249,20 +249,22 @@ impl Automerge { path } - /// Export a key 
to a prop. - fn export_key(&self, obj: ObjId, key: Key) -> Prop { - match key { - Key::Map(m) => Prop::Map(self.ops.m.props.get(m).into()), - Key::Seq(opid) => { - let i = self - .ops - .search(&obj, query::ElemIdPos::new(opid)) - .index() - .unwrap(); - Prop::Seq(i) + /* + /// Export a key to a prop. + fn export_key(&self, obj: ObjId, key: Key) -> Prop { + match key { + Key::Map(m) => Prop::Map(self.ops.m.props.get(m).into()), + Key::Seq(opid) => { + let i = self + .ops + .search(&obj, query::ElemIdPos::new(opid)) + .index() + .unwrap(); + Prop::Seq(i) + } } } - } + */ /// Get the keys of the object `obj`. /// diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index dc4204e1..b832168e 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -1446,6 +1446,7 @@ fn observe_counter_change_application_overwrite() { observer.take_patches(), vec![Patch::Put { obj: ExId::Root, + path: vec![], key: Prop::Map("counter".into()), value: ( ScalarValue::Str("mystring".into()).into(), @@ -1488,6 +1489,7 @@ fn observe_counter_change_application() { vec![ Patch::Put { obj: ExId::Root, + path: vec![], key: Prop::Map("counter".into()), value: ( ScalarValue::counter(1).into(), @@ -1497,11 +1499,13 @@ fn observe_counter_change_application() { }, Patch::Increment { obj: ExId::Root, + path: vec![], key: Prop::Map("counter".into()), value: (2, ExId::Id(2, doc.get_actor().clone(), 0)), }, Patch::Increment { obj: ExId::Root, + path: vec![], key: Prop::Map("counter".into()), value: (5, ExId::Id(3, doc.get_actor().clone(), 0)), } diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index c011d2de..29c34142 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -76,6 +76,7 @@ mod op_set; mod op_tree; mod options; mod parents; +mod path; mod query; pub mod sync; pub mod transaction; diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index 96139bab..5d664054 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -1,4 +1,5 @@ use crate::exid::ExId; +use crate::path::Path; use crate::Prop; use crate::Value; @@ -6,45 +7,66 @@ use crate::Value; pub trait OpObserver { /// A new value has been inserted into the given object. /// - /// - `objid`: the object that has been inserted into. + /// - `obj`: the object that has been inserted into. /// - `index`: the index the new value has been inserted at. /// - `tagged_value`: the value that has been inserted and the id of the operation that did the /// insert. - fn insert(&mut self, objid: ExId, index: usize, tagged_value: (Value<'_>, ExId)); + fn insert(&mut self, obj: ExId, path: Path<'_>, index: usize, tagged_value: (Value<'_>, ExId)); /// A new value has been put into the given object. /// - /// - `objid`: the object that has been put into. + /// - `obj`: the object that has been put into. /// - `key`: the key that the value as been put at. /// - `tagged_value`: the value that has been put into the object and the id of the operation /// that did the put. /// - `conflict`: whether this put conflicts with other operations. - fn put(&mut self, objid: ExId, key: Prop, tagged_value: (Value<'_>, ExId), conflict: bool); + fn put( + &mut self, + obj: ExId, + path: Path<'_>, + key: Prop, + tagged_value: (Value<'_>, ExId), + conflict: bool, + ); /// A counter has been incremented. /// - /// - `objid`: the object that contains the counter. + /// - `obj`: the object that contains the counter. /// - `key`: they key that the chounter is at. 
/// - `tagged_value`: the amount the counter has been incremented by, and the the id of the
 /// increment operation.
- fn increment(&mut self, objid: ExId, key: Prop, tagged_value: (i64, ExId));
+ fn increment(&mut self, obj: ExId, path: Path<'_>, key: Prop, tagged_value: (i64, ExId));

 /// A value has beeen deleted.
 ///
- /// - `objid`: the object that has been deleted in.
+ /// - `obj`: the object that has been deleted in.
 /// - `key`: the key of the value that has been deleted.
- fn delete(&mut self, objid: ExId, key: Prop);
+ fn delete(&mut self, obj: ExId, path: Path<'_>, key: Prop);
 }

 impl OpObserver for () {
- fn insert(&mut self, _objid: ExId, _index: usize, _tagged_value: (Value<'_>, ExId)) {}
-
- fn put(&mut self, _objid: ExId, _key: Prop, _tagged_value: (Value<'_>, ExId), _conflict: bool) {
+ fn insert(
+ &mut self,
+ _obj: ExId,
+ _path: Path<'_>,
+ _index: usize,
+ _tagged_value: (Value<'_>, ExId),
+ ) {
 }

- fn increment(&mut self, _objid: ExId, _key: Prop, _tagged_value: (i64, ExId)) {}
+ fn put(
+ &mut self,
+ _obj: ExId,
+ _path: Path<'_>,
+ _key: Prop,
+ _tagged_value: (Value<'_>, ExId),
+ _conflict: bool,
+ ) {
+ }

- fn delete(&mut self, _objid: ExId, _key: Prop) {}
+ fn increment(&mut self, _obj: ExId, _path: Path<'_>, _key: Prop, _tagged_value: (i64, ExId)) {}
+
+ fn delete(&mut self, _obj: ExId, _path: Path<'_>, _key: Prop) {}
 }

 /// Capture operations into a [`Vec`] and store them as patches.
@@ -62,33 +84,57 @@ impl VecOpObserver {
 }

 impl OpObserver for VecOpObserver {
- fn insert(&mut self, obj_id: ExId, index: usize, (value, id): (Value<'_>, ExId)) {
+ fn insert(
+ &mut self,
+ obj_id: ExId,
+ path: Path<'_>,
+ index: usize,
+ (value, id): (Value<'_>, ExId),
+ ) {
+ let mut path = path.collect::<Vec<_>>();
+ path.reverse();
 self.patches.push(Patch::Insert {
 obj: obj_id,
+ path,
 index,
 value: (value.into_owned(), id),
 });
 }

- fn put(&mut self, objid: ExId, key: Prop, (value, id): (Value<'_>, ExId), conflict: bool) {
+ fn put(
+ &mut self,
+ obj: ExId,
+ path: Path<'_>,
+ key: Prop,
+ (value, id): (Value<'_>, ExId),
+ conflict: bool,
+ ) {
+ let mut path = path.collect::<Vec<_>>();
+ path.reverse();
 self.patches.push(Patch::Put {
- obj: objid,
+ obj,
+ path,
 key,
 value: (value.into_owned(), id),
 conflict,
 });
 }

- fn increment(&mut self, objid: ExId, key: Prop, tagged_value: (i64, ExId)) {
+ fn increment(&mut self, obj: ExId, path: Path<'_>, key: Prop, tagged_value: (i64, ExId)) {
+ let mut path = path.collect::<Vec<_>>();
+ path.reverse();
 self.patches.push(Patch::Increment {
- obj: objid,
+ obj,
+ path,
 key,
 value: tagged_value,
 });
 }

- fn delete(&mut self, objid: ExId, key: Prop) {
- self.patches.push(Patch::Delete { obj: objid, key })
+ fn delete(&mut self, obj: ExId, path: Path<'_>, key: Prop) {
+ let mut path = path.collect::<Vec<_>>();
+ path.reverse();
+ self.patches.push(Patch::Delete { obj, path, key })
 }
 }

@@ -99,6 +145,7 @@ pub enum Patch {
 Put {
 /// The object that was put into.
 obj: ExId,
+ path: Vec<Prop>,
 /// The key that the new value was put at.
 key: Prop,
 /// The value that was put, and the id of the operation that put it there.
@@ -110,6 +157,7 @@ pub enum Patch {
 Insert {
 /// The object that was inserted into.
 obj: ExId,
+ path: Vec<Prop>,
 /// The index that the new value was inserted at.
 index: usize,
 /// The value that was inserted, and the id of the operation that inserted it there.
@@ -119,6 +167,7 @@ pub enum Patch {
 Increment {
 /// The object that was incremented in.
 obj: ExId,
+ path: Vec<Prop>,
 /// The key that was incremented.
key: Prop,
 /// The amount that the counter was incremented by, and the id of the operation that
@@ -129,6 +178,7 @@ pub enum Patch {
 Delete {
 /// The object that was deleted from.
 obj: ExId,
+ path: Vec<Prop>,
 /// The key that was deleted.
 key: Prop,
 },
diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs
index e1fe7501..79d42730 100644
--- a/automerge/src/op_set.rs
+++ b/automerge/src/op_set.rs
@@ -2,8 +2,9 @@
 use crate::clock::Clock;
 use crate::exid::ExId;
 use crate::indexed_cache::IndexedCache;
 use crate::op_tree::{self, OpTree};
+use crate::path::Path;
 use crate::query::{self, OpIdSearch, TreeQuery};
-use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpType};
+use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpType, Prop};
 use crate::{ObjType, OpObserver};
 use fxhash::FxBuildHasher;
 use std::cmp::Ordering;
@@ -220,21 +221,21 @@ impl OpSetInternal {

 if op.insert {
 let value = (op.value(), self.id_to_exid(op.id));
- observer.insert(ex_obj, seen, value);
+ observer.insert(ex_obj, self.path(obj), seen, value);
 } else if op.is_delete() {
 if let Some(winner) = &values.last() {
 let value = (winner.value(), self.id_to_exid(winner.id));
 let conflict = values.len() > 1;
- observer.put(ex_obj, key, value, conflict);
+ observer.put(ex_obj, self.path(obj), key, value, conflict);
 } else {
- observer.delete(ex_obj, key);
+ observer.delete(ex_obj, self.path(obj), key);
 }
 } else if let Some(value) = op.get_increment_value() {
 // only observe this increment if the counter is visible, i.e. the counter's
 // create op is in the values
 if values.iter().any(|value| op.pred.contains(&value.id)) {
 // we have observed the value
- observer.increment(ex_obj, key, (value, self.id_to_exid(op.id)));
+ observer.increment(ex_obj, self.path(obj), key, (value, self.id_to_exid(op.id)));
 }
 } else {
 let winner = if let Some(last_value) = values.last() {
@@ -248,10 +249,10 @@ impl OpSetInternal {
 };
 let value = (winner.value(), self.id_to_exid(winner.id));
 if op.is_list_op() && !had_value_before {
- observer.insert(ex_obj, seen, value);
+ observer.insert(ex_obj, self.path(obj), seen, value);
 } else {
 let conflict = !values.is_empty();
- observer.put(ex_obj, key, value, conflict);
+ observer.put(ex_obj, self.path(obj), key, value, conflict);
 }
 }

@@ -277,6 +278,31 @@ impl OpSetInternal {
 dot::render(&graph, &mut out).unwrap();
 String::from_utf8_lossy(&out[..]).to_string()
 }
+
+ pub(crate) fn parent_prop(&self, obj: &ObjId) -> Option<(ObjId, Prop)> {
+ self.parent_object(&obj)
+ .map(|(id, key)| (id, self.export_key(&id, key)))
+ }
+
+ pub(crate) fn path(&self, obj: &ObjId) -> Path<'_> {
+ Path {
+ obj: *obj,
+ op_set: self,
+ }
+ }
+
+ pub(crate) fn export_key(&self, obj: &ObjId, key: Key) -> Prop {
+ match key {
+ Key::Map(m) => Prop::Map(self.m.props.get(m).into()),
+ Key::Seq(opid) => {
+ let i = self
+ .search(&obj, query::ElemIdPos::new(opid))
+ .index()
+ .unwrap();
+ Prop::Seq(i)
+ }
+ }
+ }
 }

 impl Default for OpSetInternal {
diff --git a/automerge/src/path.rs b/automerge/src/path.rs
new file mode 100644
index 00000000..6d2064f5
--- /dev/null
+++ b/automerge/src/path.rs
@@ -0,0 +1,21 @@
+use crate::op_set::OpSet;
+use crate::types::{ObjId, Prop};
+
+#[derive(Debug)]
+pub struct Path<'a> {
+ pub(crate) obj: ObjId,
+ pub(crate) op_set: &'a OpSet,
+}
+
+impl<'a> Iterator for Path<'a> {
+ type Item = Prop;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some((obj, prop)) = self.op_set.parent_prop(&self.obj) {
+ self.obj = obj;
+ Some(prop)
+ } else {
+ None
+ }
+ }
+}
diff --git
a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index 6969e317..c4c726bb 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -49,15 +49,22 @@ impl TransactionInner { let value = (op.value(), doc.id_to_exid(op.id)); match prop { Prop::Map(_) => panic!("insert into a map"), - Prop::Seq(index) => observer.insert(ex_obj, *index, value), + Prop::Seq(index) => { + observer.insert(ex_obj, doc.ops.path(obj), *index, value) + } } } else if op.is_delete() { - observer.delete(ex_obj, prop.clone()); + observer.delete(ex_obj, doc.ops.path(obj), prop.clone()); } else if let Some(value) = op.get_increment_value() { - observer.increment(ex_obj, prop.clone(), (value, doc.id_to_exid(op.id))); + observer.increment( + ex_obj, + doc.ops.path(obj), + prop.clone(), + (value, doc.id_to_exid(op.id)), + ); } else { let value = (op.value(), doc.ops.id_to_exid(op.id)); - observer.put(ex_obj, prop.clone(), value, false); + observer.put(ex_obj, doc.ops.path(obj), prop.clone(), value, false); } } } diff --git a/edit-trace/automerge-wasm.js b/edit-trace/automerge-wasm.js index cd153c2d..108ab010 100644 --- a/edit-trace/automerge-wasm.js +++ b/edit-trace/automerge-wasm.js @@ -10,6 +10,7 @@ const Automerge = require('../automerge-wasm') const start = new Date() let doc = Automerge.create(); +doc.enablePatches(true) let text = doc.putObject("_root", "text", "", "text") for (let i = 0; i < edits.length; i++) { @@ -28,6 +29,10 @@ let t_time = new Date() let t = doc.text(text); console.log(`doc.text in ${new Date() - t_time} ms`) +let p_time = new Date() +let p = doc.popPatches(); +console.log(`doc.popPatches in ${new Date() - p_time} ms`) + if (doc.text(text) !== finalText) { throw new RangeError('ERROR: final text did not match expectation') } From 76a172a838893a79aa4e90c060056d2f022af597 Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Sat, 4 Jun 2022 21:16:31 -0400 Subject: [PATCH 16/17] replaced path with parents --- automerge-wasm/src/interop.rs | 6 +-- automerge-wasm/src/lib.rs | 18 ++------ automerge-wasm/test/patch.ts | 17 ++++++-- automerge/src/automerge.rs | 22 +--------- automerge/src/lib.rs | 1 - automerge/src/op_observer.rs | 55 +++++++++++++++---------- automerge/src/op_set.rs | 66 +++++++++++++++++++++++------- automerge/src/parents.rs | 6 ++- automerge/src/path.rs | 21 ---------- automerge/src/transaction/inner.rs | 11 +++-- 10 files changed, 117 insertions(+), 106 deletions(-) delete mode 100644 automerge/src/path.rs diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 17042501..73a97add 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -346,10 +346,10 @@ pub(crate) fn to_objtype( } } -pub(crate) fn export_path(path: Vec, key: Prop) -> Array { - let path: Array = path.into_iter().map(|p| JsValue::from(p)).collect(); +pub(crate) fn export_path(path: Vec<(ObjId, Prop)>, key: Prop) -> Array { + let path: Array = path.into_iter().map(|(_, p)| JsValue::from(p)).collect(); path.push(&key.into()); - return path; + path } pub(crate) fn get_heads(heads: Option) -> Option> { diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 870eaee4..5fb43cbc 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -454,11 +454,7 @@ impl Automerge { let patch = Array::new(); match p { Patch::Put { - obj, - path, - key, - value, - conflict, + path, key, value, .. 
} => { js_set(&patch, "action", "put")?; //js_set(&patch, "obj", obj.to_string())?; @@ -478,10 +474,7 @@ impl Automerge { } Patch::Insert { - obj, - path, - index, - value, + path, index, value, .. } => { js_set(&patch, "action", "insert")?; //js_set(&patch, "obj", obj.to_string())?; @@ -500,10 +493,7 @@ impl Automerge { } Patch::Increment { - obj, - path, - key, - value, + path, key, value, .. } => { js_set(&patch, "action", "increment")?; //js_set(&patch, "obj", obj.to_string())?; @@ -512,7 +502,7 @@ impl Automerge { js_set(&patch, "value", value.0 as f64)?; } - Patch::Delete { obj, path, key } => { + Patch::Delete { path, key, .. } => { js_set(&patch, "action", "delete")?; //js_set(&patch, "obj", obj.to_string())?; js_set(&patch, "path", export_path(path, key))?; diff --git a/automerge-wasm/test/patch.ts b/automerge-wasm/test/patch.ts index ad7edadf..3ef28272 100644 --- a/automerge-wasm/test/patch.ts +++ b/automerge-wasm/test/patch.ts @@ -59,9 +59,6 @@ function applyPatchToArray(obj: Array, prop: number, path: Prop[], patch: a case "insert": return [ ... obj.slice(0,prop), patchValue(patch), ... obj.slice(prop) ] case "delete": - console.log("obj=", obj) - let tmp = [... obj.slice(0,prop), ... obj.slice(prop + 1) ] - console.log("tmp=", tmp) return [... obj.slice(0,prop), ... obj.slice(prop + 1) ] default: throw new RangeError(`Invalid patch ${patch}`) @@ -73,8 +70,11 @@ function applyPatchToArray(obj: Array, prop: number, path: Prop[], patch: a function applyPatches(obj: any, patches: any) { for (let patch of patches) { + console.log("obj",obj) + console.log("patch",patch) obj = applyPatch(obj, patch.path, patch) } + console.log("obj",obj) return obj } @@ -97,7 +97,6 @@ describe('Automerge', () => { doc1.increment("/list", 6, 10); let sublist = doc1.putObject("/sub", "list", [1,2,3,4,[ 1,2,3,[4,{ five: "six" } ] ] ]) doc1.put(sub, "str", "value") - //doc1.delete("/sub/list", 0) doc1.put("/sub", "num", 0) doc1.put("/sub", "bin", new Uint8Array([1,2,3])) doc1.put("/sub", "bool", true) @@ -109,5 +108,15 @@ describe('Automerge', () => { let js = applyPatches({}, patches) assert.deepEqual(js,doc1.materialize("/")) }) + it.only('can handle deletes with nested patches', () => { + const doc1 = create() + doc1.enablePatches(true) + let list = doc1.putObject("/", "list", [1,2,3,['a','b','c']]) + //doc1.delete("/list", 1); + doc1.push("/list", 'hello'); + let patches = doc1.popPatches() + let js = applyPatches({}, patches) + assert.deepEqual(js,doc1.materialize("/")) + }) }) }) diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index f0a29acc..bdd31b4c 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -240,7 +240,7 @@ impl Automerge { /// Get an iterator over the parents of an object. 
pub fn parents(&self, obj: ExId) -> Parents<'_> { - Parents { obj, doc: self } + self.ops.parents(&obj) } pub fn path_to_object>(&self, obj: O) -> Vec<(ExId, Prop)> { @@ -416,25 +416,7 @@ impl Automerge { } pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result { - match id { - ExId::Root => Ok(ObjId::root()), - ExId::Id(ctr, actor, idx) => { - // do a direct get here b/c this could be foriegn and not be within the array - // bounds - if self.ops.m.actors.cache.get(*idx) == Some(actor) { - Ok(ObjId(OpId(*ctr, *idx))) - } else { - // FIXME - make a real error - let idx = self - .ops - .m - .actors - .lookup(actor) - .ok_or(AutomergeError::Fail)?; - Ok(ObjId(OpId(*ctr, idx))) - } - } - } + self.ops.exid_to_obj(id) } pub(crate) fn id_to_exid(&self, id: OpId) -> ExId { diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index 29c34142..c011d2de 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -76,7 +76,6 @@ mod op_set; mod op_tree; mod options; mod parents; -mod path; mod query; pub mod sync; pub mod transaction; diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index 5d664054..f2682d96 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::path::Path; +use crate::parents::Parents; use crate::Prop; use crate::Value; @@ -11,7 +11,13 @@ pub trait OpObserver { /// - `index`: the index the new value has been inserted at. /// - `tagged_value`: the value that has been inserted and the id of the operation that did the /// insert. - fn insert(&mut self, obj: ExId, path: Path<'_>, index: usize, tagged_value: (Value<'_>, ExId)); + fn insert( + &mut self, + obj: ExId, + parents: Parents<'_>, + index: usize, + tagged_value: (Value<'_>, ExId), + ); /// A new value has been put into the given object. /// @@ -23,7 +29,7 @@ pub trait OpObserver { fn put( &mut self, obj: ExId, - path: Path<'_>, + parents: Parents<'_>, key: Prop, tagged_value: (Value<'_>, ExId), conflict: bool, @@ -35,20 +41,20 @@ pub trait OpObserver { /// - `key`: they key that the chounter is at. /// - `tagged_value`: the amount the counter has been incremented by, and the the id of the /// increment operation. - fn increment(&mut self, obj: ExId, path: Path<'_>, key: Prop, tagged_value: (i64, ExId)); + fn increment(&mut self, obj: ExId, parents: Parents<'_>, key: Prop, tagged_value: (i64, ExId)); /// A value has beeen deleted. /// /// - `obj`: the object that has been deleted in. /// - `key`: the key of the value that has been deleted. - fn delete(&mut self, obj: ExId, path: Path<'_>, key: Prop); + fn delete(&mut self, obj: ExId, parents: Parents<'_>, key: Prop); } impl OpObserver for () { fn insert( &mut self, _obj: ExId, - _path: Path<'_>, + _parents: Parents<'_>, _index: usize, _tagged_value: (Value<'_>, ExId), ) { @@ -57,16 +63,23 @@ impl OpObserver for () { fn put( &mut self, _obj: ExId, - _path: Path<'_>, + _parents: Parents<'_>, _key: Prop, _tagged_value: (Value<'_>, ExId), _conflict: bool, ) { } - fn increment(&mut self, _obj: ExId, _path: Path<'_>, _key: Prop, _tagged_value: (i64, ExId)) {} + fn increment( + &mut self, + _obj: ExId, + _parents: Parents<'_>, + _key: Prop, + _tagged_value: (i64, ExId), + ) { + } - fn delete(&mut self, _obj: ExId, _path: Path<'_>, _key: Prop) {} + fn delete(&mut self, _obj: ExId, _parents: Parents<'_>, _key: Prop) {} } /// Capture operations into a [`Vec`] and store them as patches. 
@@ -87,11 +100,11 @@ impl OpObserver for VecOpObserver {
 fn insert(
 &mut self,
 obj_id: ExId,
- path: Path<'_>,
+ parents: Parents<'_>,
 index: usize,
 (value, id): (Value<'_>, ExId),
 ) {
- let mut path = path.collect::<Vec<_>>();
+ let mut path = parents.collect::<Vec<_>>();
 path.reverse();
 self.patches.push(Patch::Insert {
 obj: obj_id,
@@ -104,12 +117,12 @@ impl OpObserver for VecOpObserver {
 fn put(
 &mut self,
 obj: ExId,
- path: Path<'_>,
+ parents: Parents<'_>,
 key: Prop,
 (value, id): (Value<'_>, ExId),
 conflict: bool,
 ) {
- let mut path = path.collect::<Vec<_>>();
+ let mut path = parents.collect::<Vec<_>>();
 path.reverse();
 self.patches.push(Patch::Put {
 obj,
@@ -120,8 +133,8 @@ impl OpObserver for VecOpObserver {
 });
 }

- fn increment(&mut self, obj: ExId, path: Path<'_>, key: Prop, tagged_value: (i64, ExId)) {
- let mut path = path.collect::<Vec<_>>();
+ fn increment(&mut self, obj: ExId, parents: Parents<'_>, key: Prop, tagged_value: (i64, ExId)) {
+ let mut path = parents.collect::<Vec<_>>();
 path.reverse();
 self.patches.push(Patch::Increment {
 obj,
@@ -131,8 +144,8 @@ impl OpObserver for VecOpObserver {
 });
 }

- fn delete(&mut self, obj: ExId, path: Path<'_>, key: Prop) {
- let mut path = path.collect::<Vec<_>>();
+ fn delete(&mut self, obj: ExId, parents: Parents<'_>, key: Prop) {
+ let mut path = parents.collect::<Vec<_>>();
 path.reverse();
 self.patches.push(Patch::Delete { obj, path, key })
 }
 }

@@ -145,7 +158,7 @@ pub enum Patch {
 Put {
 /// The object that was put into.
 obj: ExId,
- path: Vec<Prop>,
+ path: Vec<(ExId, Prop)>,
 /// The key that the new value was put at.
 key: Prop,
 /// The value that was put, and the id of the operation that put it there.
@@ -157,7 +170,7 @@ pub enum Patch {
 Insert {
 /// The object that was inserted into.
 obj: ExId,
- path: Vec<Prop>,
+ path: Vec<(ExId, Prop)>,
 /// The index that the new value was inserted at.
 index: usize,
 /// The value that was inserted, and the id of the operation that inserted it there.
@@ -167,7 +180,7 @@ pub enum Patch {
 Increment {
 /// The object that was incremented in.
 obj: ExId,
- path: Vec<Prop>,
+ path: Vec<(ExId, Prop)>,
 /// The key that was incremented.
 key: Prop,
 /// The amount that the counter was incremented by, and the id of the operation that
@@ -178,7 +191,7 @@ pub enum Patch {
 Delete {
 /// The object that was deleted from.
 obj: ExId,
- path: Vec<Prop>,
+ path: Vec<(ExId, Prop)>,
 /// The key that was deleted.
key: Prop, }, diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 79d42730..9a02aa9a 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -2,9 +2,10 @@ use crate::clock::Clock; use crate::exid::ExId; use crate::indexed_cache::IndexedCache; use crate::op_tree::{self, OpTree}; -use crate::path::Path; use crate::query::{self, OpIdSearch, TreeQuery}; use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpType, Prop}; +use crate::AutomergeError; +use crate::Parents; use crate::{ObjType, OpObserver}; use fxhash::FxBuildHasher; use std::cmp::Ordering; @@ -54,7 +55,7 @@ impl OpSetInternal { } } - pub(crate) fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key)> { + pub(crate) fn parent(&self, obj: &ObjId) -> Option<(ObjId, Key)> { let parent = self.trees.get(obj)?.parent?; let key = self.search(&parent, OpIdSearch::new(obj.0)).key().unwrap(); Some((parent, key)) @@ -221,21 +222,25 @@ impl OpSetInternal { if op.insert { let value = (op.value(), self.id_to_exid(op.id)); - observer.insert(ex_obj, self.path(obj), seen, value); + let parents = self.parents(&ex_obj); + observer.insert(ex_obj, parents, seen, value); } else if op.is_delete() { if let Some(winner) = &values.last() { let value = (winner.value(), self.id_to_exid(winner.id)); let conflict = values.len() > 1; - observer.put(ex_obj, self.path(obj), key, value, conflict); + let parents = self.parents(&ex_obj); + observer.put(ex_obj, parents, key, value, conflict); } else { - observer.delete(ex_obj, self.path(obj), key); + let parents = self.parents(&ex_obj); + observer.delete(ex_obj, parents, key); } } else if let Some(value) = op.get_increment_value() { // only observe this increment if the counter is visible, i.e. the counter's // create op is in the values if values.iter().any(|value| op.pred.contains(&value.id)) { // we have observed the value - observer.increment(ex_obj, self.path(obj), key, (value, self.id_to_exid(op.id))); + let parents = self.parents(&ex_obj); + observer.increment(ex_obj, parents, key, (value, self.id_to_exid(op.id))); } } else { let winner = if let Some(last_value) = values.last() { @@ -249,10 +254,12 @@ impl OpSetInternal { }; let value = (winner.value(), self.id_to_exid(winner.id)); if op.is_list_op() && !had_value_before { - observer.insert(ex_obj, self.path(obj), seen, value); + let parents = self.parents(&ex_obj); + observer.insert(ex_obj, parents, seen, value); } else { let conflict = !values.is_empty(); - observer.put(ex_obj, self.path(obj), key, value, conflict); + let parents = self.parents(&ex_obj); + observer.put(ex_obj, parents, key, value, conflict); } } @@ -280,14 +287,45 @@ impl OpSetInternal { } pub(crate) fn parent_prop(&self, obj: &ObjId) -> Option<(ObjId, Prop)> { - self.parent_object(&obj) + self.parent(obj) .map(|(id, key)| (id, self.export_key(&id, key))) } - pub(crate) fn path(&self, obj: &ObjId) -> Path<'_> { - Path { - obj: *obj, - op_set: self, + pub(crate) fn parents(&self, obj: &ExId) -> Parents<'_> { + Parents { + obj: obj.clone(), + doc: self, + } + } + + pub(crate) fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + if obj == ObjId::root() { + // root has no parent + None + } else { + self.parent_prop(&obj) + .map(|(id, prop)| (self.id_to_exid(id.0), prop)) + } + } else { + None + } + } + + pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result { + match id { + ExId::Root => Ok(ObjId::root()), + ExId::Id(ctr, actor, idx) => { + // do a direct get here b/c this could be foriegn and not be 
within the array + // bounds + if self.m.actors.cache.get(*idx) == Some(actor) { + Ok(ObjId(OpId(*ctr, *idx))) + } else { + // FIXME - make a real error + let idx = self.m.actors.lookup(actor).ok_or(AutomergeError::Fail)?; + Ok(ObjId(OpId(*ctr, idx))) + } + } } } @@ -296,7 +334,7 @@ impl OpSetInternal { Key::Map(m) => Prop::Map(self.m.props.get(m).into()), Key::Seq(opid) => { let i = self - .search(&obj, query::ElemIdPos::new(opid)) + .search(obj, query::ElemIdPos::new(opid)) .index() .unwrap(); Prop::Seq(i) diff --git a/automerge/src/parents.rs b/automerge/src/parents.rs index a6c891bd..d562a9d8 100644 --- a/automerge/src/parents.rs +++ b/automerge/src/parents.rs @@ -1,9 +1,11 @@ -use crate::{exid::ExId, Automerge, Prop}; +use crate::exid::ExId; +use crate::op_set::OpSet; +use crate::Prop; #[derive(Debug)] pub struct Parents<'a> { pub(crate) obj: ExId, - pub(crate) doc: &'a Automerge, + pub(crate) doc: &'a OpSet, } impl<'a> Iterator for Parents<'a> { diff --git a/automerge/src/path.rs b/automerge/src/path.rs deleted file mode 100644 index 6d2064f5..00000000 --- a/automerge/src/path.rs +++ /dev/null @@ -1,21 +0,0 @@ -use crate::op_set::OpSet; -use crate::types::{ObjId, Prop}; - -#[derive(Debug)] -pub struct Path<'a> { - pub(crate) obj: ObjId, - pub(crate) op_set: &'a OpSet, -} - -impl<'a> Iterator for Path<'a> { - type Item = Prop; - - fn next(&mut self) -> Option { - if let Some((obj, prop)) = self.op_set.parent_prop(&self.obj) { - self.obj = obj; - Some(prop) - } else { - None - } - } -} diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index c4c726bb..ce6d0e73 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -45,26 +45,25 @@ impl TransactionInner { if let Some(observer) = op_observer { for (obj, prop, op) in &self.operations { let ex_obj = doc.ops.id_to_exid(obj.0); + let parents = doc.ops.parents(&ex_obj); if op.insert { let value = (op.value(), doc.id_to_exid(op.id)); match prop { Prop::Map(_) => panic!("insert into a map"), - Prop::Seq(index) => { - observer.insert(ex_obj, doc.ops.path(obj), *index, value) - } + Prop::Seq(index) => observer.insert(ex_obj, parents, *index, value), } } else if op.is_delete() { - observer.delete(ex_obj, doc.ops.path(obj), prop.clone()); + observer.delete(ex_obj, parents, prop.clone()); } else if let Some(value) = op.get_increment_value() { observer.increment( ex_obj, - doc.ops.path(obj), + parents, prop.clone(), (value, doc.id_to_exid(op.id)), ); } else { let value = (op.value(), doc.ops.id_to_exid(op.id)); - observer.put(ex_obj, doc.ops.path(obj), prop.clone(), value, false); + observer.put(ex_obj, parents, prop.clone(), value, false); } } } From 68ed77a5c68f2d8a0b421ab9a6a72ba7dc0a17fc Mon Sep 17 00:00:00 2001 From: Orion Henry Date: Tue, 7 Jun 2022 11:29:43 +0200 Subject: [PATCH 17/17] merge OpObserver into AutoCommit --- automerge-js/tsconfig.json | 2 +- automerge-wasm/src/interop.rs | 69 ++++++- automerge-wasm/src/lib.rs | 180 +++--------------- automerge-wasm/test/patch.ts | 64 ++++++- automerge/examples/quickstart.rs | 6 +- automerge/examples/watch.rs | 6 +- automerge/src/autocommit.rs | 120 ++++++------ automerge/src/automerge.rs | 46 +++-- automerge/src/automerge/tests.rs | 37 ++-- automerge/src/change.rs | 2 +- automerge/src/lib.rs | 1 - automerge/src/op_observer.rs | 99 ++-------- automerge/src/op_set.rs | 4 +- automerge/src/options.rs | 18 +- automerge/src/sync.rs | 8 +- automerge/src/transaction/commit.rs | 12 +- automerge/src/transaction/inner.rs | 71 ++++--- 
.../src/transaction/manual_transaction.rs | 8 +- automerge/tests/test.rs | 12 +- 19 files changed, 349 insertions(+), 416 deletions(-) diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json index 01500ed5..bad1409e 100644 --- a/automerge-js/tsconfig.json +++ b/automerge-js/tsconfig.json @@ -11,7 +11,7 @@ "forceConsistentCasingInFileNames": true, "strict": true, "noFallthroughCasesInSwitch": true, - "skipLibCheck": true, + "skipLibCheck": false, "outDir": "./dist" }, "include": [ "src/**/*" ], diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs index 73a97add..9f863e80 100644 --- a/automerge-wasm/src/interop.rs +++ b/automerge-wasm/src/interop.rs @@ -1,5 +1,6 @@ use automerge as am; use automerge::transaction::Transactable; +use automerge::Patch; use automerge::{Change, ChangeHash, Prop}; use js_sys::{Array, Object, Reflect, Uint8Array}; use std::collections::{BTreeSet, HashSet}; @@ -7,7 +8,7 @@ use std::fmt::Display; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; -use crate::{ObjId, ScalarValue, Value}; +use crate::{datatype, ObjId, ScalarValue, Value}; pub(crate) struct JS(pub(crate) JsValue); pub(crate) struct AR(pub(crate) Array); @@ -352,6 +353,72 @@ pub(crate) fn export_path(path: Vec<(ObjId, Prop)>, key: Prop) -> Array { path } +pub(crate) fn export_patches(patches: Vec) -> Result { + let result = Array::new(); + for p in patches { + let patch = Array::new(); + match p { + Patch::Put { + path, key, value, .. + } => { + js_set(&patch, "action", "put")?; + //js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "path", export_path(path, key))?; + //js_set(&patch, "key", key)?; + match value { + (Value::Object(obj_type), obj_id) => { + js_set(&patch, "datatype", obj_type.to_string())?; + js_set(&patch, "value", obj_id.to_string())?; + } + (Value::Scalar(value), _) => { + js_set(&patch, "datatype", datatype(&value))?; + js_set(&patch, "value", ScalarValue(value))?; + } + }; + //js_set(&patch, "conflict", conflict)?; + } + + Patch::Insert { + path, index, value, .. + } => { + js_set(&patch, "action", "insert")?; + //js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "path", export_path(path, index.into()))?; + //js_set(&patch, "key", index as f64)?; + match value { + (Value::Object(obj_type), obj_id) => { + js_set(&patch, "datatype", obj_type.to_string())?; + js_set(&patch, "value", obj_id.to_string())?; + } + (Value::Scalar(value), _) => { + js_set(&patch, "datatype", datatype(&value))?; + js_set(&patch, "value", ScalarValue(value))?; + } + }; + } + + Patch::Increment { + path, key, value, .. + } => { + js_set(&patch, "action", "increment")?; + //js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "path", export_path(path, key))?; + //js_set(&patch, "key", key)?; + js_set(&patch, "value", value.0 as f64)?; + } + + Patch::Delete { path, key, .. 
} => { + js_set(&patch, "action", "delete")?; + //js_set(&patch, "obj", obj.to_string())?; + js_set(&patch, "path", export_path(path, key))?; + //js_set(&patch, "key", key)?; + } + } + result.push(&patch); + } + Ok(result) +} + pub(crate) fn get_heads(heads: Option) -> Option> { let heads = heads?; let heads: Result, _> = heads.iter().map(|j| j.into_serde()).collect(); diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 5fb43cbc..f4a8da07 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -28,10 +28,7 @@ #![allow(clippy::unused_unit)] use am::transaction::CommitOptions; use am::transaction::Transactable; -use am::ApplyOptions; use automerge as am; -use automerge::Patch; -use automerge::VecOpObserver; use automerge::{Change, ObjId, Prop, Value, ROOT}; use js_sys::{Array, Object, Uint8Array}; use std::convert::TryInto; @@ -43,7 +40,7 @@ mod sync; mod value; use interop::{ - export_path, get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, + export_patches, get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, to_js_err, to_objtype, to_prop, AR, JS, }; use sync::SyncState; @@ -64,7 +61,6 @@ static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[derive(Debug)] pub struct Automerge { doc: automerge::AutoCommit, - observer: Option, } #[wasm_bindgen] @@ -75,28 +71,13 @@ impl Automerge { let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); automerge.set_actor(a); } - Ok(Automerge { - doc: automerge, - observer: None, - }) - } - - fn ensure_transaction_closed(&mut self) { - if self.doc.pending_ops() > 0 { - let mut opts = CommitOptions::default(); - if let Some(observer) = self.observer.as_mut() { - opts.set_op_observer(observer); - } - self.doc.commit_with(opts); - } + Ok(Automerge { doc: automerge }) } #[allow(clippy::should_implement_trait)] pub fn clone(&mut self, actor: Option) -> Result { - self.ensure_transaction_closed(); let mut automerge = Automerge { doc: self.doc.clone(), - observer: None, }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -106,10 +87,8 @@ impl Automerge { } pub fn fork(&mut self, actor: Option) -> Result { - self.ensure_transaction_closed(); let mut automerge = Automerge { doc: self.doc.fork(), - observer: None, }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -123,7 +102,6 @@ impl Automerge { let deps: Vec<_> = JS(heads).try_into()?; let mut automerge = Automerge { doc: self.doc.fork_at(&deps)?, - observer: None, }; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); @@ -147,21 +125,12 @@ impl Automerge { if let Some(time) = time { commit_opts.set_time(time as i64); } - if let Some(observer) = self.observer.as_mut() { - commit_opts.set_op_observer(observer); - } let hash = self.doc.commit_with(commit_opts); JsValue::from_str(&hex::encode(&hash.0)) } pub fn merge(&mut self, other: &mut Automerge) -> Result { - self.ensure_transaction_closed(); - let options = if let Some(observer) = self.observer.as_mut() { - ApplyOptions::default().with_op_observer(observer) - } else { - ApplyOptions::default() - }; - let heads = self.doc.merge_with(&mut other.doc, options)?; + let heads = self.doc.merge(&mut other.doc)?; let heads: Array = heads .iter() .map(|h| JsValue::from_str(&hex::encode(&h.0))) @@ -424,18 +393,21 @@ impl Automerge { } #[wasm_bindgen(js_name = 
enablePatches)] - pub fn enable_patches(&mut self, enable: JsValue) -> Result<(), JsValue> { + pub fn enable_patches(&mut self, enable: JsValue) -> Result { let enable = enable .as_bool() .ok_or_else(|| to_js_err("expected boolean"))?; if enable { - if self.observer.is_none() { - self.observer = Some(VecOpObserver::default()); - } + self.doc + .enable_observer() + .map(|mut p| export_patches(p.take_patches())) + .unwrap_or_else(|| Ok(Array::new())) } else { - self.observer = None; + self.doc + .disable_observer() + .map(|mut p| export_patches(p.take_patches())) + .unwrap_or_else(|| Ok(Array::new())) } - Ok(()) } #[wasm_bindgen(js_name = popPatches)] @@ -443,75 +415,8 @@ impl Automerge { // transactions send out observer updates as they occur, not waiting for them to be // committed. // If we pop the patches then we won't be able to revert them. - self.ensure_transaction_closed(); - let patches = self - .observer - .as_mut() - .map_or_else(Vec::new, |o| o.take_patches()); - let result = Array::new(); - for p in patches { - let patch = Array::new(); - match p { - Patch::Put { - path, key, value, .. - } => { - js_set(&patch, "action", "put")?; - //js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "path", export_path(path, key))?; - //js_set(&patch, "key", key)?; - match value { - (Value::Object(obj_type), obj_id) => { - js_set(&patch, "datatype", obj_type.to_string())?; - js_set(&patch, "value", obj_id.to_string())?; - } - (Value::Scalar(value), _) => { - js_set(&patch, "datatype", datatype(&value))?; - js_set(&patch, "value", ScalarValue(value))?; - } - }; - //js_set(&patch, "conflict", conflict)?; - } - - Patch::Insert { - path, index, value, .. - } => { - js_set(&patch, "action", "insert")?; - //js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "path", export_path(path, index.into()))?; - //js_set(&patch, "key", index as f64)?; - match value { - (Value::Object(obj_type), obj_id) => { - js_set(&patch, "datatype", obj_type.to_string())?; - js_set(&patch, "value", obj_id.to_string())?; - } - (Value::Scalar(value), _) => { - js_set(&patch, "datatype", datatype(&value))?; - js_set(&patch, "value", ScalarValue(value))?; - } - }; - } - - Patch::Increment { - path, key, value, .. - } => { - js_set(&patch, "action", "increment")?; - //js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "path", export_path(path, key))?; - //js_set(&patch, "key", key)?; - js_set(&patch, "value", value.0 as f64)?; - } - - Patch::Delete { path, key, .. 
} => { - js_set(&patch, "action", "delete")?; - //js_set(&patch, "obj", obj.to_string())?; - js_set(&patch, "path", export_path(path, key))?; - //js_set(&patch, "key", key)?; - } - } - result.push(&patch); - } - Ok(result) + export_patches(self.doc.take_patches()) } pub fn length(&self, obj: JsValue, heads: Option) -> Result { @@ -531,51 +436,31 @@ impl Automerge { } pub fn save(&mut self) -> Uint8Array { - self.ensure_transaction_closed(); Uint8Array::from(self.doc.save().as_slice()) } #[wasm_bindgen(js_name = saveIncremental)] pub fn save_incremental(&mut self) -> Uint8Array { - self.ensure_transaction_closed(); let bytes = self.doc.save_incremental(); Uint8Array::from(bytes.as_slice()) } #[wasm_bindgen(js_name = loadIncremental)] pub fn load_incremental(&mut self, data: Uint8Array) -> Result { - self.ensure_transaction_closed(); let data = data.to_vec(); - let options = if let Some(observer) = self.observer.as_mut() { - ApplyOptions::default().with_op_observer(observer) - } else { - ApplyOptions::default() - }; - let len = self - .doc - .load_incremental_with(&data, options) - .map_err(to_js_err)?; + let len = self.doc.load_incremental(&data).map_err(to_js_err)?; Ok(len as f64) } #[wasm_bindgen(js_name = applyChanges)] pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> { - self.ensure_transaction_closed(); let changes: Vec<_> = JS(changes).try_into()?; - let options = if let Some(observer) = self.observer.as_mut() { - ApplyOptions::default().with_op_observer(observer) - } else { - ApplyOptions::default() - }; - self.doc - .apply_changes_with(changes, options) - .map_err(to_js_err)?; + self.doc.apply_changes(changes).map_err(to_js_err)?; Ok(()) } #[wasm_bindgen(js_name = getChanges)] pub fn get_changes(&mut self, have_deps: JsValue) -> Result { - self.ensure_transaction_closed(); let deps: Vec<_> = JS(have_deps).try_into()?; let changes = self.doc.get_changes(&deps)?; let changes: Array = changes @@ -587,7 +472,6 @@ impl Automerge { #[wasm_bindgen(js_name = getChangeByHash)] pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result { - self.ensure_transaction_closed(); let hash = hash.into_serde().map_err(to_js_err)?; let change = self.doc.get_change_by_hash(&hash); if let Some(c) = change { @@ -599,7 +483,6 @@ impl Automerge { #[wasm_bindgen(js_name = getChangesAdded)] pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result { - self.ensure_transaction_closed(); let changes = self.doc.get_changes_added(&mut other.doc); let changes: Array = changes .iter() @@ -610,7 +493,6 @@ impl Automerge { #[wasm_bindgen(js_name = getHeads)] pub fn get_heads(&mut self) -> Array { - self.ensure_transaction_closed(); let heads = self.doc.get_heads(); let heads: Array = heads .iter() @@ -627,7 +509,6 @@ impl Automerge { #[wasm_bindgen(js_name = getLastLocalChange)] pub fn get_last_local_change(&mut self) -> Result { - self.ensure_transaction_closed(); if let Some(change) = self.doc.get_last_local_change() { Ok(Uint8Array::from(change.raw_bytes())) } else { @@ -636,13 +517,11 @@ impl Automerge { } pub fn dump(&mut self) { - self.ensure_transaction_closed(); self.doc.dump() } #[wasm_bindgen(js_name = getMissingDeps)] pub fn get_missing_deps(&mut self, heads: Option) -> Result { - self.ensure_transaction_closed(); let heads = get_heads(heads).unwrap_or_default(); let deps = self.doc.get_missing_deps(&heads); let deps: Array = deps @@ -658,23 +537,16 @@ impl Automerge { state: &mut SyncState, message: Uint8Array, ) -> Result<(), JsValue> { - 
self.ensure_transaction_closed(); let message = message.to_vec(); let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?; - let options = if let Some(observer) = self.observer.as_mut() { - ApplyOptions::default().with_op_observer(observer) - } else { - ApplyOptions::default() - }; self.doc - .receive_sync_message_with(&mut state.0, message, options) + .receive_sync_message(&mut state.0, message) .map_err(to_js_err)?; Ok(()) } #[wasm_bindgen(js_name = generateSyncMessage)] pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { - self.ensure_transaction_closed(); if let Some(message) = self.doc.generate_sync_message(&mut state.0) { Ok(Uint8Array::from(message.encode().as_slice()).into()) } else { @@ -834,17 +706,23 @@ pub fn init(actor: Option) -> Result { #[wasm_bindgen(js_name = loadDoc)] pub fn load(data: Uint8Array, actor: Option) -> Result { let data = data.to_vec(); - let observer = None; - let options = ApplyOptions::<()>::default(); - let mut automerge = am::AutoCommit::load_with(&data, options).map_err(to_js_err)?; + let mut automerge = am::AutoCommit::load(&data).map_err(to_js_err)?; if let Some(s) = actor { let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); automerge.set_actor(actor); } - Ok(Automerge { - doc: automerge, - observer, - }) + Ok(Automerge { doc: automerge }) +} + +#[wasm_bindgen(js_name = loadWithPatches)] +pub fn load_with_patches(data: Uint8Array, actor: Option) -> Result { + let data = data.to_vec(); + let mut automerge = am::AutoCommit::load_with_observer(&data).map_err(to_js_err)?; + if let Some(s) = actor { + let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + automerge.set_actor(actor); + } + Ok(Automerge { doc: automerge }) } #[wasm_bindgen(js_name = encodeChange)] diff --git a/automerge-wasm/test/patch.ts b/automerge-wasm/test/patch.ts index 3ef28272..dd76bb63 100644 --- a/automerge-wasm/test/patch.ts +++ b/automerge-wasm/test/patch.ts @@ -9,19 +9,31 @@ function patchValue(patch: any) : any { switch (patch.datatype) { case "map": return {} - case "list": - case "text": + case "list": return [] + case "text": + return "" default: return patch.value } } +function patchTextValue(patch: any) : any { + if (typeof patch.value === "string" && patch.value.length == 1) { + return patch.value + } else { + return "\uFFFC" + } +} + function applyPatch(obj: any, path: Prop[], patch: any) : any { let prop = path.shift(); if (typeof prop === 'number' && Array.isArray(obj)) { return applyPatchToArray(obj, prop, path, patch) } + if (typeof prop === 'number' && typeof obj === 'string') { + return applyPatchToText(obj, prop, path, patch) + } if (typeof prop === 'string' && typeof obj === 'object') { return applyPatchToObject(obj, prop, path, patch) } @@ -41,7 +53,7 @@ function applyPatchToObject(obj: Obj, prop: string, path: Prop[], patch: any) : let tmp = { ... obj } delete tmp[prop] return tmp - default: + default: throw new RangeError(`Invalid patch ${patch}`) } } else { @@ -60,7 +72,7 @@ function applyPatchToArray(obj: Array, prop: number, path: Prop[], patch: a return [ ... obj.slice(0,prop), patchValue(patch), ... obj.slice(prop) ] case "delete": return [... obj.slice(0,prop), ... 
obj.slice(prop + 1) ] - default: + default: throw new RangeError(`Invalid patch ${patch}`) } } else { @@ -68,13 +80,29 @@ function applyPatchToArray(obj: Array<any>, prop: number, path: Prop[], patch: a } } +function applyPatchToText(obj: string, prop: number, path: Prop[], patch: any) : any { + if (path.length === 0) { + switch (patch.action) { + case "increment": + return obj + case "put": + return obj.slice(0,prop) + patchTextValue(patch) + obj.slice(prop + 1) + case "insert": + return obj.slice(0,prop) + patchTextValue(patch) + obj.slice(prop) + case "delete": + return obj.slice(0,prop) + obj.slice(prop + 1) + default: + throw new RangeError(`Invalid patch ${patch}`) + } + } else { + return obj + } +} + function applyPatches(obj: any, patches: any) { for (let patch of patches) { - console.log("obj",obj) - console.log("patch",patch) obj = applyPatch(obj, patch.path, patch) } - console.log("obj",obj) return obj } @@ -112,11 +140,31 @@ describe('Automerge', () => { const doc1 = create() doc1.enablePatches(true) let list = doc1.putObject("/", "list", [1,2,3,['a','b','c']]) - //doc1.delete("/list", 1); + doc1.delete("/list", 1); doc1.push("/list", 'hello'); let patches = doc1.popPatches() let js = applyPatches({}, patches) assert.deepEqual(js,doc1.materialize("/")) }) + + it.only('can handle patches with deletes with lists holding objects', () => { + const doc1 = create() + doc1.enablePatches(true) + + let list = doc1.putObject("/", "list", [1,2,3,[{n:1},{n:2},{n:3}]]) + doc1.delete("/list", 1); + doc1.put("/list/2/0", "n", 100); + doc1.delete("/list", 1); + doc1.put("/list/1/1", "n", 200); + doc1.insertObject("/list/1", 3, {n:400}) + + let text = doc1.putObject("/", "text", "hello world"); + doc1.insertObject("/text", 3, {n:1}) + let patches = doc1.popPatches() + console.log(doc1.materialize("/")) + let js = applyPatches({}, patches) + + assert.deepEqual(js,doc1.materialize("/")) + }) }) }) diff --git a/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs index a041730c..089ebced 100644 --- a/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -8,7 +8,7 @@ use automerge::{Automerge, ROOT}; fn main() { let mut doc1 = Automerge::new(); let (cards, card1) = doc1 - .transact_with::<_, _, AutomergeError, _, ()>( + .transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Add card".to_owned()), |tx| { let cards = tx.put_object(ROOT, "cards", ObjType::List).unwrap(); @@ -30,7 +30,7 @@ fn main() { let binary = doc1.save(); let mut doc2 = Automerge::load(&binary).unwrap(); - doc1.transact_with::<_, _, AutomergeError, _, ()>( + doc1.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Mark card as done".to_owned()), |tx| { tx.put(&card1, "done", true)?; @@ -39,7 +39,7 @@ fn main() { ) .unwrap(); - doc2.transact_with::<_, _, AutomergeError, _, ()>( + doc2.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Delete card".to_owned()), |tx| { tx.delete(&cards, 0)?; diff --git a/automerge/examples/watch.rs b/automerge/examples/watch.rs index 9d67f73d..05e6f653 100644 --- a/automerge/examples/watch.rs +++ b/automerge/examples/watch.rs @@ -3,15 +3,15 @@ use automerge::transaction::Transactable; use automerge::Automerge; use automerge::AutomergeError; use automerge::Patch; -use automerge::VecOpObserver; +use automerge::OpObserver; use automerge::ROOT; fn main() { let mut doc = Automerge::new(); - let mut observer = VecOpObserver::default(); + let mut observer = OpObserver::default();
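The patch.ts helpers above rebuild a materialized mirror by replaying each patch along its path, descending one Prop per step and mutating only at the final hop (text is a leaf edited by string slicing, never descended into). The descent step translated into Rust terms, as a rough sketch; Mirror is a stand-in value type invented here for illustration, and only the Prop shape comes from the hunks in this series:

    use automerge::Prop;
    use std::collections::HashMap;

    // Stand-in mirror of a materialized document (illustrative only).
    enum Mirror {
        Map(HashMap<String, Mirror>),
        List(Vec<Mirror>),
        Text(String),
    }

    // One step of the walk applyPatch performs above: a map key descends
    // into Mirror::Map, a sequence index into Mirror::List; anything else
    // (including Text) is a leaf and the walk stops.
    fn child<'a>(m: &'a mut Mirror, prop: &Prop) -> Option<&'a mut Mirror> {
        match (m, prop) {
            (Mirror::Map(map), Prop::Map(key)) => map.get_mut(key),
            (Mirror::List(list), Prop::Seq(i)) => list.get_mut(*i),
            _ => None,
        }
    }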
// a simple scalar change in the root object - doc.transact_with::<_, _, AutomergeError, _, _>( + doc.transact_with::<_, _, AutomergeError, _>( |_result| CommitOptions::default().with_op_observer(&mut observer), |tx| { tx.put(ROOT, "hello", "world").unwrap(); diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs index 22efd155..abc90572 100644 --- a/automerge/src/autocommit.rs +++ b/automerge/src/autocommit.rs @@ -1,11 +1,11 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::op_observer::OpObserver; use crate::transaction::{CommitOptions, Transactable}; +use crate::op_observer::OpObserver; use crate::{ sync, ApplyOptions, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, - Parents, ScalarValue, + Parents, ScalarValue, Patch }; use crate::{ transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, }; /// An automerge document that automatically manages transactions. -#[derive(Debug, Clone)] +#[derive(Debug,Clone)] pub struct AutoCommit { doc: Automerge, transaction: Option<TransactionInner>, + observer: Option<OpObserver>, } impl Default for AutoCommit { @@ -30,6 +31,7 @@ impl AutoCommit { Self { doc: Automerge::new(), transaction: None, + observer: None, } } @@ -56,17 +58,41 @@ impl AutoCommit { self.doc.get_actor() } + pub fn enable_observer(&mut self) -> Option<OpObserver> { + self.ensure_transaction_closed(); + self.observer.replace(OpObserver::default()) + } + + pub fn disable_observer(&mut self) -> Option<OpObserver> { + self.ensure_transaction_closed(); + self.observer.take() + } + + pub fn with_observer(mut self) -> Self { + self.ensure_transaction_closed(); + self.enable_observer(); + self + } + + pub fn without_observer(mut self) -> Self { + self.ensure_transaction_closed(); + self.disable_observer(); + self + } + fn ensure_transaction_open(&mut self) { - if self.transaction.is_none() { - self.transaction = Some(self.doc.transaction_inner()); - } + if self.transaction.is_none() { + let observer = self.observer.as_ref().map(|_| OpObserver::default()); + self.transaction = Some(self.doc.transaction_inner(observer)) + } } pub fn fork(&mut self) -> Self { self.ensure_transaction_closed(); Self { doc: self.doc.fork(), - transaction: self.transaction.clone(), + transaction: None, + observer: None, } } @@ -74,13 +100,14 @@ impl AutoCommit { self.ensure_transaction_closed(); Ok(Self { doc: self.doc.fork_at(heads)?, - transaction: self.transaction.clone(), + transaction: None, + observer: None, }) } fn ensure_transaction_closed(&mut self) { if let Some(tx) = self.transaction.take() { - tx.commit::<()>(&mut self.doc, None, None, None); + tx.commit(&mut self.doc, None, None, self.observer.as_mut()); } } @@ -89,64 +116,38 @@ impl AutoCommit { Ok(Self { doc, transaction: None, + observer: None, }) } - pub fn load_with<Obs: OpObserver>( + pub fn load_with_observer( data: &[u8], - options: ApplyOptions<'_, Obs>, ) -> Result<Self, AutomergeError> { + let mut observer = OpObserver::default(); + let options = ApplyOptions::default().with_op_observer(&mut observer); let doc = Automerge::load_with(data, options)?; Ok(Self { doc, transaction: None, + observer: Some(observer), }) } pub fn load_incremental(&mut self, data: &[u8]) -> Result<usize, AutomergeError> { self.ensure_transaction_closed(); - self.doc.load_incremental(data) - } - - pub fn load_incremental_with<'a, Obs: OpObserver>( - &mut self, - data: &[u8], - options: ApplyOptions<'a, Obs>, - ) -> Result<usize, AutomergeError> { - self.ensure_transaction_closed(); - self.doc.load_incremental_with(data, options) + self.doc.load_incremental_with(data,
self.observer.as_mut().into() ) } pub fn apply_changes(&mut self, changes: Vec) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc.apply_changes(changes) - } - - pub fn apply_changes_with( - &mut self, - changes: Vec, - options: ApplyOptions<'_, Obs>, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_closed(); - self.doc.apply_changes_with(changes, options) + self.doc.apply_changes_with(changes, self.observer.as_mut().into()) } /// Takes all the changes in `other` which are not in `self` and applies them pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { self.ensure_transaction_closed(); other.ensure_transaction_closed(); - self.doc.merge(&mut other.doc) - } - - /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge_with<'a, Obs: OpObserver>( - &mut self, - other: &mut Self, - options: ApplyOptions<'a, Obs>, - ) -> Result, AutomergeError> { - self.ensure_transaction_closed(); - other.ensure_transaction_closed(); - self.doc.merge_with(&mut other.doc, options) + self.doc.merge_with(&mut other.doc, self.observer.as_mut().into()) } pub fn save(&mut self) -> Vec { @@ -183,7 +184,7 @@ impl AutoCommit { self.doc.get_change_by_hash(hash) } - pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> { + pub fn get_changes_added<'b>(&mut self, other: &'b mut Self) -> Vec<&'b Change> { self.ensure_transaction_closed(); other.ensure_transaction_closed(); self.doc.get_changes_added(&other.doc) @@ -209,18 +210,7 @@ impl AutoCommit { message: sync::Message, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc.receive_sync_message(sync_state, message) - } - - pub fn receive_sync_message_with<'a, Obs: OpObserver>( - &mut self, - sync_state: &mut sync::State, - message: sync::Message, - options: ApplyOptions<'a, Obs>, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_closed(); - self.doc - .receive_sync_message_with(sync_state, message, options) + self.doc.receive_sync_message_with(sync_state, message, self.observer.as_mut().into()) } #[cfg(feature = "optree-visualisation")] @@ -237,7 +227,7 @@ impl AutoCommit { } pub fn commit(&mut self) -> ChangeHash { - self.commit_with::<()>(CommitOptions::default()) + self.commit_with(CommitOptions::default()) } /// Commit the current operations with some options. 
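With this change the observer is owned by the AutoCommit document: enable_observer() installs a fresh OpObserver (closing any open transaction first), ensure_transaction_open() hands each new transaction its own observer, and commit folds the transaction's patches back into the document-level one. A hedged usage sketch built only from the methods introduced in these hunks:

    use automerge::transaction::Transactable;
    use automerge::{AutoCommit, AutomergeError, ROOT};

    fn watch_local_edits() -> Result<(), AutomergeError> {
        let mut doc = AutoCommit::new();
        doc.enable_observer();            // start recording patches
        doc.put(ROOT, "hello", "world")?; // observed as the op is applied
        let patches = doc.take_patches(); // closes the open tx, drains patches
        assert!(!patches.is_empty());     // at least the Put for the root key
        doc.disable_observer();           // stop recording
        Ok(())
    }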
@@ -253,11 +243,14 @@ impl AutoCommit { /// doc.put_object(&ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; - /// doc.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(&mut self, options: CommitOptions<'_, Obs>) -> ChangeHash { + pub fn commit_with<'a>(&'a mut self, mut options: CommitOptions<'a>) -> ChangeHash { // ensure that even no changes triggers a change self.ensure_transaction_open(); + if let Some(observer) = &mut self.observer { + options = options.with_op_observer(observer); + } let tx = self.transaction.take().unwrap(); tx.commit( &mut self.doc, @@ -273,6 +266,15 @@ impl AutoCommit { .map(|tx| tx.rollback(&mut self.doc)) .unwrap_or(0) } + + pub fn take_patches(&mut self) -> Vec { + self.ensure_transaction_closed(); + if let Some(observer) = &mut self.observer { + observer.take_patches() + } else { + Vec::new() + } + } } impl Transactable for AutoCommit { diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs index bdd31b4c..175c4891 100644 --- a/automerge/src/automerge.rs +++ b/automerge/src/automerge.rs @@ -112,12 +112,20 @@ impl Automerge { /// Start a transaction. pub fn transaction(&mut self) -> Transaction<'_> { Transaction { - inner: Some(self.transaction_inner()), + inner: Some(self.transaction_inner(None)), doc: self, } } - pub(crate) fn transaction_inner(&mut self) -> TransactionInner { + /// Start a transaction. + pub fn transaction_with_observer(&mut self) -> Transaction<'_> { + Transaction { + inner: Some(self.transaction_inner(Some(OpObserver::default()))), + doc: self, + } + } + + pub(crate) fn transaction_inner(&mut self, op_observer: Option) -> TransactionInner { let actor = self.get_actor_index(); let seq = self.states.get(&actor).map_or(0, |v| v.len()) as u64 + 1; let mut deps = self.get_heads(); @@ -137,6 +145,7 @@ impl Automerge { message: None, extra_bytes: Default::default(), hash: None, + op_observer, operations: vec![], deps, } @@ -144,7 +153,7 @@ impl Automerge { /// Run a transaction on this document in a closure, automatically handling commit or rollback /// afterwards. - pub fn transact(&mut self, f: F) -> transaction::Result + pub fn transact(&mut self, f: F) -> transaction::Result where F: FnOnce(&mut Transaction<'_>) -> Result, { @@ -163,11 +172,10 @@ impl Automerge { } /// Like [`Self::transact`] but with a function for generating the commit options. - pub fn transact_with<'a, F, O, E, C, Obs>(&mut self, c: C, f: F) -> transaction::Result + pub fn transact_with<'a, F, O, E, C>(&mut self, c: C, f: F) -> transaction::Result where F: FnOnce(&mut Transaction<'_>) -> Result, - C: FnOnce(&O) -> CommitOptions<'a, Obs>, - Obs: 'a + OpObserver, + C: FnOnce(&O) -> CommitOptions<'a>, { let mut tx = self.transaction(); let result = f(&mut tx); @@ -555,13 +563,13 @@ impl Automerge { /// Load a document. pub fn load(data: &[u8]) -> Result { - Self::load_with::<()>(data, ApplyOptions::default()) + Self::load_with(data, ApplyOptions::default()) } /// Load a document. - pub fn load_with( + pub fn load_with( data: &[u8], - options: ApplyOptions<'_, Obs>, + options: ApplyOptions<'_>, ) -> Result { let changes = Change::load_document(data)?; let mut doc = Self::new(); @@ -571,14 +579,14 @@ impl Automerge { /// Load an incremental save of a document. 
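Because the observer type parameter is gone, transact_with drops from five turbofish arguments to four, exactly as the quickstart.rs hunks earlier in this patch show. A sketch of a caller after the change (the key name and closure bodies here are illustrative):

    use automerge::transaction::{CommitOptions, Transactable};
    use automerge::{Automerge, AutomergeError, ROOT};

    fn titled_change(doc: &mut Automerge) {
        doc.transact_with::<_, _, AutomergeError, _>(
            |_| CommitOptions::default().with_message("one change".to_owned()),
            |tx| {
                tx.put(ROOT, "title", "hello")?; // any Transactable op
                Ok(())
            },
        )
        .unwrap();
    }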
pub fn load_incremental(&mut self, data: &[u8]) -> Result { - self.load_incremental_with::<()>(data, ApplyOptions::default()) + self.load_incremental_with(data, ApplyOptions::default()) } /// Load an incremental save of a document. - pub fn load_incremental_with( + pub fn load_incremental_with( &mut self, data: &[u8], - options: ApplyOptions<'_, Obs>, + options: ApplyOptions<'_>, ) -> Result { let changes = Change::load_document(data)?; let start = self.ops.len(); @@ -602,14 +610,14 @@ impl Automerge { &mut self, changes: impl IntoIterator, ) -> Result<(), AutomergeError> { - self.apply_changes_with::<_, ()>(changes, ApplyOptions::default()) + self.apply_changes_with::<_>(changes, ApplyOptions::default()) } /// Apply changes to this document. - pub fn apply_changes_with, Obs: OpObserver>( + pub fn apply_changes_with>( &mut self, changes: I, - mut options: ApplyOptions<'_, Obs>, + mut options: ApplyOptions<'_>, ) -> Result<(), AutomergeError> { for c in changes { if !self.history_index.contains_key(&c.hash) { @@ -634,7 +642,7 @@ impl Automerge { Ok(()) } - fn apply_change(&mut self, change: Change, observer: &mut Option<&mut Obs>) { + fn apply_change(&mut self, change: Change, observer: &mut Option<&mut OpObserver>) { let ops = self.import_ops(&change); self.update_history(change, ops.len()); if let Some(observer) = observer { @@ -706,14 +714,14 @@ impl Automerge { /// Takes all the changes in `other` which are not in `self` and applies them pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { - self.merge_with::<()>(other, ApplyOptions::default()) + self.merge_with(other, ApplyOptions::default()) } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge_with<'a, Obs: OpObserver>( + pub fn merge_with<'a>( &mut self, other: &mut Self, - options: ApplyOptions<'a, Obs>, + options: ApplyOptions<'a>, ) -> Result, AutomergeError> { // TODO: Make this fallible and figure out how to do this transactionally let changes = self diff --git a/automerge/src/automerge/tests.rs b/automerge/src/automerge/tests.rs index b832168e..721ab39e 100644 --- a/automerge/src/automerge/tests.rs +++ b/automerge/src/automerge/tests.rs @@ -1434,16 +1434,12 @@ fn observe_counter_change_application_overwrite() { doc1.increment(ROOT, "counter", 5).unwrap(); doc1.commit(); - let mut observer = VecOpObserver::default(); - let mut doc3 = doc1.clone(); - doc3.merge_with( - &mut doc2, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); + let mut doc3 = doc1.fork(); + doc3.enable_observer(); + doc3.merge(&mut doc2).unwrap(); assert_eq!( - observer.take_patches(), + doc3.take_patches(), vec![Patch::Put { obj: ExId::Root, path: vec![], @@ -1456,16 +1452,12 @@ fn observe_counter_change_application_overwrite() { }] ); - let mut observer = VecOpObserver::default(); - let mut doc4 = doc2.clone(); - doc4.merge_with( - &mut doc1, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); + let mut doc4 = doc2.fork(); + doc4.enable_observer(); + doc4.merge(&mut doc1).unwrap(); // no patches as the increments operate on an invisible counter - assert_eq!(observer.take_patches(), vec![]); + assert_eq!(doc4.take_patches(), vec![]); } #[test] @@ -1474,18 +1466,13 @@ fn observe_counter_change_application() { doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); doc.increment(ROOT, "counter", 2).unwrap(); doc.increment(ROOT, "counter", 5).unwrap(); - let changes = doc.get_changes(&[]).unwrap().into_iter().cloned().collect(); + let changes : 
Vec<_> = doc.get_changes(&[]).unwrap().into_iter().cloned().collect(); let mut new_doc = AutoCommit::new(); - let mut observer = VecOpObserver::default(); - new_doc - .apply_changes_with( - changes, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); + new_doc.enable_observer(); + new_doc.apply_changes(changes).unwrap(); assert_eq!( - observer.take_patches(), + new_doc.take_patches(), vec![ Patch::Put { obj: ExId::Root, diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 1cf55de0..b5bc60bf 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -508,7 +508,7 @@ pub(crate) fn export_change( operations: change .operations .iter() - .map(|(obj, _, op)| export_op(op, obj, actors, props)) + .map(|(obj, op)| export_op(op, obj, actors, props)) .collect(), extra_bytes: change.extra_bytes, } diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index c011d2de..ea443ddc 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -102,7 +102,6 @@ pub use map_range::MapRange; pub use map_range_at::MapRangeAt; pub use op_observer::OpObserver; pub use op_observer::Patch; -pub use op_observer::VecOpObserver; pub use options::ApplyOptions; pub use parents::Parents; pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs index f2682d96..65a72724 100644 --- a/automerge/src/op_observer.rs +++ b/automerge/src/op_observer.rs @@ -3,101 +3,26 @@ use crate::parents::Parents; use crate::Prop; use crate::Value; -/// An observer of operations applied to the document. -pub trait OpObserver { - /// A new value has been inserted into the given object. - /// - /// - `obj`: the object that has been inserted into. - /// - `index`: the index the new value has been inserted at. - /// - `tagged_value`: the value that has been inserted and the id of the operation that did the - /// insert. - fn insert( - &mut self, - obj: ExId, - parents: Parents<'_>, - index: usize, - tagged_value: (Value<'_>, ExId), - ); - - /// A new value has been put into the given object. - /// - /// - `obj`: the object that has been put into. - /// - `key`: the key that the value has been put at. - /// - `tagged_value`: the value that has been put into the object and the id of the operation - /// that did the put. - /// - `conflict`: whether this put conflicts with other operations. - fn put( - &mut self, - obj: ExId, - parents: Parents<'_>, - key: Prop, - tagged_value: (Value<'_>, ExId), - conflict: bool, - ); - - /// A counter has been incremented. - /// - /// - `obj`: the object that contains the counter. - /// - `key`: the key that the counter is at. - /// - `tagged_value`: the amount the counter has been incremented by, and the id of the - /// increment operation. - fn increment(&mut self, obj: ExId, parents: Parents<'_>, key: Prop, tagged_value: (i64, ExId)); - - /// A value has been deleted. - /// - /// - `obj`: the object that has been deleted in. - /// - `key`: the key of the value that has been deleted.
- fn delete(&mut self, obj: ExId, parents: Parents<'_>, key: Prop); -} - -impl OpObserver for () { - fn insert( - &mut self, - _obj: ExId, - _parents: Parents<'_>, - _index: usize, - _tagged_value: (Value<'_>, ExId), - ) { - } - - fn put( - &mut self, - _obj: ExId, - _parents: Parents<'_>, - _key: Prop, - _tagged_value: (Value<'_>, ExId), - _conflict: bool, - ) { - } - - fn increment( - &mut self, - _obj: ExId, - _parents: Parents<'_>, - _key: Prop, - _tagged_value: (i64, ExId), - ) { - } - - fn delete(&mut self, _obj: ExId, _parents: Parents<'_>, _key: Prop) {} -} +use std::fmt::Debug; /// Capture operations into a [`Vec`] and store them as patches. #[derive(Default, Debug, Clone)] -pub struct VecOpObserver { - patches: Vec, +pub struct OpObserver { + pub(crate) patches: Vec, } -impl VecOpObserver { +impl OpObserver { /// Take the current list of patches, leaving the internal list empty and ready for new /// patches. pub fn take_patches(&mut self) -> Vec { std::mem::take(&mut self.patches) } -} -impl OpObserver for VecOpObserver { - fn insert( + pub(crate) fn merge(&mut self, other: Self) { + self.patches.extend(other.patches) + } + + pub fn insert( &mut self, obj_id: ExId, parents: Parents<'_>, @@ -114,7 +39,7 @@ impl OpObserver for VecOpObserver { }); } - fn put( + pub fn put( &mut self, obj: ExId, parents: Parents<'_>, @@ -133,7 +58,7 @@ impl OpObserver for VecOpObserver { }); } - fn increment(&mut self, obj: ExId, parents: Parents<'_>, key: Prop, tagged_value: (i64, ExId)) { + pub fn increment(&mut self, obj: ExId, parents: Parents<'_>, key: Prop, tagged_value: (i64, ExId)) { let mut path = parents.collect::>(); path.reverse(); self.patches.push(Patch::Increment { @@ -144,7 +69,7 @@ impl OpObserver for VecOpObserver { }); } - fn delete(&mut self, obj: ExId, parents: Parents<'_>, key: Prop) { + pub fn delete(&mut self, obj: ExId, parents: Parents<'_>, key: Prop) { let mut path = parents.collect::>(); path.reverse(); self.patches.push(Patch::Delete { obj, path, key }) diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 9a02aa9a..c82df10d 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -197,11 +197,11 @@ impl OpSetInternal { op } - pub(crate) fn insert_op_with_observer( + pub(crate) fn insert_op_with_observer( &mut self, obj: &ObjId, op: Op, - observer: &mut Obs, + observer: &mut OpObserver, ) -> Op { let q = self.search(obj, query::SeekOpWithPatch::new(&op)); diff --git a/automerge/src/options.rs b/automerge/src/options.rs index e0fd991f..841ddecb 100644 --- a/automerge/src/options.rs +++ b/automerge/src/options.rs @@ -1,16 +1,24 @@ +use crate::op_observer::OpObserver; + #[derive(Debug, Default)] -pub struct ApplyOptions<'a, Obs> { - pub op_observer: Option<&'a mut Obs>, +pub struct ApplyOptions<'a> { + pub op_observer: Option<&'a mut OpObserver>, } -impl<'a, Obs> ApplyOptions<'a, Obs> { - pub fn with_op_observer(mut self, op_observer: &'a mut Obs) -> Self { +impl<'a> ApplyOptions<'a> { + pub fn with_op_observer(mut self, op_observer: &'a mut OpObserver) -> Self { self.op_observer = Some(op_observer); self } - pub fn set_op_observer(&mut self, op_observer: &'a mut Obs) -> &mut Self { + pub fn set_op_observer(&mut self, op_observer: &'a mut OpObserver) -> &mut Self { self.op_observer = Some(op_observer); self } } + +impl<'a> From> for ApplyOptions<'a> { + fn from(o: Option<&'a mut OpObserver>) -> Self { + ApplyOptions { op_observer: o } + } +} diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 99961397..c5ba1ad6 100644 --- 
a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -8,7 +8,7 @@ use std::{ use crate::{ decoding, decoding::Decoder, encoding::Encodable, types::HASH_SIZE, ApplyOptions, Automerge, - AutomergeError, Change, ChangeHash, OpObserver, + AutomergeError, Change, ChangeHash, }; mod bloom; @@ -99,14 +99,14 @@ impl Automerge { sync_state: &mut State, message: Message, ) -> Result<(), AutomergeError> { - self.receive_sync_message_with::<()>(sync_state, message, ApplyOptions::default()) + self.receive_sync_message_with(sync_state, message, ApplyOptions::default()) } - pub fn receive_sync_message_with<'a, Obs: OpObserver>( + pub fn receive_sync_message_with<'a>( &mut self, sync_state: &mut State, message: Message, - options: ApplyOptions<'a, Obs>, + options: ApplyOptions<'a>, ) -> Result<(), AutomergeError> { let before_heads = self.get_heads(); diff --git a/automerge/src/transaction/commit.rs b/automerge/src/transaction/commit.rs index f9e6f3c2..ccf360e8 100644 --- a/automerge/src/transaction/commit.rs +++ b/automerge/src/transaction/commit.rs @@ -1,12 +1,14 @@ +use crate::op_observer::OpObserver; + /// Optional metadata for a commit. #[derive(Debug, Default)] -pub struct CommitOptions<'a, Obs> { +pub struct CommitOptions<'a> { pub message: Option, pub time: Option, - pub op_observer: Option<&'a mut Obs>, + pub op_observer: Option<&'a mut OpObserver>, } -impl<'a, Obs> CommitOptions<'a, Obs> { +impl<'a> CommitOptions<'a> { /// Add a message to the commit. pub fn with_message>(mut self, message: S) -> Self { self.message = Some(message.into()); @@ -31,12 +33,12 @@ impl<'a, Obs> CommitOptions<'a, Obs> { self } - pub fn with_op_observer(mut self, op_observer: &'a mut Obs) -> Self { + pub fn with_op_observer(mut self, op_observer: &'a mut OpObserver) -> Self { self.op_observer = Some(op_observer); self } - pub fn set_op_observer(&mut self, op_observer: &'a mut Obs) -> &mut Self { + pub fn set_op_observer(&mut self, op_observer: &'a mut OpObserver) -> &mut Self { self.op_observer = Some(op_observer); self } diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs index ce6d0e73..40e46e7f 100644 --- a/automerge/src/transaction/inner.rs +++ b/automerge/src/transaction/inner.rs @@ -7,7 +7,7 @@ use crate::types::{Key, ObjId, OpId}; use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop}; use crate::{AutomergeError, ObjType, OpObserver, OpType, ScalarValue}; -#[derive(Debug, Clone)] +#[derive(Debug,Clone)] pub(crate) struct TransactionInner { pub(crate) actor: usize, pub(crate) seq: u64, @@ -17,7 +17,8 @@ pub(crate) struct TransactionInner { pub(crate) extra_bytes: Vec, pub(crate) hash: Option, pub(crate) deps: Vec, - pub(crate) operations: Vec<(ObjId, Prop, Op)>, + pub(crate) op_observer: Option, + pub(crate) operations: Vec<(ObjId, Op)>, } impl TransactionInner { @@ -25,47 +26,54 @@ impl TransactionInner { self.operations.len() } - /// Commit the operations performed in this transaction, returning the hashes corresponding to - /// the new heads. 
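The sync path gets the same simplification: ApplyOptions is now concrete over Option<&mut OpObserver>, and the From impl added in options.rs above lets an optional observer pass straight through. A hedged sketch of receiving a sync message while capturing patches, using only APIs visible in these hunks:

    use automerge::sync;
    use automerge::{ApplyOptions, Automerge, AutomergeError, OpObserver};

    fn receive(
        doc: &mut Automerge,
        state: &mut sync::State,
        message: sync::Message,
    ) -> Result<(), AutomergeError> {
        let mut observer = OpObserver::default();
        doc.receive_sync_message_with(
            state,
            message,
            ApplyOptions::default().with_op_observer(&mut observer),
        )?;
        let _patches = observer.take_patches(); // hand off to the UI layer
        Ok(())
    }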
- pub(crate) fn commit( - mut self, - doc: &mut Automerge, - message: Option, - time: Option, - op_observer: Option<&mut Obs>, - ) -> ChangeHash { - if message.is_some() { - self.message = message; - } - - if let Some(t) = time { - self.time = t; - } - - if let Some(observer) = op_observer { - for (obj, prop, op) in &self.operations { + fn observe_op(&mut self, doc: &mut Automerge, obj: ObjId, prop: Prop, op: &Op) { + if let Some(observer) = &mut self.op_observer { let ex_obj = doc.ops.id_to_exid(obj.0); let parents = doc.ops.parents(&ex_obj); if op.insert { let value = (op.value(), doc.id_to_exid(op.id)); match prop { Prop::Map(_) => panic!("insert into a map"), - Prop::Seq(index) => observer.insert(ex_obj, parents, *index, value), + Prop::Seq(index) => observer.insert(ex_obj, parents, index, value), } } else if op.is_delete() { - observer.delete(ex_obj, parents, prop.clone()); + observer.delete(ex_obj, parents, prop); } else if let Some(value) = op.get_increment_value() { observer.increment( ex_obj, parents, - prop.clone(), + prop, (value, doc.id_to_exid(op.id)), ); } else { let value = (op.value(), doc.ops.id_to_exid(op.id)); - observer.put(ex_obj, parents, prop.clone(), value, false); + observer.put(ex_obj, parents, prop, value, false); } - } + } + } + + /// Commit the operations performed in this transaction, returning the hashes corresponding to + /// the new heads. + pub(crate) fn commit( + mut self, + doc: &mut Automerge, + message: Option, + time: Option, + observer: Option<&mut OpObserver>, + ) -> ChangeHash { + + if let Some(tx_observer) = self.op_observer.take() { + if let Some(observer) = observer { + observer.merge(tx_observer) + } + } + + if message.is_some() { + self.message = message; + } + + if let Some(t) = time { + self.time = t; } let num_ops = self.pending_ops(); @@ -81,7 +89,7 @@ impl TransactionInner { pub(crate) fn rollback(self, doc: &mut Automerge) -> usize { let num = self.pending_ops(); // remove in reverse order so sets are removed before makes etc... 
- for (obj, _prop, op) in self.operations.into_iter().rev() { + for (obj, op) in self.operations.into_iter().rev() { for pred_id in &op.pred { if let Some(p) = doc.ops.search(&obj, OpIdSearch::new(*pred_id)).index() { doc.ops.replace(&obj, p, |o| o.remove_succ(&op)); @@ -178,7 +186,9 @@ impl TransactionInner { doc.ops.insert(pos, &obj, op.clone()); } - self.operations.push((obj, prop, op)); + self.observe_op(doc, obj, prop, &op); + + self.operations.push((obj, op)); } pub(crate) fn insert>( @@ -230,7 +240,10 @@ impl TransactionInner { }; doc.ops.insert(query.pos(), &obj, op.clone()); - self.operations.push((obj, Prop::Seq(index), op)); + + self.observe_op(doc, obj, Prop::Seq(index), &op); + + self.operations.push((obj, op)); Ok(id) } diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs index 7be7932e..960ac326 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/automerge/src/transaction/manual_transaction.rs @@ -1,7 +1,7 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValue, Value, Values}; +use crate::{Automerge, ChangeHash, KeysAt, ObjType, Prop, ScalarValue, Value, Values}; use crate::{AutomergeError, Keys}; use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt}; @@ -39,7 +39,7 @@ impl<'a> Transaction<'a> { self.inner .take() .unwrap() - .commit::<()>(self.doc, None, None, None) + .commit(self.doc, None, None, None) } /// Commit the operations in this transaction with some options. @@ -56,9 +56,9 @@ impl<'a> Transaction<'a> { /// tx.put_object(ROOT, "todos", ObjType::List).unwrap(); /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as /// i64; - /// tx.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(mut self, options: CommitOptions<'_, Obs>) -> ChangeHash { + pub fn commit_with(mut self, options: CommitOptions<'_>) -> ChangeHash { self.inner.take().unwrap().commit( self.doc, options.message, diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index f13bcd2b..ad5202ae 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -1,7 +1,7 @@ use automerge::transaction::Transactable; use automerge::{ - ActorId, ApplyOptions, AutoCommit, Automerge, AutomergeError, ObjType, ScalarValue, Value, - VecOpObserver, ROOT, + ActorId, AutoCommit, Automerge, AutomergeError, ObjType, ScalarValue, Value, + ROOT, }; mod helpers; @@ -941,12 +941,8 @@ fn observe_counter_change_application() { let changes = doc.get_changes(&[]).unwrap().into_iter().cloned().collect(); let mut doc = AutoCommit::new(); - let mut observer = VecOpObserver::default(); - doc.apply_changes_with( - changes, - ApplyOptions::default().with_op_observer(&mut observer), - ) - .unwrap(); + doc.enable_observer(); + doc.apply_changes(changes).unwrap(); } #[test]
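Taken together, the series removes the generic observer plumbing end to end: the wasm layer now just toggles the document's own observer via enablePatches and drains it via popPatches, while Rust callers use the AutoCommit methods directly. A closing sketch of the fork/merge/take_patches flow that the updated tests exercise (key names are illustrative):

    use automerge::transaction::Transactable;
    use automerge::{AutoCommit, AutomergeError, ROOT};

    fn merge_and_watch() -> Result<(), AutomergeError> {
        let mut left = AutoCommit::new();
        left.put(ROOT, "owner", "left")?;

        let mut right = left.fork();
        right.put(ROOT, "owner", "right")?;

        left.enable_observer();  // record patches from here on
        left.merge(&mut right)?; // remote ops flow through the observer
        for patch in left.take_patches() {
            println!("{:?}", patch); // e.g. Patch::Put { path, key, .. }
        }
        Ok(())
    }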