Compare commits: main...rebase-tex (10 commits)

Commits (SHA1): 0dd1b858a0, 3049bcae19, 835f9d839f, 1a1dd2ede6, 9e4e2a7570, c584a229e0, 3940972dd8, 9b7abcb28a, cee363a074, 7946f22fa0

48 changed files with 2048 additions and 1474 deletions
.gitignore (vendored): 1 changed line

@@ -3,3 +3,4 @@ perf.*
/Cargo.lock
build/
.vim/*
/target

@@ -1,11 +1,11 @@
// Properties of the document root object
//const OPTIONS = Symbol('_options') // object containing options passed to init()
//const CACHE = Symbol('_cache') // map from objectId to immutable object
//export const STATE = Symbol.for('_am_state') // object containing metadata about current state (e.g. sequence numbers)
export const STATE = Symbol.for('_am_meta') // object containing metadata about current state (e.g. sequence numbers)
export const HEADS = Symbol.for('_am_heads') // object containing metadata about current state (e.g. sequence numbers)
export const TRACE = Symbol.for('_am_trace') // object containing metadata about current state (e.g. sequence numbers)
export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current state (e.g. sequence numbers)
export const IS_PROXY = Symbol.for('_am_isProxy') // object containing metadata about current state (e.g. sequence numbers)
export const READ_ONLY = Symbol.for('_am_readOnly') // object containing metadata about current state (e.g. sequence numbers)
export const FROZEN = Symbol.for('_am_frozen') // object containing metadata about current state (e.g. sequence numbers)

@@ -2,11 +2,11 @@
/** @hidden **/
export {/** @hidden */ uuid} from './uuid'

import {rootProxy, listProxy, textProxy, mapProxy} from "./proxies"
import {STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN} from "./constants"
import {rootProxy, listProxy, mapProxy} from "./proxies"
import {STATE, HEADS, TRACE, IS_PROXY, OBJECT_ID, READ_ONLY, FROZEN} from "./constants"

import {AutomergeValue, Text, Counter} from "./types"
export {AutomergeValue, Text, Counter, Int, Uint, Float64, ScalarValue} from "./types"
export {AutomergeValue, Counter, Int, Uint, Float64, ScalarValue} from "./types"

import {type API, type Patch} from "@automerge/automerge-wasm";
export { type Patch, PutPatch, DelPatch, SplicePatch, IncPatch, SyncMessage, } from "@automerge/automerge-wasm"
@@ -116,15 +116,6 @@ function _trace<T>(doc: Doc<T>): string | undefined {
return Reflect.get(doc, TRACE) as string
}

function _set_heads<T>(doc: Doc<T>, heads: Heads) {
_state(doc).heads = heads
}

function _clear_heads<T>(doc: Doc<T>) {
Reflect.set(doc, HEADS, undefined)
Reflect.set(doc, TRACE, undefined)
}

function _obj<T>(doc: Doc<T>): ObjID | null {
if (!(typeof doc === 'object') || doc === null) {
return null
@@ -161,7 +152,6 @@ export function init<T>(_opts?: ActorId | InitOptions<T>): Doc<T> {
handle.enablePatches(true)
handle.enableFreeze(!!opts.freeze)
handle.registerDatatype("counter", (n) => new Counter(n))
handle.registerDatatype("text", (n) => new Text(n))
const doc = handle.materialize("/", undefined, {handle, heads: undefined, freeze, patchCallback}) as Doc<T>
return doc
}
@@ -406,7 +396,6 @@ export function load<T>(data: Uint8Array, _opts?: ActorId | InitOptions<T>): Doc
handle.enablePatches(true)
handle.enableFreeze(!!opts.freeze)
handle.registerDatatype("counter", (n) => new Counter(n))
handle.registerDatatype("text", (n) => new Text(n))
const doc: any = handle.materialize("/", undefined, {handle, heads: undefined, patchCallback}) as Doc<T>
return doc
}
@@ -516,7 +505,7 @@ function conflictAt(context: Automerge, objectId: ObjID, prop: Prop): Conflicts
result[fullVal[1]] = listProxy(context, fullVal[1], [prop], true)
break;
case "text":
result[fullVal[1]] = textProxy(context, fullVal[1], [prop], true)
result[fullVal[1]] = context.text(fullVal[1])
break;
//case "table":
//case "cursor":
@@ -614,8 +603,17 @@ export function getLastLocalChange<T>(doc: Doc<T>): Change | undefined {
* This is useful to determine if something is actually an automerge document,
* if `doc` is not an automerge document this will return null.
*/
export function getObjectId(doc: any): ObjID | null {
export function getObjectId(doc: any, prop?: Prop): ObjID | null {
if (prop) {
const state = _state(doc, false)
const objectId = _obj(doc)
if (!state || !objectId) {
throw new RangeError("invalid object for splice")
}
return state.handle.get(objectId, prop) as ObjID
} else {
return _obj(doc)
}
}

/**

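With the optional prop argument added above, getObjectId can now resolve the object id of the value stored under a property of doc. That matters because text fields now materialize as plain strings, and a plain string carries no object id of its own. A minimal TypeScript sketch of the two call shapes, assuming the wrapper package is imported as Automerge (import path assumed, not taken from this diff):

    import * as Automerge from "@automerge/automerge" // import path assumed

    const doc = Automerge.from({ text: "hello", list: [1, 2, 3] })

    // One-argument form, unchanged: object id of a materialized object value.
    Automerge.getObjectId(doc.list)    // expected: non-null, the list's object id
    Automerge.getObjectId(doc.text)    // expected: null, doc.text is now a plain string

    // New two-argument form: id of the object stored under a property of doc.
    Automerge.getObjectId(doc, "text") // expected: the id of the underlying text object
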
@@ -813,6 +811,25 @@ export function getMissingDeps<T>(doc: Doc<T>, heads: Heads): Heads {
return state.handle.getMissingDeps(heads)
}

export function splice<T>(doc: Doc<T>, prop: Prop, index: number, del: number, newText?: string) {
if (!Reflect.get(doc, IS_PROXY)) {
throw new RangeError("object cannot be modified outside of a change block")
}
const state = _state(doc, false)
const objectId = _obj(doc)
if (!objectId) {
throw new RangeError("invalid object for splice")
}
const value = state.handle.getWithType(objectId, prop)
if (value === null) {
throw new RangeError("Cannot splice, not a valid value");
} else if (value[0] === 'text') {
return state.handle.splice(value[1], index, del, newText)
} else {
throw new RangeError(`Cannot splice, value is of type '${value[0]}', must be 'text'`);
}
}

/**
* Get the hashes of the heads of this document
*/

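The splice export above replaces in-place mutation of Automerge.Text: callers pass the containing object, the property name, an index, a deletion count and an optional insertion string. A short usage sketch mirroring the calls made in the tests later in this diff (import path assumed):

    import * as Automerge from "@automerge/automerge" // import path assumed

    let doc = Automerge.from({ note: "hello world" }) // plain strings are stored as text objects

    doc = Automerge.change(doc, d => {
      // splice(doc, prop, index, del, newText?)
      Automerge.splice(d, "note", 5, 0, ",")     // insert: "hello, world"
      Automerge.splice(d, "note", 7, 5, "there") // replace "world": "hello, there"
    })
    // doc.note === "hello, there"

    // Outside a change block the IS_PROXY check above rejects the call:
    //   Automerge.splice(doc, "note", 0, 0, "x")
    //   -> RangeError: object cannot be modified outside of a change block
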
@@ -4,7 +4,7 @@ import { Prop } from "@automerge/automerge-wasm"
import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./types"
import { Counter, getWriteableCounter } from "./counter"
import { Text } from "./text"
import { STATE, HEADS, TRACE, FROZEN, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants"
import { STATE, HEADS, TRACE, FROZEN, IS_PROXY, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants"

function parseListIndex(key) {
if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10)
@@ -30,7 +30,8 @@ function valueAt(target, prop: Prop) : AutomergeValue | undefined {
case undefined: return;
case "map": return mapProxy(context, val, [ ... path, prop ], readonly, heads);
case "list": return listProxy(context, val, [ ... path, prop ], readonly, heads);
case "text": return textProxy(context, val, [ ... path, prop ], readonly, heads);
//case "text": return textProxy(context, val, [ ... path, prop ], readonly, heads);
case "text": return context.text(val, heads);
//case "table":
//case "cursor":
case "str": return val;
@@ -66,8 +67,8 @@ function import_value(value) {
return [ value.value, "f64" ]
} else if (value[COUNTER]) {
return [ value.value, "counter" ]
} else if (value[TEXT]) {
return [ value, "text" ]
//} else if (value[TEXT]) {
// return [ value, "text" ]
} else if (value instanceof Date) {
return [ value.getTime(), "timestamp" ]
} else if (value instanceof Uint8Array) {
@@ -92,7 +93,7 @@ function import_value(value) {
}
break;
case 'string':
return [ value ]
return [ value, "text" ]
break;
default:
throw new RangeError(`Unsupported type of value: ${typeof value}`)
@@ -104,11 +105,12 @@ const MapHandler = {
const { context, objectId, readonly, frozen, heads, cache } = target
if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] }
if (key === OBJECT_ID) return objectId
if (key === IS_PROXY) return true
if (key === READ_ONLY) return readonly
if (key === FROZEN) return frozen
if (key === HEADS) return heads
if (key === TRACE) return target.trace
if (key === STATE) return context;
if (key === STATE) return { handle: context };
if (!cache[key]) {
cache[key] = valueAt(target, key)
}
@@ -150,11 +152,14 @@ const MapHandler = {
break
}
case "text": {
const text = context.putObject(objectId, key, "", "text")
context.putObject(objectId, key, value, "text")
/*
const text = context.putObject(objectId, key, value, "text")
const proxyText = textProxy(context, text, [ ... path, key ], readonly );
for (let i = 0; i < value.length; i++) {
proxyText[i] = value.get(i)
}
*/
break
}
case "map": {
@@ -212,11 +217,12 @@ const ListHandler = {
if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } }
if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
if (index === OBJECT_ID) return objectId
if (index === IS_PROXY) return true
if (index === READ_ONLY) return readonly
if (index === FROZEN) return frozen
if (index === HEADS) return heads
if (index === TRACE) return target.trace
if (index === STATE) return context;
if (index === STATE) return { handle: context };
if (index === 'length') return context.length(objectId, heads);
if (typeof index === 'number') {
return valueAt(target, index)
@@ -268,12 +274,12 @@ const ListHandler = {
case "text": {
let text
if (index >= context.length(objectId)) {
text = context.insertObject(objectId, index, "", "text")
text = context.insertObject(objectId, index, value, "text")
} else {
text = context.putObject(objectId, index, "", "text")
text = context.putObject(objectId, index, value, "text")
}
const proxyText = textProxy(context, text, [ ... path, index ], readonly);
proxyText.splice(0,0,...value)
//const proxyText = textProxy(context, text, [ ... path, index ], readonly);
//proxyText.splice(0,0,...value)
break;
}
case "map": {
@@ -350,11 +356,12 @@ const TextHandler = Object.assign({}, ListHandler, {
if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } }
if (index === OBJECT_ID) return objectId
if (index === IS_PROXY) return true
if (index === READ_ONLY) return readonly
if (index === FROZEN) return frozen
if (index === HEADS) return heads
if (index === TRACE) return target.trace
if (index === STATE) return context;
if (index === STATE) return { handle: context };
if (index === 'length') return context.length(objectId, heads);
if (typeof index === 'number') {
return valueAt(target, index)
@@ -377,11 +384,13 @@ export function listProxy(context: Automerge, objectId: ObjID, path?: Prop[], re
return new Proxy(target, ListHandler)
}

/*
export function textProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : TextValue {
const target = []
Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}})
return new Proxy(target, TextHandler)
}
*/

export function rootProxy<T>(context: Automerge, readonly?: boolean) : T {
/* eslint-disable-next-line */
@@ -406,8 +415,12 @@ function listMethods(target) {
start = parseListIndex(start || 0)
end = parseListIndex(end || length)
for (let i = start; i < Math.min(end, length); i++) {
if (datatype === "text" || datatype === "list" || datatype === "map") {
context.putObject(objectId, i, value, datatype)
} else {
context.put(objectId, i, value, datatype)
}
}
return this
},

@@ -482,9 +495,7 @@ function listMethods(target) {
break;
}
case "text": {
const text = context.insertObject(objectId, index, "", "text")
const proxyText = textProxy(context, text, [ ... path, index ], readonly);
proxyText.splice(0,0,...value)
context.insertObject(objectId, index, value)
break;
}
case "map": {

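Taken together, the proxy changes above mean that a plain JavaScript string assigned inside a change block is now stored as a text object (import_value returns [ value, "text" ] for strings, and the handlers call putObject/insertObject with the "text" datatype), while reads go through context.text() and come back as ordinary strings. A small sketch of the observable behaviour, under the same import assumption as the earlier examples:

    import * as Automerge from "@automerge/automerge" // import path assumed

    let doc = Automerge.init<{ title?: string }>()
    doc = Automerge.change(doc, d => {
      d.title = "My document" // stored as a text object rather than a scalar "str"
    })

    typeof doc.title // expected: "string", no Text wrapper on read
    doc.title        // expected: "My document"
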
@@ -1,6 +1,7 @@
import * as assert from 'assert'
import {Counter} from 'automerge'
import * as Automerge from '../src'
import * as WASM from "@automerge/automerge-wasm"

describe('Automerge', () => {
describe('basics', () => {
@@ -198,10 +199,9 @@ describe('Automerge', () => {
})
it('handle text', () => {
let doc1 = Automerge.init()
let tmp = new Automerge.Text("hello")
let doc2 = Automerge.change(doc1, (d) => {
d.list = new Automerge.Text("hello")
d.list.insertAt(2,"Z")
d.list = "hello"
Automerge.splice(d, "list", 2, 0, "Z")
})
let changes = Automerge.getChanges(doc1, doc2)
let docB1 = Automerge.init()
@@ -209,6 +209,15 @@ describe('Automerge', () => {
assert.deepEqual(docB2, doc2);
})

it('handle non-text strings', () => {
let doc1 = WASM.create();
doc1.put("_root", "text", "hello world");
let doc2 = Automerge.load(doc1.save())
assert.throws(() => {
Automerge.change(doc2, (d) => { Automerge.splice(d, "text", 1, 0, "Z") })
}, /Cannot splice/)
})

it('have many list methods', () => {
let doc1 = Automerge.from({ list: [1,2,3] })
assert.deepEqual(doc1, { list: [1,2,3] });
@@ -240,7 +249,7 @@ describe('Automerge', () => {
})

it('lists and text have indexof', () => {
let doc = Automerge.from({ list: [0,1,2,3,4,5,6], text: new Automerge.Text("hello world") })
let doc = Automerge.from({ list: [0,1,2,3,4,5,6], text: "hello world" })
console.log(doc.list.indexOf(5))
console.log(doc.text.indexOf("world"))
})
@@ -329,7 +338,7 @@ describe('Automerge', () => {
"date": new Date(),
"counter": new Automerge.Counter(),
"bytes": new Uint8Array(10),
"text": new Automerge.Text(),
"text": "",
"list": [],
"map": {}
})
@@ -348,7 +357,7 @@ describe('Automerge', () => {
})

it("should return non-null for map, list, text, and objects", () => {
assert.notEqual(Automerge.getObjectId(s1.text), null)
assert.equal(Automerge.getObjectId(s1.text), null)
assert.notEqual(Automerge.getObjectId(s1.list), null)
assert.notEqual(Automerge.getObjectId(s1.map), null)
})

@@ -4,7 +4,7 @@ import { assertEqualsOneOf } from './helpers'
import { decodeChange } from './legacy/columnar'

const UUID_PATTERN = /^[0-9a-f]{32}$/
const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/
const OPID_PATTERN = /^[0-9]+@([0-9a-f][0-9a-f])*$/

// CORE FEATURES
//
@@ -75,7 +75,7 @@ describe('Automerge', () => {
describe('sequential use', () => {
let s1, s2
beforeEach(() => {
s1 = Automerge.init()
s1 = Automerge.init("aabbcc")
})

it('should not mutate objects', () => {
@@ -93,7 +93,11 @@ describe('Automerge', () => {
assert.deepStrictEqual(change, {
actor: change.actor, deps: [], seq: 1, startOp: 1,
hash: change.hash, message: '', time: change.time,
ops: [{obj: '_root', key: 'foo', action: 'set', insert: false, value: 'bar', pred: []}]
ops: [
{obj: '_root', key: 'foo', action: 'makeText', insert: false, pred: []},
{action: 'set', elemId: '_head', insert: true, obj: '1@aabbcc', pred: [], value: 'b' },
{action: 'set', elemId: '2@aabbcc', insert: true, obj: '1@aabbcc', pred: [], value: 'a' },
{action: 'set', elemId: '3@aabbcc', insert: true, obj: '1@aabbcc', pred: [], value: 'r' }]
})
})

@@ -287,11 +291,12 @@ describe('Automerge', () => {
}, doc => {
doc.birds = ['Goldfinch']
})
assert.strictEqual(callbacks.length, 2)
assert.deepStrictEqual(callbacks[0].patch, { action: "put", path: ["birds"], value: [], conflict: false})
assert.deepStrictEqual(callbacks[1].patch, { action: "splice", path: ["birds",0], values: ["Goldfinch"] })
assert.strictEqual(callbacks.length, 1)
assert.deepStrictEqual(callbacks[0].patch[0], { action: "put", path: ["birds"], value: [] })
assert.deepStrictEqual(callbacks[0].patch[1], { action: "insert", path: ["birds",0], values: [""] })
assert.deepStrictEqual(callbacks[0].patch[2], { action: "splice", path: ["birds",0, 0], value: "Goldfinch" })
assert.strictEqual(callbacks[0].before, s1)
assert.strictEqual(callbacks[1].after, s2)
assert.strictEqual(callbacks[0].after, s2)
})

it('should call a patchCallback set up on document initialisation', () => {
@@ -302,8 +307,11 @@ describe('Automerge', () => {
const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch')
const actor = Automerge.getActorId(s1)
assert.strictEqual(callbacks.length, 1)
assert.deepStrictEqual(callbacks[0].patch, {
action: "put", path: ["bird"], value: "Goldfinch", conflict: false
assert.deepStrictEqual(callbacks[0].patch[0], {
action: "put", path: ["bird"], value: ""
})
assert.deepStrictEqual(callbacks[0].patch[1], {
action: "splice", path: ["bird", 0], value: "Goldfinch"
})
assert.strictEqual(callbacks[0].before, s1)
assert.strictEqual(callbacks[0].after, s2)
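The reworked assertions above reflect that a patch callback now receives an array of patches per change (for example a put of an empty value followed by insert/splice patches as text is filled in), rather than a single patch object with a conflict flag. A hedged sketch of consuming the new shape, modelled on the tests above (import path and exact callback signature assumed):

    import * as Automerge from "@automerge/automerge" // import path assumed

    const patches: Automerge.Patch[] = []
    let doc = Automerge.init<{ bird?: string }>({
      patchCallback: (p) => patches.push(...p), // p is an array of patches
    })
    doc = Automerge.change(doc, d => { d.bird = "Goldfinch" })

    // Expected contents of `patches`, per the assertions above:
    // [ { action: "put",    path: ["bird"],    value: "" },
    //   { action: "splice", path: ["bird", 0], value: "Goldfinch" } ]
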
@@ -868,20 +876,20 @@ describe('Automerge', () => {
s1 = Automerge.change(s1, doc => doc.birds = ['finch'])
s2 = Automerge.merge(s2, s1)
s1 = Automerge.change(s1, doc => doc.birds[0] = 'greenfinch')
s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch')
s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch_')
s3 = Automerge.merge(s1, s2)
if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
assert.deepStrictEqual(s3.birds, ['greenfinch'])
} else {
assert.deepStrictEqual(s3.birds, ['goldfinch'])
assert.deepStrictEqual(s3.birds, ['goldfinch_'])
}
assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), {
[`3@${Automerge.getActorId(s1)}`]: 'greenfinch',
[`3@${Automerge.getActorId(s2)}`]: 'goldfinch'
[`8@${Automerge.getActorId(s1)}`]: 'greenfinch',
[`8@${Automerge.getActorId(s2)}`]: 'goldfinch_'
})
})

it.skip('should handle assignment conflicts of different types', () => {
it('should handle assignment conflicts of different types', () => {
s1 = Automerge.change(s1, doc => doc.field = 'string')
s2 = Automerge.change(s2, doc => doc.field = ['list'])
s3 = Automerge.change(s3, doc => doc.field = {thing: 'map'})
@@ -906,8 +914,7 @@ describe('Automerge', () => {
})
})

// FIXME - difficult bug here - patches arrive for conflicted subobject
it.skip('should handle changes within a conflicting list element', () => {
it('should handle changes within a conflicting list element', () => {
s1 = Automerge.change(s1, doc => doc.list = ['hello'])
s2 = Automerge.merge(s2, s1)
s1 = Automerge.change(s1, doc => doc.list[0] = {map1: true})
@@ -921,8 +928,8 @@ describe('Automerge', () => {
assert.deepStrictEqual(s3.list, [{map2: true, key: 2}])
}
assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), {
[`3@${Automerge.getActorId(s1)}`]: {map1: true, key: 1},
[`3@${Automerge.getActorId(s2)}`]: {map2: true, key: 2}
[`8@${Automerge.getActorId(s1)}`]: {map1: true, key: 1},
[`8@${Automerge.getActorId(s2)}`]: {map2: true, key: 2}
})
})

@@ -1154,7 +1161,8 @@ describe('Automerge', () => {
hash: changes12[0].hash, actor: '01234567', seq: 1, startOp: 1,
time: changes12[0].time, message: '', deps: [], ops: [
{obj: '_root', action: 'makeList', key: 'list', insert: false, pred: []},
{obj: listId, action: 'set', elemId: '_head', insert: true, value: 'a', pred: []}
{obj: listId, action: 'makeText', elemId: '_head', insert: true, pred: []},
{obj: "2@01234567", action: 'set', elemId: '_head', insert: true, value: 'a', pred: []}
]
}])
const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0))
@@ -1163,9 +1171,10 @@ describe('Automerge', () => {
const changes45 = Automerge.getAllChanges(s5).map(decodeChange)
assert.deepStrictEqual(s5, {list: ['b']})
assert.deepStrictEqual(changes45[2], {
hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 4,
hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 5,
time: changes45[2].time, message: '', deps: [changes45[1].hash], ops: [
{obj: listId, action: 'set', elemId: '_head', insert: true, value: 'b', pred: []}
{obj: listId, action: 'makeText', elemId: '_head', insert: true, pred: []},
{obj: "5@01234567", action: 'set', elemId: '_head', insert: true, value: 'b', pred: []}
]
})
})
@@ -1305,8 +1314,8 @@ describe('Automerge', () => {

// TEXT
it('should handle updates to a text object', () => {
let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('ab'))
let s2 = Automerge.change(s1, doc => doc.text.set(0, 'A'))
let s1 = Automerge.change(Automerge.init(), doc => doc.text = 'ab')
let s2 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 1, "A"))
let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2))
assert.deepStrictEqual([...s3.text], ['A', 'b'])
})
@@ -1352,11 +1361,12 @@ describe('Automerge', () => {
callbacks.push({patch, before, after})
}
})
assert.strictEqual(callbacks.length, 2)
assert.deepStrictEqual(callbacks[0].patch, { action: 'put', path: ["birds"], value: [], conflict: false })
assert.deepStrictEqual(callbacks[1].patch, { action: 'splice', path: ["birds",0], values: ["Goldfinch"] })
assert.strictEqual(callbacks.length, 1)
assert.deepStrictEqual(callbacks[0].patch[0], { action: 'put', path: ["birds"], value: [] })
assert.deepStrictEqual(callbacks[0].patch[1], { action: 'insert', path: ["birds",0], values: [""] })
assert.deepStrictEqual(callbacks[0].patch[2], { action: 'splice', path: ["birds",0,0], value: "Goldfinch" })
assert.strictEqual(callbacks[0].before, before)
assert.strictEqual(callbacks[1].after, after)
assert.strictEqual(callbacks[0].after, after)
})

it('should merge multiple applied changes into one patch', () => {
@@ -1364,23 +1374,24 @@ describe('Automerge', () => {
const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch'))
const patches = [], actor = Automerge.getActorId(s2)
Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2),
{patchCallback: p => patches.push(p)})
{patchCallback: p => patches.push(... p)})
assert.deepStrictEqual(patches, [
{ action: 'put', conflict: false, path: [ 'birds' ], value: [] },
{ action: "splice", path: [ "birds", 0 ], values: [ "Goldfinch", "Chaffinch" ] }
{ action: 'put', path: [ 'birds' ], value: [] },
{ action: "insert", path: [ "birds", 0 ], values: [ "" ] },
{ action: "splice", path: [ "birds", 0, 0 ], value: "Goldfinch" },
{ action: "insert", path: [ "birds", 1 ], values: [ "" ] },
{ action: "splice", path: [ "birds", 1, 0 ], value: "Chaffinch" }
])
})

it('should call a patchCallback registered on doc initialisation', () => {
const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch')
const patches = [], actor = Automerge.getActorId(s1)
const before = Automerge.init({patchCallback: p => patches.push(p)})
const before = Automerge.init({patchCallback: p => patches.push(... p)})
Automerge.applyChanges(before, Automerge.getAllChanges(s1))
assert.deepStrictEqual(patches, [{
action: "put",
conflict: false,
path: [ "bird" ],
value: "Goldfinch" }
assert.deepStrictEqual(patches, [
{ action: "put", path: [ "bird" ], value: "" },
{ action: "splice", path: [ "bird", 0 ], value: "Goldfinch" }
])
})
})

@@ -527,6 +527,7 @@ describe('Data sync protocol', () => {
assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort())
})

// FIXME - this has a periodic failure
it('should sync two nodes with connection reset', () => {
s1 = decodeSyncState(encodeSyncState(s1))
s2 = decodeSyncState(encodeSyncState(s2))

@@ -197,502 +197,101 @@ function applyDeltaDocToAutomergeText(delta, doc) {
describe('Automerge.Text', () => {
let s1, s2
beforeEach(() => {
s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text())
s1 = Automerge.change(Automerge.init(), doc => doc.text = "")
s2 = Automerge.merge(Automerge.init(), s1)
})

it('should support insertion', () => {
s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a'))
s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "a"))
assert.strictEqual(s1.text.length, 1)
assert.strictEqual(s1.text.get(0), 'a')
assert.strictEqual(s1.text.toString(), 'a')
assert.strictEqual(s1.text[0], 'a')
assert.strictEqual(s1.text, 'a')
//assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`)
})

it('should support deletion', () => {
s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1))
s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc"))
s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 1))
assert.strictEqual(s1.text.length, 2)
assert.strictEqual(s1.text.get(0), 'a')
assert.strictEqual(s1.text.get(1), 'c')
assert.strictEqual(s1.text.toString(), 'ac')
assert.strictEqual(s1.text[0], 'a')
assert.strictEqual(s1.text[1], 'c')
assert.strictEqual(s1.text, 'ac')
})

it("should support implicit and explicit deletion", () => {
s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c"))
s1 = Automerge.change(s1, doc => doc.text.deleteAt(1))
s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0))
s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc"))
s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 1))
s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 0))
assert.strictEqual(s1.text.length, 2)
assert.strictEqual(s1.text.get(0), "a")
assert.strictEqual(s1.text.get(1), "c")
assert.strictEqual(s1.text.toString(), "ac")
assert.strictEqual(s1.text[0], "a")
assert.strictEqual(s1.text[1], "c")
assert.strictEqual(s1.text, "ac")
})

it('should handle concurrent insertion', () => {
s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
s2 = Automerge.change(s2, doc => doc.text.insertAt(0, 'x', 'y', 'z'))
s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc"))
s2 = Automerge.change(s2, doc => Automerge.splice(doc, "text", 0, 0, "xyz"))
s1 = Automerge.merge(s1, s2)
assert.strictEqual(s1.text.length, 6)
assertEqualsOneOf(s1.text.toString(), 'abcxyz', 'xyzabc')
assertEqualsOneOf(s1.text.join(''), 'abcxyz', 'xyzabc')
assertEqualsOneOf(s1.text, 'abcxyz', 'xyzabc')
})

it('should handle text and other ops in the same change', () => {
s1 = Automerge.change(s1, doc => {
doc.foo = 'bar'
doc.text.insertAt(0, 'a')
Automerge.splice(doc, "text", 0, 0, 'a')
})
assert.strictEqual(s1.foo, 'bar')
assert.strictEqual(s1.text.toString(), 'a')
assert.strictEqual(s1.text.join(''), 'a')
assert.strictEqual(s1.text, 'a')
assert.strictEqual(s1.text, 'a')
})

it('should serialize to JSON as a simple string', () => {
s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', '"', 'b'))
s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, 'a"b'))
assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}')
})

it('should allow modification before an object is assigned to a document', () => {
s1 = Automerge.change(Automerge.init(), doc => {
const text = new Automerge.Text()
text.insertAt(0, 'a', 'b', 'c', 'd')
text.deleteAt(2)
doc.text = text
assert.strictEqual(doc.text.toString(), 'abd')
assert.strictEqual(doc.text.join(''), 'abd')
})
assert.strictEqual(s1.text.toString(), 'abd')
assert.strictEqual(s1.text.join(''), 'abd')
})

it('should allow modification after an object is assigned to a document', () => {
s1 = Automerge.change(Automerge.init(), doc => {
const text = new Automerge.Text()
doc.text = text
doc.text.insertAt(0, 'a', 'b', 'c', 'd')
doc.text.deleteAt(2)
assert.strictEqual(doc.text.toString(), 'abd')
assert.strictEqual(doc.text.join(''), 'abd')
doc.text = ""
Automerge.splice(doc ,"text", 0, 0, 'abcd')
Automerge.splice(doc ,"text", 2, 1)
assert.strictEqual(doc.text, 'abd')
})
assert.strictEqual(s1.text.join(''), 'abd')
assert.strictEqual(s1.text, 'abd')
})

it('should not allow modification outside of a change callback', () => {
assert.throws(() => s1.text.insertAt(0, 'a'), /object cannot be modified outside of a change block/)
assert.throws(() => Automerge.splice(s1 ,"text", 0, 0, 'a'), /object cannot be modified outside of a change block/)
})

describe('with initial value', () => {
it('should accept a string as initial value', () => {
let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('init'))
assert.strictEqual(s1.text.length, 4)
assert.strictEqual(s1.text.get(0), 'i')
assert.strictEqual(s1.text.get(1), 'n')
assert.strictEqual(s1.text.get(2), 'i')
assert.strictEqual(s1.text.get(3), 't')
assert.strictEqual(s1.text.toString(), 'init')
})

it('should accept an array as initial value', () => {
let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text(['i', 'n', 'i', 't']))
assert.strictEqual(s1.text.length, 4)
assert.strictEqual(s1.text.get(0), 'i')
assert.strictEqual(s1.text.get(1), 'n')
assert.strictEqual(s1.text.get(2), 'i')
assert.strictEqual(s1.text.get(3), 't')
assert.strictEqual(s1.text.toString(), 'init')
})

it('should initialize text in Automerge.from()', () => {
let s1 = Automerge.from({text: new Automerge.Text('init')})
let s1 = Automerge.from({text: 'init'})
assert.strictEqual(s1.text.length, 4)
assert.strictEqual(s1.text.get(0), 'i')
assert.strictEqual(s1.text.get(1), 'n')
assert.strictEqual(s1.text.get(2), 'i')
assert.strictEqual(s1.text.get(3), 't')
assert.strictEqual(s1.text.toString(), 'init')
assert.strictEqual(s1.text[0], 'i')
assert.strictEqual(s1.text[1], 'n')
assert.strictEqual(s1.text[2], 'i')
assert.strictEqual(s1.text[3], 't')
assert.strictEqual(s1.text, 'init')
})

it('should encode the initial value as a change', () => {
const s1 = Automerge.from({text: new Automerge.Text('init')})
const s1 = Automerge.from({text: 'init'})
const changes = Automerge.getAllChanges(s1)
assert.strictEqual(changes.length, 1)
const [s2] = Automerge.applyChanges(Automerge.init(), changes)
assert.strictEqual(s2.text instanceof Automerge.Text, true)
assert.strictEqual(s2.text.toString(), 'init')
assert.strictEqual(s2.text.join(''), 'init')
assert.strictEqual(s2.text, 'init')
assert.strictEqual(s2.text, 'init')
})

it('should allow immediate access to the value', () => {
Automerge.change(Automerge.init(), doc => {
const text = new Automerge.Text('init')
assert.strictEqual(text.length, 4)
assert.strictEqual(text.get(0), 'i')
assert.strictEqual(text.toString(), 'init')
doc.text = text
assert.strictEqual(doc.text.length, 4)
assert.strictEqual(doc.text.get(0), 'i')
assert.strictEqual(doc.text.toString(), 'init')
})
})

it('should allow pre-assignment modification of the initial value', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
const text = new Automerge.Text('init')
text.deleteAt(3)
assert.strictEqual(text.join(''), 'ini')
doc.text = text
assert.strictEqual(doc.text.join(''), 'ini')
assert.strictEqual(doc.text.toString(), 'ini')
})
assert.strictEqual(s1.text.toString(), 'ini')
assert.strictEqual(s1.text.join(''), 'ini')
})

it('should allow post-assignment modification of the initial value', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
const text = new Automerge.Text('init')
doc.text = text
doc.text.deleteAt(0)
doc.text.insertAt(0, 'I')
assert.strictEqual(doc.text.join(''), 'Init')
assert.strictEqual(doc.text.toString(), 'Init')
})
assert.strictEqual(s1.text.join(''), 'Init')
assert.strictEqual(s1.text.toString(), 'Init')
})
})

describe('non-textual control characters', () => {
let s1
beforeEach(() => {
s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text()
doc.text.insertAt(0, 'a')
doc.text.insertAt(1, { attribute: 'bold' })
})
})

it('should allow fetching non-textual characters', () => {
assert.deepEqual(s1.text.get(1), { attribute: 'bold' })
//assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`)
})

it('should include control characters in string length', () => {
assert.strictEqual(s1.text.length, 2)
assert.strictEqual(s1.text.get(0), 'a')
})

it('should replace control characters from toString()', () => {
assert.strictEqual(s1.text.toString(), 'a\uFFFC')
})

it('should allow control characters to be updated', () => {
const s2 = Automerge.change(s1, doc => doc.text.get(1).attribute = 'italic')
const s3 = Automerge.load(Automerge.save(s2))
assert.strictEqual(s1.text.get(1).attribute, 'bold')
assert.strictEqual(s2.text.get(1).attribute, 'italic')
assert.strictEqual(s3.text.get(1).attribute, 'italic')
})

describe('spans interface to Text', () => {
it('should return a simple string as a single span', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('hello world')
})
assert.deepEqual(s1.text.toSpans(), ['hello world'])
})
it('should return an empty string as an empty array', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text()
})
assert.deepEqual(s1.text.toSpans(), [])
})
it('should split a span at a control character', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('hello world')
doc.text.insertAt(5, { attributes: { bold: true } })
})
assert.deepEqual(s1.text.toSpans(),
['hello', { attributes: { bold: true } }, ' world'])
})
it('should allow consecutive control characters', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('hello world')
doc.text.insertAt(5, { attributes: { bold: true } })
doc.text.insertAt(6, { attributes: { italic: true } })
})
assert.deepEqual(s1.text.toSpans(),
['hello',
{ attributes: { bold: true } },
{ attributes: { italic: true } },
' world'
])
})
it('should allow non-consecutive control characters', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('hello world')
doc.text.insertAt(5, { attributes: { bold: true } })
doc.text.insertAt(12, { attributes: { italic: true } })
})
assert.deepEqual(s1.text.toSpans(),
['hello',
{ attributes: { bold: true } },
' world',
{ attributes: { italic: true } }
])
})

it('should be convertable into a Quill delta', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('Gandalf the Grey')
doc.text.insertAt(0, { attributes: { bold: true } })
doc.text.insertAt(7 + 1, { attributes: { bold: null } })
doc.text.insertAt(12 + 2, { attributes: { color: '#cccccc' } })
})

let deltaDoc = automergeTextToDeltaDoc(s1.text)

// From https://quilljs.com/docs/delta/
let expectedDoc = [
{ insert: 'Gandalf', attributes: { bold: true } },
{ insert: ' the ' },
{ insert: 'Grey', attributes: { color: '#cccccc' } }
]

assert.deepEqual(deltaDoc, expectedDoc)
})

it('should support embeds', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('')
doc.text.insertAt(0, { attributes: { link: 'https://quilljs.com' } })
doc.text.insertAt(1, {
image: 'https://quilljs.com/assets/images/icon.png'
})
doc.text.insertAt(2, { attributes: { link: null } })
})

let deltaDoc = automergeTextToDeltaDoc(s1.text)

// From https://quilljs.com/docs/delta/
let expectedDoc = [{
// An image link
insert: {
image: 'https://quilljs.com/assets/images/icon.png'
},
attributes: {
link: 'https://quilljs.com'
}
}]

assert.deepEqual(deltaDoc, expectedDoc)
})

it('should handle concurrent overlapping spans', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('Gandalf the Grey')
})

let s2 = Automerge.merge(Automerge.init(), s1)

let s3 = Automerge.change(s1, doc => {
doc.text.insertAt(8, { attributes: { bold: true } })
doc.text.insertAt(16 + 1, { attributes: { bold: null } })
})

let s4 = Automerge.change(s2, doc => {
doc.text.insertAt(0, { attributes: { bold: true } })
doc.text.insertAt(11 + 1, { attributes: { bold: null } })
})

let merged = Automerge.merge(s3, s4)

let deltaDoc = automergeTextToDeltaDoc(merged.text)

// From https://quilljs.com/docs/delta/
let expectedDoc = [
{ insert: 'Gandalf the Grey', attributes: { bold: true } },
]

assert.deepEqual(deltaDoc, expectedDoc)
})

it('should handle debolding spans', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('Gandalf the Grey')
})

let s2 = Automerge.merge(Automerge.init(), s1)

let s3 = Automerge.change(s1, doc => {
doc.text.insertAt(0, { attributes: { bold: true } })
doc.text.insertAt(16 + 1, { attributes: { bold: null } })
})

let s4 = Automerge.change(s2, doc => {
doc.text.insertAt(8, { attributes: { bold: null } })
doc.text.insertAt(11 + 1, { attributes: { bold: true } })
})


let merged = Automerge.merge(s3, s4)

let deltaDoc = automergeTextToDeltaDoc(merged.text)

// From https://quilljs.com/docs/delta/
let expectedDoc = [
{ insert: 'Gandalf ', attributes: { bold: true } },
{ insert: 'the' },
{ insert: ' Grey', attributes: { bold: true } },
]

assert.deepEqual(deltaDoc, expectedDoc)
})

// xxx: how would this work for colors?
it('should handle destyling across destyled spans', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('Gandalf the Grey')
})

let s2 = Automerge.merge(Automerge.init(), s1)

let s3 = Automerge.change(s1, doc => {
doc.text.insertAt(0, { attributes: { bold: true } })
doc.text.insertAt(16 + 1, { attributes: { bold: null } })
})

let s4 = Automerge.change(s2, doc => {
doc.text.insertAt(8, { attributes: { bold: null } })
doc.text.insertAt(11 + 1, { attributes: { bold: true } })
})

let merged = Automerge.merge(s3, s4)

let final = Automerge.change(merged, doc => {
doc.text.insertAt(3 + 1, { attributes: { bold: null } })
doc.text.insertAt(doc.text.length, { attributes: { bold: true } })
})

let deltaDoc = automergeTextToDeltaDoc(final.text)

// From https://quilljs.com/docs/delta/
let expectedDoc = [
{ insert: 'Gan', attributes: { bold: true } },
{ insert: 'dalf the Grey' },
]

assert.deepEqual(deltaDoc, expectedDoc)
})

it('should apply an insert', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('Hello world')
})

const delta = [
{ retain: 6 },
{ insert: 'reader' },
{ delete: 5 }
]

let s2 = Automerge.change(s1, doc => {
applyDeltaDocToAutomergeText(delta, doc)
})

//assert.strictEqual(s2.text.join(''), 'Hello reader')
assert.strictEqual(s2.text.toString(), 'Hello reader')
})

it('should apply an insert with control characters', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('Hello world')
})

const delta = [
{ retain: 6 },
{ insert: 'reader', attributes: { bold: true } },
{ delete: 5 },
{ insert: '!' }
]

let s2 = Automerge.change(s1, doc => {
applyDeltaDocToAutomergeText(delta, doc)
})

assert.strictEqual(s2.text.toString(), 'Hello \uFFFCreader\uFFFC!')
assert.deepEqual(s2.text.toSpans(), [
"Hello ",
{ attributes: { bold: true } },
"reader",
{ attributes: { bold: null } },
"!"
])
})

it('should account for control characters in retain/delete lengths', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('Hello world')
doc.text.insertAt(4, { attributes: { color: '#ccc' } })
doc.text.insertAt(10, { attributes: { color: '#f00' } })
})

const delta = [
{ retain: 6 },
{ insert: 'reader', attributes: { bold: true } },
{ delete: 5 },
{ insert: '!' }
]

let s2 = Automerge.change(s1, doc => {
applyDeltaDocToAutomergeText(delta, doc)
})

assert.strictEqual(s2.text.toString(), 'Hell\uFFFCo \uFFFCreader\uFFFC\uFFFC!')
assert.deepEqual(s2.text.toSpans(), [
"Hell",
{ attributes: { color: '#ccc'} },
"o ",
{ attributes: { bold: true } },
"reader",
{ attributes: { bold: null } },
{ attributes: { color: '#f00'} },
"!"
])
})

it('should support embeds', () => {
let s1 = Automerge.change(Automerge.init(), doc => {
doc.text = new Automerge.Text('')
})

let deltaDoc = [{
// An image link
insert: {
image: 'https://quilljs.com/assets/images/icon.png'
},
attributes: {
link: 'https://quilljs.com'
}
}]

let s2 = Automerge.change(s1, doc => {
applyDeltaDocToAutomergeText(deltaDoc, doc)
})

assert.deepEqual(s2.text.toSpans(), [
{ attributes: { link: 'https://quilljs.com' } },
{ image: 'https://quilljs.com/assets/images/icon.png'},
{ attributes: { link: null } },
])
})
})
})

it('should support unicode when creating text', () => {
s1 = Automerge.from({
text: new Automerge.Text('🐦')
text: '🐦'
})
assert.strictEqual(s1.text.get(0), '🐦')
assert.strictEqual(s1.text, '🐦')
})
})

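The rewritten text tests above all follow one migration pattern: where code previously constructed and mutated an Automerge.Text instance, it now assigns a plain string and edits it with Automerge.splice, and reads come back as ordinary strings. A condensed before/after sketch, with the old calls shown as comments (import path assumed):

    import * as Automerge from "@automerge/automerge" // import path assumed

    // Before: doc.text = new Automerge.Text("hello"); doc.text.insertAt(2, "Z")
    // After:
    let doc = Automerge.change(Automerge.init<{ text?: string }>(), d => {
      d.text = "hello"
      Automerge.splice(d, "text", 2, 0, "Z")
    })

    doc.text            // expected: "heZllo", a plain string
    JSON.stringify(doc) // expected: '{"text":"heZllo"}', no Text wrapper in the JSON
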
rust/automerge-c/.gitignore (vendored): 7 changed lines

@@ -1,3 +1,10 @@
automerge
automerge.h
automerge.o
*.cmake
CMakeFiles
Makefile
DartConfiguration.tcl
config.h
CMakeCache.txt
Cargo

@@ -60,11 +60,16 @@ static void test_AMkeys_empty() {
static void test_AMkeys_list() {
AMresultStack* stack = NULL;
AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc;
AMfree(AMlistPutInt(doc, AM_ROOT, 0, true, 1));
AMfree(AMlistPutInt(doc, AM_ROOT, 1, true, 2));
AMfree(AMlistPutInt(doc, AM_ROOT, 2, true, 3));
AMobjId const* const list = AMpush(
&stack,
AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),
AM_VALUE_OBJ_ID,
cmocka_cb).obj_id;
AMfree(AMlistPutInt(doc, list, 0, true, 0));
AMfree(AMlistPutInt(doc, list, 1, true, 0));
AMfree(AMlistPutInt(doc, list, 2, true, 0));
AMstrs forward = AMpush(&stack,
AMkeys(doc, AM_ROOT, NULL),
AMkeys(doc, list, NULL),
AM_VALUE_STRS,
cmocka_cb).strs;
assert_int_equal(AMstrsSize(&forward), 3);
@@ -72,35 +77,35 @@ static void test_AMkeys_list() {
assert_int_equal(AMstrsSize(&reverse), 3);
/* Forward iterator forward. */
AMbyteSpan str = AMstrsNext(&forward, 1);
assert_ptr_equal(strstr(str.src, "1@"), str.src);
str = AMstrsNext(&forward, 1);
assert_ptr_equal(strstr(str.src, "2@"), str.src);
str = AMstrsNext(&forward, 1);
assert_ptr_equal(strstr(str.src, "3@"), str.src);
str = AMstrsNext(&forward, 1);
assert_ptr_equal(strstr(str.src, "4@"), str.src);
assert_null(AMstrsNext(&forward, 1).src);
/* Forward iterator reverse. */
// /* Forward iterator reverse. */
str = AMstrsPrev(&forward, 1);
assert_ptr_equal(strstr(str.src, "4@"), str.src);
str = AMstrsPrev(&forward, 1);
assert_ptr_equal(strstr(str.src, "3@"), str.src);
str = AMstrsPrev(&forward, 1);
assert_ptr_equal(strstr(str.src, "2@"), str.src);
str = AMstrsPrev(&forward, 1);
assert_ptr_equal(strstr(str.src, "1@"), str.src);
assert_null(AMstrsPrev(&forward, 1).src);
/* Reverse iterator forward. */
str = AMstrsNext(&reverse, 1);
assert_ptr_equal(strstr(str.src, "4@"), str.src);
str = AMstrsNext(&reverse, 1);
assert_ptr_equal(strstr(str.src, "3@"), str.src);
str = AMstrsNext(&reverse, 1);
assert_ptr_equal(strstr(str.src, "2@"), str.src);
str = AMstrsNext(&reverse, 1);
assert_ptr_equal(strstr(str.src, "1@"), str.src);
/* Reverse iterator reverse. */
assert_null(AMstrsNext(&reverse, 1).src);
str = AMstrsPrev(&reverse, 1);
assert_ptr_equal(strstr(str.src, "1@"), str.src);
/* Reverse iterator reverse. */
str = AMstrsPrev(&reverse, 1);
assert_ptr_equal(strstr(str.src, "2@"), str.src);
str = AMstrsPrev(&reverse, 1);
assert_ptr_equal(strstr(str.src, "3@"), str.src);
str = AMstrsPrev(&reverse, 1);
assert_ptr_equal(strstr(str.src, "4@"), str.src);
assert_null(AMstrsPrev(&reverse, 1).src);
AMfreeStack(&stack);
}
@@ -202,16 +207,20 @@ static void test_AMputActor_str(void **state) {
static void test_AMspliceText() {
AMresultStack* stack = NULL;
AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc;
AMfree(AMspliceText(doc, AM_ROOT, 0, 0, AMstr("one + ")));
AMfree(AMspliceText(doc, AM_ROOT, 4, 2, AMstr("two = ")));
AMfree(AMspliceText(doc, AM_ROOT, 8, 2, AMstr("three")));
AMbyteSpan const text = AMpush(&stack,
AMtext(doc, AM_ROOT, NULL),
AMobjId const* const text = AMpush(&stack,
AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT),
AM_VALUE_OBJ_ID,
cmocka_cb).obj_id;
AMfree(AMspliceText(doc, text, 0, 0, AMstr("one + ")));
AMfree(AMspliceText(doc, text, 4, 2, AMstr("two = ")));
AMfree(AMspliceText(doc, text, 8, 2, AMstr("three")));
AMbyteSpan const str = AMpush(&stack,
AMtext(doc, text, NULL),
AM_VALUE_STR,
cmocka_cb).str;
static char const* const TEXT_VALUE = "one two three";
assert_int_equal(text.count, strlen(TEXT_VALUE));
assert_memory_equal(text.src, TEXT_VALUE, text.count);
static char const* const STR_VALUE = "one two three";
assert_int_equal(str.count, strlen(STR_VALUE));
assert_memory_equal(str.src, STR_VALUE, str.count);
AMfreeStack(&stack);
}

@ -18,15 +18,20 @@
|
|||
|
||||
static void test_AMlistIncrement(void** state) {
|
||||
GroupState* group_state = *state;
|
||||
AMfree(AMlistPutCounter(group_state->doc, AM_ROOT, 0, true, 0));
|
||||
AMobjId const* const list = AMpush(
|
||||
&group_state->stack,
|
||||
AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),
|
||||
AM_VALUE_OBJ_ID,
|
||||
cmocka_cb).obj_id;
|
||||
AMfree(AMlistPutCounter(group_state->doc, list, 0, true, 0));
|
||||
assert_int_equal(AMpush(&group_state->stack,
|
||||
AMlistGet(group_state->doc, AM_ROOT, 0, NULL),
|
||||
AMlistGet(group_state->doc, list, 0, NULL),
|
||||
AM_VALUE_COUNTER,
|
||||
cmocka_cb).counter, 0);
|
||||
AMfree(AMpop(&group_state->stack));
|
||||
AMfree(AMlistIncrement(group_state->doc, AM_ROOT, 0, 3));
|
||||
AMfree(AMlistIncrement(group_state->doc, list, 0, 3));
|
||||
assert_int_equal(AMpush(&group_state->stack,
|
||||
AMlistGet(group_state->doc, AM_ROOT, 0, NULL),
|
||||
AMlistGet(group_state->doc, list, 0, NULL),
|
||||
AM_VALUE_COUNTER,
|
||||
cmocka_cb).counter, 3);
|
||||
AMfree(AMpop(&group_state->stack));
|
||||
|
@ -37,14 +42,19 @@ static void test_AMlistIncrement(void** state) {
|
|||
#define static_void_test_AMlistPut(suffix, mode, member, scalar_value) \
|
||||
static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \
|
||||
GroupState* group_state = *state; \
|
||||
AMobjId const* const list = AMpush( \
|
||||
&group_state->stack, \
|
||||
AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\
|
||||
AM_VALUE_OBJ_ID, \
|
||||
cmocka_cb).obj_id; \
|
||||
AMfree(AMlistPut ## suffix(group_state->doc, \
|
||||
AM_ROOT, \
|
||||
list, \
|
||||
0, \
|
||||
!strcmp(#mode, "insert"), \
|
||||
scalar_value)); \
|
||||
assert_true(AMpush( \
|
||||
&group_state->stack, \
|
||||
AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \
|
||||
AMlistGet(group_state->doc, list, 0, NULL), \
|
||||
AMvalue_discriminant(#suffix), \
|
||||
cmocka_cb).member == scalar_value); \
|
||||
AMfree(AMpop(&group_state->stack)); \
|
||||
|
@ -57,15 +67,20 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \
|
|||
static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \
|
||||
\
|
||||
GroupState* group_state = *state; \
|
||||
AMobjId const* const list = AMpush( \
|
||||
&group_state->stack, \
|
||||
AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\
|
||||
AM_VALUE_OBJ_ID, \
|
||||
cmocka_cb).obj_id; \
|
||||
AMfree(AMlistPutBytes(group_state->doc, \
|
||||
AM_ROOT, \
|
||||
list, \
|
||||
0, \
|
||||
!strcmp(#mode, "insert"), \
|
||||
bytes_value, \
|
||||
BYTES_SIZE)); \
|
||||
AMbyteSpan const bytes = AMpush( \
|
||||
&group_state->stack, \
|
||||
AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \
|
||||
AMlistGet(group_state->doc, list, 0, NULL), \
|
||||
AM_VALUE_BYTES, \
|
||||
cmocka_cb).bytes; \
|
||||
assert_int_equal(bytes.count, BYTES_SIZE); \
|
||||
|
@ -78,11 +93,16 @@ static void test_AMlistPutBytes_ ## mode(void **state) { \
|
|||
#define static_void_test_AMlistPutNull(mode) \
|
||||
static void test_AMlistPutNull_ ## mode(void **state) { \
|
||||
GroupState* group_state = *state; \
|
||||
AMobjId const* const list = AMpush( \
|
||||
&group_state->stack, \
|
||||
AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\
|
||||
AM_VALUE_OBJ_ID, \
|
||||
cmocka_cb).obj_id; \
|
||||
AMfree(AMlistPutNull(group_state->doc, \
|
||||
AM_ROOT, \
|
||||
list, \
|
||||
0, \
|
||||
!strcmp(#mode, "insert"))); \
|
||||
AMresult* const result = AMlistGet(group_state->doc, AM_ROOT, 0, NULL); \
|
||||
AMresult* const result = AMlistGet(group_state->doc, list, 0, NULL); \
|
||||
if (AMresultStatus(result) != AM_STATUS_OK) { \
|
||||
fail_msg_view("%s", AMerrorMessage(result)); \
|
||||
} \
|
||||
|
@ -96,12 +116,17 @@ static void test_AMlistPutNull_ ## mode(void **state) { \
|
|||
#define static_void_test_AMlistPutObject(label, mode) \
|
||||
static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \
|
||||
GroupState* group_state = *state; \
|
||||
AMobjId const* const list = AMpush( \
|
||||
&group_state->stack, \
|
||||
AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\
|
||||
AM_VALUE_OBJ_ID, \
|
||||
cmocka_cb).obj_id; \
|
||||
AMobjType const obj_type = AMobjType_tag(#label); \
|
||||
if (obj_type != AM_OBJ_TYPE_VOID) { \
|
||||
AMobjId const* const obj_id = AMpush( \
|
||||
&group_state->stack, \
|
||||
AMlistPutObject(group_state->doc, \
|
||||
AM_ROOT, \
|
||||
list, \
|
||||
0, \
|
||||
!strcmp(#mode, "insert"), \
|
||||
obj_type), \
|
||||
|
@ -114,7 +139,7 @@ static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \
|
|||
else { \
|
||||
AMpush(&group_state->stack, \
|
||||
AMlistPutObject(group_state->doc, \
|
||||
AM_ROOT, \
|
||||
list, \
|
||||
0, \
|
||||
!strcmp(#mode, "insert"), \
|
||||
obj_type), \
|
||||
|
@@ -131,20 +156,21 @@ static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \
#define static_void_test_AMlistPutStr(mode, str_value) \
static void test_AMlistPutStr_ ## mode(void **state) { \
GroupState* group_state = *state; \
AMobjId const* const list = AMpush( \
&group_state->stack, \
AMmapPutObject(group_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),\
AM_VALUE_OBJ_ID, \
cmocka_cb).obj_id; \
AMfree(AMlistPutStr(group_state->doc, \
AM_ROOT, \
list, \
0, \
!strcmp(#mode, "insert"), \
AMstr(str_value))); \
AMbyteSpan const str = AMpush( \
&group_state->stack, \
AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \
AMlistGet(group_state->doc, list, 0, NULL), \
AM_VALUE_STR, \
cmocka_cb).str; \
char* const c_str = test_calloc(1, str.count + 1); \
strncpy(c_str, str.src, str.count); \
print_message("str -> \"%s\"\n", c_str); \
test_free(c_str); \
assert_int_equal(str.count, strlen(str_value)); \
assert_memory_equal(str.src, str_value, str.count); \
AMfree(AMpop(&group_state->stack)); \
@@ -392,7 +418,7 @@ static void test_insert_at_index(void** state) {
AMobjId const* const list = AMpush(
&stack,
AMlistPutObject(doc, AM_ROOT, 0, true, AM_OBJ_TYPE_LIST),
AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST),
AM_VALUE_OBJ_ID,
cmocka_cb).obj_id;
/* Insert both at the same index. */
@@ -709,17 +709,10 @@ static void test_should_be_able_to_splice_text(void** state) {
cmocka_cb).obj_id;
/* doc.splice(text, 0, 0, "hello ") */
AMfree(AMspliceText(doc, text, 0, 0, AMstr("hello ")));
/* doc.splice(text, 6, 0, ["w", "o", "r", "l", "d"]) */
static AMvalue const WORLD[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "w", .count = 1}},
{.str_tag = AM_VALUE_STR, .str = {.src = "o", .count = 1}},
{.str_tag = AM_VALUE_STR, .str = {.src = "r", .count = 1}},
{.str_tag = AM_VALUE_STR, .str = {.src = "l", .count = 1}},
{.str_tag = AM_VALUE_STR, .str = {.src = "d", .count = 1}}};
AMfree(AMsplice(doc, text, 6, 0, WORLD, sizeof(WORLD)/sizeof(AMvalue)));
/* doc.splice(text, 11, 0, ["!", "?"]) */
static AMvalue const INTERROBANG[] = {{.str_tag = AM_VALUE_STR, .str = {.src = "!", .count = 1}},
{.str_tag = AM_VALUE_STR, .str = {.src = "?", .count = 1}}};
AMfree(AMsplice(doc, text, 11, 0, INTERROBANG, sizeof(INTERROBANG)/sizeof(AMvalue)));
/* doc.splice(text, 6, 0, "world") */
AMfree(AMspliceText(doc, text, 6, 0, AMstr("world")));
/* doc.splice(text, 11, 0, "!?") */
AMfree(AMspliceText(doc, text, 11, 0, AMstr("!?")));
/* assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) */
AMbyteSpan str = AMpush(&stack,
AMlistGet(doc, text, 0, NULL),
@@ -765,9 +758,9 @@ static void test_should_be_able_to_splice_text(void** state) {
}

/**
* \brief should be able to insert objects into text
* \brief should NOT be able to insert objects into text
*/
static void test_should_be_able_to_insert_objects_into_text(void** state) {
static void test_should_be_unable_to_insert_objects_into_text(void** state) {
AMresultStack* stack = *state;
/* const doc = create() */
AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc;
@@ -778,32 +771,14 @@ static void test_should_be_able_to_insert_objects_into_text(void** state) {
AM_VALUE_OBJ_ID,
cmocka_cb).obj_id;
AMfree(AMspliceText(doc, text, 0, 0, AMstr("Hello world")));
/* const obj = doc.insertObject(text, 6, { hello: "world" }); */
AMobjId const* const obj = AMpush(
&stack,
/* assert.throws(() => {
doc.insertObject(text, 6, { hello: "world" });
}) */
AMpush(&stack,
AMlistPutObject(doc, text, 6, true, AM_OBJ_TYPE_MAP),
AM_VALUE_OBJ_ID,
cmocka_cb).obj_id;
AMfree(AMmapPutStr(doc, obj, AMstr("hello"), AMstr("world")));
/* assert.deepEqual(doc.text(text), "Hello \ufffcworld"); */
AMbyteSpan str = AMpush(&stack,
AMtext(doc, text, NULL),
AM_VALUE_STR,
cmocka_cb).str;
assert_int_equal(str.count, strlen(u8"Hello \ufffcworld"));
assert_memory_equal(str.src, u8"Hello \ufffcworld", str.count);
/* assert.deepEqual(doc.getWithType(text, 6), ["map", obj]); */
assert_true(AMobjIdEqual(AMpush(&stack,
AMlistGet(doc, text, 6, NULL),
AM_VALUE_OBJ_ID,
cmocka_cb).obj_id, obj));
/* assert.deepEqual(doc.getWithType(obj, "hello"), ["str", "world"]); */
str = AMpush(&stack,
AMmapGet(doc, obj, AMstr("hello"), NULL),
AM_VALUE_STR,
cmocka_cb).str;
assert_int_equal(str.count, strlen("world"));
assert_memory_equal(str.src, "world", str.count);
AM_VALUE_VOID,
NULL);
assert_int_not_equal(AMresultStatus(stack->result), AM_STATUS_OK);
}

/**
@@ -1873,7 +1848,7 @@ int run_ported_wasm_basic_tests(void) {
cmocka_unit_test_setup_teardown(test_should_be_able_to_del, setup_stack, teardown_stack),
cmocka_unit_test_setup_teardown(test_should_be_able_to_use_counters, setup_stack, teardown_stack),
cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text, setup_stack, teardown_stack),
cmocka_unit_test_setup_teardown(test_should_be_able_to_insert_objects_into_text, setup_stack, teardown_stack),
cmocka_unit_test_setup_teardown(test_should_be_unable_to_insert_objects_into_text, setup_stack, teardown_stack),
cmocka_unit_test_setup_teardown(test_should_be_able_to_save_all_or_incrementally, setup_stack, teardown_stack),
cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text_2, setup_stack, teardown_stack),
cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_map, setup_stack, teardown_stack),
@@ -35,6 +35,7 @@ hex = "^0.4.3"
regex = "^1.5"
itertools = "^0.10.3"
thiserror = "^1.0.16"
ropey = "1.5.0"

[dependencies.wasm-bindgen]
version = "^0.2.83"
@@ -154,7 +154,7 @@ Lists are index addressable sets of values. These values can be any scalar or o

### Text

Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the `splice()` method. Spliced strings will be indexable by character (important to note for platforms that index by grapheme cluster). Non-text values can be inserted into a text document and will be represented with the Unicode object replacement character.
Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the `splice()` method. Spliced strings will be indexable by character (important to note for platforms that index by grapheme cluster).

```javascript
let doc = create("aaaaaa")
@@ -162,12 +162,6 @@ Text is a specialized list type intended for modifying a text document. The pri
doc.splice(notes, 6, 5, "everyone")

doc.text(notes) // returns "Hello everyone"

let obj = doc.insertObject(notes, 6, { hi: "there" })

doc.text(notes) // returns "Hello \ufffceveryone"
doc.getWithType(notes, 6) // returns ["map", obj]
doc.get(obj, "hi") // returns "there"
```
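
As a rough sketch of the behaviour this branch documents (reusing the `create()` helper from the example above; the `doc2`/`notes2` names are illustrative only), inserting a non-text object into a text document is now rejected instead of being stored behind the `\ufffc` replacement character:

```javascript
// Hedged sketch: doc2/notes2 are illustrative names, not part of the API.
let doc2 = create("aaaaaa")
let notes2 = doc2.putObject("_root", "notes", "Hello world")
try {
  doc2.insertObject(notes2, 6, { hi: "there" }) // now throws for text objects
} catch (e) {
  // expected on this branch: text accepts string splices only
}
doc2.text(notes2) // remains "Hello world"
```

If non-text values are needed alongside a passage of text, a plain list or map next to the text object is the likely home for them.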

### Tables

@@ -36,7 +36,7 @@
"target": "rimraf ./$TARGET && yarn compile && yarn bindgen && yarn opt",
"compile": "cargo build --target wasm32-unknown-unknown --profile $PROFILE",
"bindgen": "wasm-bindgen --no-typescript --weak-refs --target $TARGET --out-dir $TARGET ../target/wasm32-unknown-unknown/$TARGET_DIR/automerge_wasm.wasm",
"opt": "wasm-opt -O4 $TARGET/automerge_wasm_bg.wasm -o $TARGET/automerge_wasm_bg.wasm",
"opt": "wasm-opt -Oz $TARGET/automerge_wasm_bg.wasm -o $TARGET/automerge_wasm_bg.wasm",
"test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts"
},
"devDependencies": {
@@ -2,8 +2,9 @@ use crate::value::Datatype;
use crate::Automerge;
use automerge as am;
use automerge::transaction::Transactable;
use automerge::ROOT;
use automerge::{Change, ChangeHash, ObjType, Prop};
use js_sys::{Array, Function, Object, Reflect, Symbol, Uint8Array};
use js_sys::{Array, Function, JsString, Object, Reflect, Symbol, Uint8Array};
use std::collections::{BTreeSet, HashSet};
use std::fmt::Display;
use wasm_bindgen::prelude::*;
@ -410,13 +411,13 @@ pub(crate) fn js_get_symbol<J: Into<JsValue>>(obj: J, prop: &Symbol) -> Result<J
|
|||
)?))
|
||||
}
|
||||
|
||||
pub(crate) fn to_prop(p: JsValue) -> Result<Prop, super::error::InvalidProp> {
|
||||
pub(crate) fn to_prop(p: JsValue) -> Result<Prop, error::InvalidProp> {
|
||||
if let Some(s) = p.as_string() {
|
||||
Ok(Prop::Map(s))
|
||||
} else if let Some(n) = p.as_f64() {
|
||||
Ok(Prop::Seq(n as usize))
|
||||
} else {
|
||||
Err(super::error::InvalidProp)
|
||||
Err(error::InvalidProp)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -469,13 +470,8 @@ pub(crate) fn to_objtype(
|
|||
.map(|(key, val)| (key.as_string().unwrap().into(), val))
|
||||
.collect();
|
||||
Some((ObjType::Map, map))
|
||||
} else if let Some(text) = value.as_string() {
|
||||
let text = text
|
||||
.chars()
|
||||
.enumerate()
|
||||
.map(|(i, ch)| (i.into(), ch.to_string().into()))
|
||||
.collect();
|
||||
Some((ObjType::Text, text))
|
||||
} else if value.as_string().is_some() {
|
||||
Some((ObjType::Text, vec![(0.into(), value.clone())]))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -506,22 +502,32 @@ impl Automerge {
|
|||
heads: Option<&Vec<ChangeHash>>,
|
||||
meta: &JsValue,
|
||||
) -> Result<JsValue, error::Export> {
|
||||
let result = if datatype.is_sequence() {
|
||||
self.wrap_object(
|
||||
let result = match datatype {
|
||||
Datatype::Text => {
|
||||
if let Some(heads) = heads {
|
||||
self.doc.text_at(obj, heads)?.into()
|
||||
} else {
|
||||
self.doc.text(obj)?.into()
|
||||
}
|
||||
}
|
||||
Datatype::List => self
|
||||
.wrap_object(
|
||||
self.export_list(obj, heads, meta)?,
|
||||
datatype,
|
||||
&obj.to_string().into(),
|
||||
meta,
|
||||
)?
|
||||
} else {
|
||||
self.wrap_object(
|
||||
.into(),
|
||||
_ => self
|
||||
.wrap_object(
|
||||
self.export_map(obj, heads, meta)?,
|
||||
datatype,
|
||||
&obj.to_string().into(),
|
||||
meta,
|
||||
)?
|
||||
.into(),
|
||||
};
|
||||
Ok(result.into())
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
pub(crate) fn export_map(
|
||||
|
@ -668,7 +674,7 @@ impl Automerge {
|
|||
} else {
|
||||
value
|
||||
};
|
||||
if matches!(datatype, Datatype::Map | Datatype::List | Datatype::Text) {
|
||||
if matches!(datatype, Datatype::Map | Datatype::List) {
|
||||
set_hidden_value(&value, &Symbol::for_(RAW_OBJECT_SYMBOL), id)?;
|
||||
}
|
||||
set_hidden_value(&value, &Symbol::for_(DATATYPE_SYMBOL), datatype)?;
|
||||
|
@ -684,12 +690,23 @@ impl Automerge {
|
|||
array: &Object,
|
||||
patch: &Patch,
|
||||
meta: &JsValue,
|
||||
exposed: &mut HashSet<ObjId>,
|
||||
) -> Result<Object, error::ApplyPatch> {
|
||||
let result = Array::from(array); // shallow copy
|
||||
match patch {
|
||||
Patch::PutSeq { index, value, .. } => {
|
||||
Patch::PutSeq {
|
||||
index,
|
||||
value,
|
||||
expose,
|
||||
..
|
||||
} => {
|
||||
if *expose && value.0.is_object() {
|
||||
exposed.insert(value.1.clone());
|
||||
js_set(&result, *index as f64, &JsValue::null())?;
|
||||
} else {
|
||||
let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?;
|
||||
js_set(&result, *index as f64, &sub_val)?;
|
||||
}
|
||||
Ok(result.into())
|
||||
}
|
||||
Patch::DeleteSeq { index, .. } => {
|
||||
|
@ -717,6 +734,8 @@ impl Automerge {
|
|||
}
|
||||
Patch::DeleteMap { .. } => Err(error::ApplyPatch::DeleteKeyFromSeq),
|
||||
Patch::PutMap { .. } => Err(error::ApplyPatch::PutKeyInSeq),
|
||||
//Patch::SpliceText { .. } => Err(to_js_err("cannot splice text in seq")),
|
||||
Patch::SpliceText { .. } => Err(error::ApplyPatch::SpliceTextInSeq),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -725,12 +744,20 @@ impl Automerge {
|
|||
map: &Object,
|
||||
patch: &Patch,
|
||||
meta: &JsValue,
|
||||
exposed: &mut HashSet<ObjId>,
|
||||
) -> Result<Object, error::ApplyPatch> {
|
||||
let result = Object::assign(&Object::new(), map); // shallow copy
|
||||
match patch {
|
||||
Patch::PutMap { key, value, .. } => {
|
||||
Patch::PutMap {
|
||||
key, value, expose, ..
|
||||
} => {
|
||||
if *expose && value.0.is_object() {
|
||||
exposed.insert(value.1.clone());
|
||||
js_set(&result, key, &JsValue::null())?;
|
||||
} else {
|
||||
let sub_val = self.maybe_wrap_object(alloc(&value.0), &value.1, meta)?;
|
||||
js_set(&result, key, &sub_val)?;
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
Patch::DeleteMap { key, .. } => {
|
||||
|
@ -760,6 +787,8 @@ impl Automerge {
|
|||
}
|
||||
Patch::Insert { .. } => Err(error::ApplyPatch::InsertInMap),
|
||||
Patch::DeleteSeq { .. } => Err(error::ApplyPatch::SpliceInMap),
|
||||
//Patch::SpliceText { .. } => Err(to_js_err("cannot Splice into map")),
|
||||
Patch::SpliceText { .. } => Err(error::ApplyPatch::SpliceTextInMap),
|
||||
Patch::PutSeq { .. } => Err(error::ApplyPatch::PutIdxInMap),
|
||||
}
|
||||
}
|
||||
|
@ -770,12 +799,23 @@ impl Automerge {
|
|||
patch: &Patch,
|
||||
depth: usize,
|
||||
meta: &JsValue,
|
||||
exposed: &mut HashSet<ObjId>,
|
||||
) -> Result<Object, error::ApplyPatch> {
|
||||
let (inner, datatype, id) = self.unwrap_object(&obj)?;
|
||||
let prop = patch.path().get(depth).map(|p| prop_to_js(&p.1));
|
||||
let result = if let Some(prop) = prop {
|
||||
if let Ok(sub_obj) = js_get(&inner, &prop)?.0.dyn_into::<Object>() {
|
||||
let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta)?;
|
||||
let subval = js_get(&inner, &prop)?.0;
|
||||
if subval.is_string() && patch.path().len() - 1 == depth {
|
||||
if let Ok(s) = subval.dyn_into::<JsString>() {
|
||||
let new_value = self.apply_patch_to_text(&s, patch)?;
|
||||
let result = shallow_copy(&inner);
|
||||
js_set(&result, &prop, &new_value)?;
|
||||
Ok(result)
|
||||
} else {
|
||||
panic!("string is not JsString")
|
||||
}
|
||||
} else if let Ok(sub_obj) = js_get(&inner, &prop)?.0.dyn_into::<Object>() {
|
||||
let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta, exposed)?;
|
||||
let result = shallow_copy(&inner);
|
||||
js_set(&result, &prop, &new_value)?;
|
||||
Ok(result)
|
||||
|
@ -785,15 +825,46 @@ impl Automerge {
|
|||
return Ok(obj);
|
||||
}
|
||||
} else if Array::is_array(&inner) {
|
||||
self.apply_patch_to_array(&inner, patch, meta)
|
||||
if id.as_string() == Some(patch.obj().to_string()) {
|
||||
self.apply_patch_to_array(&inner, patch, meta, exposed)
|
||||
} else {
|
||||
self.apply_patch_to_map(&inner, patch, meta)
|
||||
Ok(Array::from(&inner).into())
|
||||
}
|
||||
} else if id.as_string() == Some(patch.obj().to_string()) {
|
||||
self.apply_patch_to_map(&inner, patch, meta, exposed)
|
||||
} else {
|
||||
Ok(Object::assign(&Object::new(), &inner))
|
||||
}?;
|
||||
|
||||
self.wrap_object(result, datatype, &id, meta)
|
||||
.map_err(|e| e.into())
|
||||
}
|
||||
|
||||
fn apply_patch_to_text(
|
||||
&self,
|
||||
string: &JsString,
|
||||
patch: &Patch,
|
||||
) -> Result<JsValue, error::ApplyPatch> {
|
||||
match patch {
|
||||
Patch::DeleteSeq { index, length, .. } => {
|
||||
let index = *index as u32;
|
||||
let before = string.slice(0, index);
|
||||
let after = string.slice(index + *length as u32, string.length());
|
||||
let result = before.concat(&after);
|
||||
Ok(result.into())
|
||||
}
|
||||
Patch::SpliceText { index, value, .. } => {
|
||||
let index = index.1 as u32;
|
||||
let length = string.length();
|
||||
let before = string.slice(0, index);
|
||||
let after = string.slice(index, length);
|
||||
let result = before.concat(&value.to_string().into()).concat(&after);
|
||||
Ok(result.into())
|
||||
}
|
||||
_ => Ok(string.into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn sub_splice<'a, I: IntoIterator<Item = &'a (Value<'a>, ObjId)>>(
|
||||
&self,
|
||||
o: Array,
|
||||
|
@ -815,6 +886,170 @@ impl Automerge {
|
|||
Reflect::apply(&method, &o, &args).map_err(error::Export::CallSplice)?;
|
||||
Ok(o.into())
|
||||
}
|
||||
|
||||
pub(crate) fn import(&self, id: JsValue) -> Result<(ObjId, am::ObjType), error::ImportObj> {
|
||||
if let Some(s) = id.as_string() {
|
||||
if let Some(components) = s.strip_prefix('/').map(|post| post.split('/')) {
|
||||
self.import_path(components)
|
||||
.map_err(|e| error::ImportObj::InvalidPath(s.to_string(), e))
|
||||
} else {
|
||||
let id = self.doc.import(&s).map_err(error::ImportObj::BadImport)?;
|
||||
// SAFETY: we just looked this up
|
||||
let obj_type = self.doc.object_type(&id).unwrap();
|
||||
Ok((id, obj_type))
|
||||
}
|
||||
} else {
|
||||
Err(error::ImportObj::NotString)
|
||||
}
|
||||
}
|
||||
|
||||
fn import_path<'a, I: Iterator<Item = &'a str>>(
|
||||
&self,
|
||||
components: I,
|
||||
) -> Result<(ObjId, am::ObjType), error::ImportPath> {
|
||||
let mut obj = ROOT;
|
||||
let mut obj_type = am::ObjType::Map;
|
||||
for (i, prop) in components.enumerate() {
|
||||
if prop.is_empty() {
|
||||
break;
|
||||
}
|
||||
let is_map = matches!(obj_type, am::ObjType::Map | am::ObjType::Table);
|
||||
let val = if is_map {
|
||||
self.doc.get(obj, prop)?
|
||||
} else {
|
||||
let idx = prop
|
||||
.parse()
|
||||
.map_err(|_| error::ImportPath::IndexNotInteger(i, prop.to_string()))?;
|
||||
self.doc.get(obj, am::Prop::Seq(idx))?
|
||||
};
|
||||
match val {
|
||||
Some((am::Value::Object(am::ObjType::Map), id)) => {
|
||||
obj_type = am::ObjType::Map;
|
||||
obj = id;
|
||||
}
|
||||
Some((am::Value::Object(am::ObjType::Table), id)) => {
|
||||
obj_type = am::ObjType::Table;
|
||||
obj = id;
|
||||
}
|
||||
Some((am::Value::Object(am::ObjType::List), id)) => {
|
||||
obj_type = am::ObjType::List;
|
||||
obj = id;
|
||||
}
|
||||
Some((am::Value::Object(am::ObjType::Text), id)) => {
|
||||
obj_type = am::ObjType::Text;
|
||||
obj = id;
|
||||
}
|
||||
None => return Err(error::ImportPath::NonExistentObject(i, prop.to_string())),
|
||||
_ => return Err(error::ImportPath::NotAnObject),
|
||||
};
|
||||
}
|
||||
Ok((obj, obj_type))
|
||||
}
|
||||
|
||||
pub(crate) fn import_prop(&self, prop: JsValue) -> Result<Prop, error::InvalidProp> {
|
||||
if let Some(s) = prop.as_string() {
|
||||
Ok(s.into())
|
||||
} else if let Some(n) = prop.as_f64() {
|
||||
Ok((n as usize).into())
|
||||
} else {
|
||||
Err(error::InvalidProp)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn import_scalar(
|
||||
&self,
|
||||
value: &JsValue,
|
||||
datatype: &Option<String>,
|
||||
) -> Option<am::ScalarValue> {
|
||||
match datatype.as_deref() {
|
||||
Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean),
|
||||
Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)),
|
||||
Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)),
|
||||
Some("str") => value.as_string().map(|v| am::ScalarValue::Str(v.into())),
|
||||
Some("f64") => value.as_f64().map(am::ScalarValue::F64),
|
||||
Some("bytes") => Some(am::ScalarValue::Bytes(
|
||||
value.clone().dyn_into::<Uint8Array>().unwrap().to_vec(),
|
||||
)),
|
||||
Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)),
|
||||
Some("timestamp") => {
|
||||
if let Some(v) = value.as_f64() {
|
||||
Some(am::ScalarValue::Timestamp(v as i64))
|
||||
} else if let Ok(d) = value.clone().dyn_into::<js_sys::Date>() {
|
||||
Some(am::ScalarValue::Timestamp(d.get_time() as i64))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Some("null") => Some(am::ScalarValue::Null),
|
||||
Some(_) => None,
|
||||
None => {
|
||||
if value.is_null() {
|
||||
Some(am::ScalarValue::Null)
|
||||
} else if let Some(b) = value.as_bool() {
|
||||
Some(am::ScalarValue::Boolean(b))
|
||||
} else if let Some(s) = value.as_string() {
|
||||
Some(am::ScalarValue::Str(s.into()))
|
||||
} else if let Some(n) = value.as_f64() {
|
||||
if (n.round() - n).abs() < f64::EPSILON {
|
||||
Some(am::ScalarValue::Int(n as i64))
|
||||
} else {
|
||||
Some(am::ScalarValue::F64(n))
|
||||
}
|
||||
} else if let Ok(d) = value.clone().dyn_into::<js_sys::Date>() {
|
||||
Some(am::ScalarValue::Timestamp(d.get_time() as i64))
|
||||
} else if let Ok(o) = &value.clone().dyn_into::<Uint8Array>() {
|
||||
Some(am::ScalarValue::Bytes(o.to_vec()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn import_value(
|
||||
&self,
|
||||
value: &JsValue,
|
||||
datatype: Option<String>,
|
||||
) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), error::InvalidValue> {
|
||||
match self.import_scalar(value, &datatype) {
|
||||
Some(val) => Ok((val.into(), vec![])),
|
||||
None => {
|
||||
if let Some((o, subvals)) = to_objtype(value, &datatype) {
|
||||
Ok((o.into(), subvals))
|
||||
} else {
|
||||
web_sys::console::log_2(&"Invalid value".into(), value);
|
||||
Err(error::InvalidValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn finalize_exposed(
|
||||
&self,
|
||||
object: &JsValue,
|
||||
exposed: HashSet<ObjId>,
|
||||
meta: &JsValue,
|
||||
) -> Result<(), error::ApplyPatch> {
|
||||
for obj in exposed {
|
||||
let mut pointer = object.clone();
|
||||
let obj_type = self.doc.object_type(&obj).unwrap();
|
||||
let path: Vec<_> = self
|
||||
.doc
|
||||
.path_to_object(&obj)?
|
||||
.iter()
|
||||
.map(|p| prop_to_js(&p.1))
|
||||
.collect();
|
||||
let value = self.export_object(&obj, obj_type.into(), None, meta)?;
|
||||
for (i, prop) in path.iter().enumerate() {
|
||||
if i + 1 < path.len() {
|
||||
pointer = js_get(&pointer, prop)?.0;
|
||||
} else {
|
||||
js_set(&pointer, prop, &value)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn alloc(value: &Value<'_>) -> (Datatype, JsValue) {
|
||||
|
@ -823,7 +1058,7 @@ pub(crate) fn alloc(value: &Value<'_>) -> (Datatype, JsValue) {
|
|||
ObjType::Map => (Datatype::Map, Object::new().into()),
|
||||
ObjType::Table => (Datatype::Table, Object::new().into()),
|
||||
ObjType::List => (Datatype::List, Array::new().into()),
|
||||
ObjType::Text => (Datatype::Text, Array::new().into()),
|
||||
ObjType::Text => (Datatype::Text, "".into()),
|
||||
},
|
||||
am::Value::Scalar(s) => match s.as_ref() {
|
||||
am::ScalarValue::Bytes(v) => (Datatype::Bytes, Uint8Array::from(v.as_slice()).into()),
|
||||
|
@ -877,7 +1112,7 @@ fn prop_to_js(prop: &Prop) -> JsValue {
|
|||
}
|
||||
|
||||
pub(crate) mod error {
|
||||
use automerge::LoadChangeError;
|
||||
use automerge::{AutomergeError, LoadChangeError};
|
||||
use wasm_bindgen::JsValue;
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
|
@ -1028,6 +1263,8 @@ pub(crate) mod error {
|
|||
GetSplice(JsValue),
|
||||
#[error("error calling splice: {0:?}")]
|
||||
CallSplice(JsValue),
|
||||
#[error(transparent)]
|
||||
Automerge(#[from] AutomergeError),
|
||||
}
|
||||
|
||||
impl From<Export> for JsValue {
|
||||
|
@ -1054,12 +1291,18 @@ pub(crate) mod error {
|
|||
InsertInMap,
|
||||
#[error("cannot splice into a map")]
|
||||
SpliceInMap,
|
||||
#[error("cannot splice text into a seq")]
|
||||
SpliceTextInSeq,
|
||||
#[error("cannot splice text into a map")]
|
||||
SpliceTextInMap,
|
||||
#[error("cannot put a seq index in a map")]
|
||||
PutIdxInMap,
|
||||
#[error(transparent)]
|
||||
GetProp(#[from] GetProp),
|
||||
#[error(transparent)]
|
||||
SetProp(#[from] SetProp),
|
||||
#[error(transparent)]
|
||||
Automerge(#[from] AutomergeError),
|
||||
}
|
||||
|
||||
impl From<ApplyPatch> for JsValue {
|
||||
|
@ -1087,4 +1330,40 @@ pub(crate) mod error {
|
|||
JsValue::from(e.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum ImportObj {
|
||||
#[error("obj id was not a string")]
|
||||
NotString,
|
||||
#[error("invalid path {0}: {1}")]
|
||||
InvalidPath(String, ImportPath),
|
||||
#[error("unable to import object id: {0}")]
|
||||
BadImport(AutomergeError),
|
||||
}
|
||||
|
||||
impl From<ImportObj> for JsValue {
|
||||
fn from(e: ImportObj) -> Self {
|
||||
JsValue::from(format!("invalid object ID: {}", e))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum ImportPath {
|
||||
#[error(transparent)]
|
||||
Automerge(#[from] AutomergeError),
|
||||
#[error("path component {0} ({1}) should be an integer to index a sequence")]
|
||||
IndexNotInteger(usize, String),
|
||||
#[error("path component {0} ({1}) referenced a nonexistent object")]
|
||||
NonExistentObject(usize, String),
|
||||
#[error("path did not refer to an object")]
|
||||
NotAnObject,
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("given property was not a string or integer")]
|
||||
pub struct InvalidProp;
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("given property was not a string or integer")]
|
||||
pub struct InvalidValue;
|
||||
}
|
||||
|
|
|
@ -29,10 +29,11 @@
|
|||
use am::transaction::CommitOptions;
|
||||
use am::transaction::{Observed, Transactable, UnObserved};
|
||||
use automerge as am;
|
||||
use automerge::{Change, ObjId, ObjType, Prop, Value, ROOT};
|
||||
use automerge::{Change, ObjId, Prop, Value, ROOT};
|
||||
use js_sys::{Array, Function, Object, Uint8Array};
|
||||
use serde::ser::Serialize;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::convert::TryInto;
|
||||
use wasm_bindgen::prelude::*;
|
||||
use wasm_bindgen::JsCast;
|
||||
|
@ -73,6 +74,7 @@ pub struct Automerge {
|
|||
impl Automerge {
|
||||
pub fn new(actor: Option<String>) -> Result<Automerge, error::BadActorId> {
|
||||
let mut doc = AutoCommit::default();
|
||||
doc.set_utf16(true);
|
||||
if let Some(a) = actor {
|
||||
let a = automerge::ActorId::from(hex::decode(a)?.to_vec());
|
||||
doc.set_actor(a);
|
||||
|
@ -104,11 +106,12 @@ impl Automerge {
|
|||
heads: JsValue,
|
||||
) -> Result<Automerge, error::Fork> {
|
||||
let heads: Result<Vec<am::ChangeHash>, _> = JS(heads).try_into();
|
||||
let doc = if let Ok(heads) = heads {
|
||||
let mut doc = if let Ok(heads) = heads {
|
||||
self.doc.fork_at(&heads)?
|
||||
} else {
|
||||
self.doc.fork()
|
||||
};
|
||||
doc.set_utf16(true);
|
||||
let mut automerge = Automerge {
|
||||
doc,
|
||||
freeze: self.freeze,
|
||||
|
@ -188,7 +191,7 @@ impl Automerge {
|
|||
let start = start as usize;
|
||||
let delete_count = delete_count as usize;
|
||||
if let Some(t) = text.as_string() {
|
||||
if obj_type == ObjType::Text {
|
||||
if obj_type == am::ObjType::Text {
|
||||
self.doc.splice_text(&obj, start, delete_count, &t)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
@ -202,9 +205,22 @@ impl Automerge {
|
|||
vals.push(value);
|
||||
}
|
||||
}
|
||||
Ok(self
|
||||
.doc
|
||||
.splice(&obj, start, delete_count, vals.into_iter())?)
|
||||
if !vals.is_empty() {
|
||||
self.doc.splice(&obj, start, delete_count, vals)?;
|
||||
} else {
|
||||
// No values were given, but we still have to pick between the list splice
// and the text splice here, because text splices go through the UTF-16
// aware path.
|
||||
match self.doc.object_type(&obj) {
|
||||
Some(am::ObjType::List) => {
|
||||
self.doc.splice(&obj, start, delete_count, vals)?;
|
||||
}
|
||||
Some(am::ObjType::Text) => {
|
||||
self.doc.splice_text(&obj, start, delete_count, "")?;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn push(
|
||||
|
@ -229,11 +245,16 @@ impl Automerge {
|
|||
value: JsValue,
|
||||
) -> Result<Option<String>, error::InsertObject> {
|
||||
let (obj, _) = self.import(obj)?;
|
||||
let (value, subvals) =
|
||||
let (objtype, subvals) =
|
||||
to_objtype(&value, &None).ok_or(error::InsertObject::ValueNotObject)?;
|
||||
let index = self.doc.length(&obj);
|
||||
let opid = self.doc.insert_object(&obj, index, value)?;
|
||||
let opid = self.doc.insert_object(&obj, index, objtype)?;
|
||||
if objtype == am::ObjType::Text {
|
||||
self.doc
|
||||
.splice_text(&opid, 0, 0, &value.as_string().unwrap_or_default())?;
|
||||
} else {
|
||||
self.subset::<error::InsertObject>(&opid, subvals)?;
|
||||
}
|
||||
Ok(opid.to_string().into())
|
||||
}
|
||||
|
||||
|
@ -262,10 +283,15 @@ impl Automerge {
|
|||
) -> Result<Option<String>, error::InsertObject> {
|
||||
let (obj, _) = self.import(obj)?;
|
||||
let index = index as f64;
|
||||
let (value, subvals) =
|
||||
let (objtype, subvals) =
|
||||
to_objtype(&value, &None).ok_or(error::InsertObject::ValueNotObject)?;
|
||||
let opid = self.doc.insert_object(&obj, index as usize, value)?;
|
||||
let opid = self.doc.insert_object(&obj, index as usize, objtype)?;
|
||||
if objtype == am::ObjType::Text {
|
||||
self.doc
|
||||
.splice_text(&opid, 0, 0, &value.as_string().unwrap_or_default())?;
|
||||
} else {
|
||||
self.subset::<error::InsertObject>(&opid, subvals)?;
|
||||
}
|
||||
Ok(opid.to_string().into())
|
||||
}
|
||||
|
||||
|
@ -294,16 +320,23 @@ impl Automerge {
|
|||
) -> Result<JsValue, error::InsertObject> {
|
||||
let (obj, _) = self.import(obj)?;
|
||||
let prop = self.import_prop(prop)?;
|
||||
let (value, subvals) =
|
||||
let (objtype, subvals) =
|
||||
to_objtype(&value, &None).ok_or(error::InsertObject::ValueNotObject)?;
|
||||
let opid = self.doc.put_object(&obj, prop, value)?;
|
||||
let opid = self.doc.put_object(&obj, prop, objtype)?;
|
||||
if objtype == am::ObjType::Text {
|
||||
self.doc
|
||||
.splice_text(&opid, 0, 0, &value.as_string().unwrap_or_default())?;
|
||||
} else {
|
||||
self.subset::<error::InsertObject>(&opid, subvals)?;
|
||||
}
|
||||
Ok(opid.to_string().into())
|
||||
}
|
||||
|
||||
fn subset<E>(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), E>
|
||||
where
|
||||
E: From<automerge::AutomergeError> + From<error::ImportObj> + From<error::InvalidValue>,
|
||||
E: From<automerge::AutomergeError>
|
||||
+ From<interop::error::ImportObj>
|
||||
+ From<interop::error::InvalidValue>,
|
||||
{
|
||||
for (p, v) in vals {
|
||||
let (value, subvals) = self.import_value(&v, None)?;
|
||||
|
@ -498,17 +531,27 @@ impl Automerge {
|
|||
object = self.wrap_object(object, datatype, &id, &meta)?;
|
||||
}
|
||||
|
||||
for p in patches {
|
||||
if let Some(c) = &callback {
|
||||
let mut exposed = HashSet::default();
|
||||
|
||||
let before = object.clone();
|
||||
object = self.apply_patch(object, &p, 0, &meta)?;
|
||||
c.call3(&JsValue::undefined(), &p.try_into()?, &before, &object)
|
||||
|
||||
for p in &patches {
|
||||
object = self.apply_patch(object, p, 0, &meta, &mut exposed)?;
|
||||
}
|
||||
|
||||
if let Some(c) = &callback {
|
||||
if !patches.is_empty() {
|
||||
let patches: Array = patches
|
||||
.into_iter()
|
||||
.map(JsValue::try_from)
|
||||
.collect::<Result<_, _>>()?;
|
||||
c.call3(&JsValue::undefined(), &patches.into(), &before, &object)
|
||||
.map_err(error::ApplyPatch::PatchCallback)?;
|
||||
} else {
|
||||
object = self.apply_patch(object, &p, 0, &meta)?;
|
||||
}
|
||||
}
|
||||
|
||||
self.finalize_exposed(&object, exposed, &meta)?;
|
||||
|
||||
Ok(object.into())
|
||||
}
|
||||
|
||||
|
@ -673,145 +716,11 @@ impl Automerge {
|
|||
heads: Option<Array>,
|
||||
meta: JsValue,
|
||||
) -> Result<JsValue, error::Materialize> {
|
||||
let (obj, obj_type) = self.import(obj).unwrap_or((ROOT, ObjType::Map));
|
||||
let (obj, obj_type) = self.import(obj).unwrap_or((ROOT, am::ObjType::Map));
|
||||
let heads = get_heads(heads)?;
|
||||
let _patches = self.doc.observer().take_patches(); // throw away patches
|
||||
Ok(self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta)?)
|
||||
}
|
||||
|
||||
fn import(&self, id: JsValue) -> Result<(ObjId, ObjType), error::ImportObj> {
|
||||
if let Some(s) = id.as_string() {
|
||||
if let Some(components) = s.strip_prefix('/').map(|post| post.split('/')) {
|
||||
self.import_path(components)
|
||||
.map_err(|e| error::ImportObj::InvalidPath(s.to_string(), e))
|
||||
} else {
|
||||
let id = self.doc.import(&s).map_err(error::ImportObj::BadImport)?;
|
||||
// SAFETY: we just looked this up
|
||||
let obj_type = self.doc.object_type(&id).unwrap();
|
||||
Ok((id, obj_type))
|
||||
}
|
||||
} else {
|
||||
Err(error::ImportObj::NotString)
|
||||
}
|
||||
}
|
||||
|
||||
fn import_path<'a, I: Iterator<Item = &'a str>>(
|
||||
&self,
|
||||
components: I,
|
||||
) -> Result<(ObjId, ObjType), error::ImportPath> {
|
||||
let mut obj = ROOT;
|
||||
let mut obj_type = ObjType::Map;
|
||||
for (i, prop) in components.enumerate() {
|
||||
if prop.is_empty() {
|
||||
break;
|
||||
}
|
||||
let is_map = matches!(obj_type, ObjType::Map | ObjType::Table);
|
||||
let val = if is_map {
|
||||
self.doc.get(obj, prop)?
|
||||
} else {
|
||||
let idx = prop
|
||||
.parse()
|
||||
.map_err(|_| error::ImportPath::IndexNotInteger(i, prop.to_string()))?;
|
||||
self.doc.get(obj, am::Prop::Seq(idx))?
|
||||
};
|
||||
match val {
|
||||
Some((am::Value::Object(ObjType::Map), id)) => {
|
||||
obj_type = ObjType::Map;
|
||||
obj = id;
|
||||
}
|
||||
Some((am::Value::Object(ObjType::Table), id)) => {
|
||||
obj_type = ObjType::Table;
|
||||
obj = id;
|
||||
}
|
||||
Some((am::Value::Object(ObjType::List), id)) => {
|
||||
obj_type = ObjType::List;
|
||||
obj = id;
|
||||
}
|
||||
Some((am::Value::Object(ObjType::Text), id)) => {
|
||||
obj_type = ObjType::Text;
|
||||
obj = id;
|
||||
}
|
||||
None => return Err(error::ImportPath::NonExistentObject(i, prop.to_string())),
|
||||
_ => return Err(error::ImportPath::NotAnObject),
|
||||
};
|
||||
}
|
||||
Ok((obj, obj_type))
|
||||
}
|
||||
|
||||
fn import_prop(&self, prop: JsValue) -> Result<Prop, error::InvalidProp> {
|
||||
if let Some(s) = prop.as_string() {
|
||||
Ok(s.into())
|
||||
} else if let Some(n) = prop.as_f64() {
|
||||
Ok((n as usize).into())
|
||||
} else {
|
||||
Err(error::InvalidProp)
|
||||
}
|
||||
}
|
||||
|
||||
fn import_scalar(&self, value: &JsValue, datatype: &Option<String>) -> Option<am::ScalarValue> {
|
||||
match datatype.as_deref() {
|
||||
Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean),
|
||||
Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)),
|
||||
Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)),
|
||||
Some("str") => value.as_string().map(|v| am::ScalarValue::Str(v.into())),
|
||||
Some("f64") => value.as_f64().map(am::ScalarValue::F64),
|
||||
Some("bytes") => Some(am::ScalarValue::Bytes(
|
||||
value.clone().dyn_into::<Uint8Array>().unwrap().to_vec(),
|
||||
)),
|
||||
Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)),
|
||||
Some("timestamp") => {
|
||||
if let Some(v) = value.as_f64() {
|
||||
Some(am::ScalarValue::Timestamp(v as i64))
|
||||
} else if let Ok(d) = value.clone().dyn_into::<js_sys::Date>() {
|
||||
Some(am::ScalarValue::Timestamp(d.get_time() as i64))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Some("null") => Some(am::ScalarValue::Null),
|
||||
Some(_) => None,
|
||||
None => {
|
||||
if value.is_null() {
|
||||
Some(am::ScalarValue::Null)
|
||||
} else if let Some(b) = value.as_bool() {
|
||||
Some(am::ScalarValue::Boolean(b))
|
||||
} else if let Some(s) = value.as_string() {
|
||||
Some(am::ScalarValue::Str(s.into()))
|
||||
} else if let Some(n) = value.as_f64() {
|
||||
if (n.round() - n).abs() < f64::EPSILON {
|
||||
Some(am::ScalarValue::Int(n as i64))
|
||||
} else {
|
||||
Some(am::ScalarValue::F64(n))
|
||||
}
|
||||
} else if let Ok(d) = value.clone().dyn_into::<js_sys::Date>() {
|
||||
Some(am::ScalarValue::Timestamp(d.get_time() as i64))
|
||||
} else if let Ok(o) = &value.clone().dyn_into::<Uint8Array>() {
|
||||
Some(am::ScalarValue::Bytes(o.to_vec()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn import_value(
|
||||
&self,
|
||||
value: &JsValue,
|
||||
datatype: Option<String>,
|
||||
) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), error::InvalidValue> {
|
||||
match self.import_scalar(value, &datatype) {
|
||||
Some(val) => Ok((val.into(), vec![])),
|
||||
None => {
|
||||
if let Some((o, subvals)) = to_objtype(value, &datatype) {
|
||||
Ok((o.into(), subvals))
|
||||
} else {
|
||||
web_sys::console::log_2(&"Invalid value".into(), value);
|
||||
Err(error::InvalidValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = emptyChange)]
|
||||
pub fn empty_change(&mut self, message: Option<String>, time: Option<f64>) -> JsValue {
|
||||
let time = time.map(|f| f as i64);
|
||||
|
@ -832,6 +741,7 @@ pub fn load(data: Uint8Array, actor: Option<String>) -> Result<Automerge, error:
|
|||
let data = data.to_vec();
|
||||
let mut doc =
|
||||
am::AutoCommitWithObs::<UnObserved>::load(&data)?.with_observer(Observer::default());
|
||||
doc.set_utf16(true);
|
||||
if let Some(s) = actor {
|
||||
let actor =
|
||||
automerge::ActorId::from(hex::decode(s).map_err(error::BadActorId::from)?.to_vec());
|
||||
|
@ -972,44 +882,16 @@ pub mod error {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum ImportPath {
|
||||
#[error(transparent)]
|
||||
Automerge(#[from] AutomergeError),
|
||||
#[error("path component {0} ({1}) should be an integer to index a sequence")]
|
||||
IndexNotInteger(usize, String),
|
||||
#[error("path component {0} ({1}) referenced a nonexistent object")]
|
||||
NonExistentObject(usize, String),
|
||||
#[error("path did not refer to an object")]
|
||||
NotAnObject,
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum ImportObj {
|
||||
#[error("obj id was not a string")]
|
||||
NotString,
|
||||
#[error("invalid path {0}: {1}")]
|
||||
InvalidPath(String, ImportPath),
|
||||
#[error("unable to import object id: {0}")]
|
||||
BadImport(AutomergeError),
|
||||
}
|
||||
|
||||
impl From<ImportObj> for JsValue {
|
||||
fn from(e: ImportObj) -> Self {
|
||||
JsValue::from(format!("invalid object ID: {}", e))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Get {
|
||||
#[error("invalid object ID: {0}")]
|
||||
ImportObj(#[from] ImportObj),
|
||||
ImportObj(#[from] interop::error::ImportObj),
|
||||
#[error(transparent)]
|
||||
Automerge(#[from] AutomergeError),
|
||||
#[error("bad heads: {0}")]
|
||||
BadHeads(#[from] interop::error::BadChangeHashes),
|
||||
#[error(transparent)]
|
||||
InvalidProp(#[from] InvalidProp),
|
||||
InvalidProp(#[from] interop::error::InvalidProp),
|
||||
}
|
||||
|
||||
impl From<Get> for JsValue {
|
||||
|
@ -1021,7 +903,7 @@ pub mod error {
|
|||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Splice {
|
||||
#[error("invalid object ID: {0}")]
|
||||
ImportObj(#[from] ImportObj),
|
||||
ImportObj(#[from] interop::error::ImportObj),
|
||||
#[error(transparent)]
|
||||
Automerge(#[from] AutomergeError),
|
||||
#[error("value at {0} in values to insert was not a primitive")]
|
||||
|
@ -1037,15 +919,15 @@ pub mod error {
|
|||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Insert {
|
||||
#[error("invalid object id: {0}")]
|
||||
ImportObj(#[from] ImportObj),
|
||||
ImportObj(#[from] interop::error::ImportObj),
|
||||
#[error("the value to insert was not a primitive")]
|
||||
ValueNotPrimitive,
|
||||
#[error(transparent)]
|
||||
Automerge(#[from] AutomergeError),
|
||||
#[error(transparent)]
|
||||
InvalidProp(#[from] InvalidProp),
|
||||
InvalidProp(#[from] interop::error::InvalidProp),
|
||||
#[error(transparent)]
|
||||
InvalidValue(#[from] InvalidValue),
|
||||
InvalidValue(#[from] interop::error::InvalidValue),
|
||||
}
|
||||
|
||||
impl From<Insert> for JsValue {
|
||||
|
@ -1057,15 +939,15 @@ pub mod error {
|
|||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum InsertObject {
|
||||
#[error("invalid object id: {0}")]
|
||||
ImportObj(#[from] ImportObj),
|
||||
ImportObj(#[from] interop::error::ImportObj),
|
||||
#[error("the value to insert must be an object")]
|
||||
ValueNotObject,
|
||||
#[error(transparent)]
|
||||
Automerge(#[from] AutomergeError),
|
||||
#[error(transparent)]
|
||||
InvalidProp(#[from] InvalidProp),
|
||||
InvalidProp(#[from] interop::error::InvalidProp),
|
||||
#[error(transparent)]
|
||||
InvalidValue(#[from] InvalidValue),
|
||||
InvalidValue(#[from] interop::error::InvalidValue),
|
||||
}
|
||||
|
||||
impl From<InsertObject> for JsValue {
|
||||
|
@ -1074,20 +956,12 @@ pub mod error {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("given property was not a string or integer")]
|
||||
pub struct InvalidProp;
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("given property was not a string or integer")]
|
||||
pub struct InvalidValue;
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Increment {
|
||||
#[error("invalid object id: {0}")]
|
||||
ImportObj(#[from] ImportObj),
|
||||
ImportObj(#[from] interop::error::ImportObj),
|
||||
#[error(transparent)]
|
||||
InvalidProp(#[from] InvalidProp),
|
||||
InvalidProp(#[from] interop::error::InvalidProp),
|
||||
#[error("value was not numeric")]
|
||||
ValueNotNumeric,
|
||||
#[error(transparent)]
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
#![allow(dead_code)]
|
||||
|
||||
use crate::interop::{self, alloc, js_set};
|
||||
use automerge::{ObjId, OpObserver, Parents, Prop, SequenceTree, Value};
|
||||
use automerge::{Automerge, ObjId, OpObserver, Prop, SequenceTree, Value};
|
||||
use js_sys::{Array, Object};
|
||||
use ropey::Rope;
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
|
@ -32,14 +33,14 @@ pub(crate) enum Patch {
|
|||
path: Vec<(ObjId, Prop)>,
|
||||
key: String,
|
||||
value: (Value<'static>, ObjId),
|
||||
conflict: bool,
|
||||
expose: bool,
|
||||
},
|
||||
PutSeq {
|
||||
obj: ObjId,
|
||||
path: Vec<(ObjId, Prop)>,
|
||||
index: usize,
|
||||
value: (Value<'static>, ObjId),
|
||||
conflict: bool,
|
||||
expose: bool,
|
||||
},
|
||||
Insert {
|
||||
obj: ObjId,
|
||||
|
@ -47,6 +48,13 @@ pub(crate) enum Patch {
|
|||
index: usize,
|
||||
values: SequenceTree<(Value<'static>, ObjId)>,
|
||||
},
|
||||
SpliceText {
|
||||
obj: ObjId,
|
||||
path: Vec<(ObjId, Prop)>,
|
||||
index: (usize, usize),
|
||||
length: (usize, usize),
|
||||
value: Rope,
|
||||
},
|
||||
Increment {
|
||||
obj: ObjId,
|
||||
path: Vec<(ObjId, Prop)>,
|
||||
|
@ -69,7 +77,7 @@ pub(crate) enum Patch {
|
|||
impl OpObserver for Observer {
|
||||
fn insert(
|
||||
&mut self,
|
||||
mut parents: Parents<'_>,
|
||||
doc: &Automerge,
|
||||
obj: ObjId,
|
||||
index: usize,
|
||||
tagged_value: (Value<'_>, ObjId),
|
||||
|
@ -90,7 +98,7 @@ impl OpObserver for Observer {
|
|||
return;
|
||||
}
|
||||
}
|
||||
let path = parents.path();
|
||||
if let Some(path) = doc.parents(&obj).ok().and_then(|mut p| p.visible_path()) {
|
||||
let mut values = SequenceTree::new();
|
||||
values.push(value);
|
||||
let patch = Patch::Insert {
|
||||
|
@ -102,16 +110,111 @@ impl OpObserver for Observer {
|
|||
self.patches.push(patch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn delete(&mut self, mut parents: Parents<'_>, obj: ObjId, prop: Prop) {
|
||||
fn splice_text_utf16(
|
||||
&mut self,
|
||||
doc: &Automerge,
|
||||
obj: ObjId,
|
||||
(index8, index16): (usize, usize),
|
||||
(len8, len16): (usize, usize),
|
||||
value: &str,
|
||||
) {
|
||||
if self.enabled {
|
||||
assert!(len8 <= len16);
|
||||
assert!(index8 <= index16);
|
||||
if let Some(Patch::SpliceText {
|
||||
obj: tail_obj,
|
||||
index: (tail_index8, _),
|
||||
length: (tail_len8, tail_len16),
|
||||
value: prev_value,
|
||||
..
|
||||
}) = self.patches.last_mut()
|
||||
{
|
||||
let range = *tail_index8..=*tail_index8 + *tail_len8;
|
||||
if tail_obj == &obj && range.contains(&index8) {
|
||||
prev_value.insert(index8 - *tail_index8, value);
|
||||
*tail_len16 += len16;
|
||||
*tail_len8 += len8;
|
||||
return;
|
||||
}
|
||||
}
|
||||
if let Some(path) = doc.parents(&obj).ok().and_then(|mut p| p.visible_path()) {
|
||||
let value = Rope::from_str(value);
|
||||
assert!(len8 == value.len_chars());
|
||||
let patch = Patch::SpliceText {
|
||||
path,
|
||||
obj,
|
||||
index: (index8, index16),
|
||||
length: (len8, len16),
|
||||
value,
|
||||
};
|
||||
self.patches.push(patch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn delete_utf16(
|
||||
&mut self,
|
||||
doc: &Automerge,
|
||||
obj: ObjId,
|
||||
(index8, index16): (usize, usize),
|
||||
(len8, len16): (usize, usize),
|
||||
) {
|
||||
if self.enabled {
|
||||
assert!(len8 <= len16);
|
||||
assert!(index8 <= index16);
|
||||
match self.patches.last_mut() {
|
||||
Some(Patch::SpliceText {
|
||||
obj: tail_obj,
|
||||
index: (tail_index8, _),
|
||||
length: (tail_len8, tail_len16),
|
||||
value,
|
||||
..
|
||||
}) => {
|
||||
let range = *tail_index8..*tail_index8 + *tail_len8;
|
||||
if tail_obj == &obj && range.contains(&index8) {
|
||||
let start = index8 - *tail_index8;
|
||||
let end = start + len8;
|
||||
value.remove(start..end);
|
||||
*tail_len8 -= len8;
|
||||
*tail_len16 -= len16;
|
||||
return;
|
||||
}
|
||||
}
|
||||
Some(Patch::DeleteSeq {
|
||||
obj: tail_obj,
|
||||
index: tail_index,
|
||||
length: tail_length,
|
||||
..
|
||||
}) => {
|
||||
if tail_obj == &obj && index16 == *tail_index {
|
||||
*tail_length += len16;
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if let Some(path) = doc.parents(&obj).ok().and_then(|mut p| p.visible_path()) {
|
||||
let patch = Patch::DeleteSeq {
|
||||
path,
|
||||
obj,
|
||||
index: index16,
|
||||
length: len16,
|
||||
};
|
||||
self.patches.push(patch)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn delete(&mut self, doc: &Automerge, obj: ObjId, prop: Prop) {
|
||||
if self.enabled {
|
||||
if let Some(Patch::Insert {
|
||||
obj: tail_obj,
|
||||
index: tail_index,
|
||||
values,
|
||||
..
|
||||
}) = self.patches.last_mut()
|
||||
{
|
||||
}) = self.patches.last_mut() {
|
||||
if let Prop::Seq(index) = prop {
|
||||
let range = *tail_index..*tail_index + values.len();
|
||||
if tail_obj == &obj && range.contains(&index) {
|
||||
|
@ -120,7 +223,7 @@ impl OpObserver for Observer {
|
|||
}
|
||||
}
|
||||
}
|
||||
let path = parents.path();
|
||||
if let Some(path) = doc.parents(&obj).ok().and_then(|mut p| p.visible_path()) {
|
||||
let patch = match prop {
|
||||
Prop::Map(key) => Patch::DeleteMap { path, obj, key },
|
||||
Prop::Seq(index) => Patch::DeleteSeq {
|
||||
|
@ -133,17 +236,19 @@ impl OpObserver for Observer {
|
|||
self.patches.push(patch)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn put(
|
||||
&mut self,
|
||||
mut parents: Parents<'_>,
|
||||
doc: &Automerge,
|
||||
obj: ObjId,
|
||||
prop: Prop,
|
||||
tagged_value: (Value<'_>, ObjId),
|
||||
conflict: bool,
|
||||
_conflict: bool,
|
||||
) {
|
||||
if self.enabled {
|
||||
let path = parents.path();
|
||||
let expose = false;
|
||||
if let Some(path) = doc.parents(&obj).ok().and_then(|mut p| p.visible_path()) {
|
||||
let value = (tagged_value.0.to_owned(), tagged_value.1);
|
||||
let patch = match prop {
|
||||
Prop::Map(key) => Patch::PutMap {
|
||||
|
@ -151,29 +256,59 @@ impl OpObserver for Observer {
|
|||
obj,
|
||||
key,
|
||||
value,
|
||||
conflict,
|
||||
expose,
|
||||
},
|
||||
Prop::Seq(index) => Patch::PutSeq {
|
||||
path,
|
||||
obj,
|
||||
index,
|
||||
value,
|
||||
conflict,
|
||||
expose,
|
||||
},
|
||||
};
|
||||
self.patches.push(patch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn increment(
|
||||
fn expose(
|
||||
&mut self,
|
||||
mut parents: Parents<'_>,
|
||||
doc: &Automerge,
|
||||
obj: ObjId,
|
||||
prop: Prop,
|
||||
tagged_value: (i64, ObjId),
|
||||
tagged_value: (Value<'_>, ObjId),
|
||||
_conflict: bool,
|
||||
) {
|
||||
if self.enabled {
|
||||
let path = parents.path();
|
||||
let expose = true;
|
||||
if let Some(path) = doc.parents(&obj).ok().and_then(|mut p| p.visible_path()) {
|
||||
let value = (tagged_value.0.to_owned(), tagged_value.1);
|
||||
let patch = match prop {
|
||||
Prop::Map(key) => Patch::PutMap {
|
||||
path,
|
||||
obj,
|
||||
key,
|
||||
value,
|
||||
expose,
|
||||
},
|
||||
Prop::Seq(index) => Patch::PutSeq {
|
||||
path,
|
||||
obj,
|
||||
index,
|
||||
value,
|
||||
expose,
|
||||
},
|
||||
};
|
||||
self.patches.push(patch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn flag_conflict(&mut self, _doc: &Automerge, _obj: ObjId, _prop: Prop) {}
|
||||
|
||||
fn increment(&mut self, doc: &Automerge, obj: ObjId, prop: Prop, tagged_value: (i64, ObjId)) {
|
||||
if self.enabled {
|
||||
if let Some(path) = doc.parents(&obj).ok().and_then(|mut p| p.visible_path()) {
|
||||
let value = tagged_value.0;
|
||||
self.patches.push(Patch::Increment {
|
||||
path,
|
||||
|
@ -183,6 +318,7 @@ impl OpObserver for Observer {
|
|||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn merge(&mut self, other: &Self) {
|
||||
self.patches.extend_from_slice(other.patches.as_slice())
|
||||
|
@ -219,6 +355,7 @@ impl Patch {
|
|||
Self::PutSeq { path, .. } => path.as_slice(),
|
||||
Self::Increment { path, .. } => path.as_slice(),
|
||||
Self::Insert { path, .. } => path.as_slice(),
|
||||
Self::SpliceText { path, .. } => path.as_slice(),
|
||||
Self::DeleteMap { path, .. } => path.as_slice(),
|
||||
Self::DeleteSeq { path, .. } => path.as_slice(),
|
||||
}
|
||||
|
@ -230,6 +367,7 @@ impl Patch {
|
|||
Self::PutSeq { obj, .. } => obj,
|
||||
Self::Increment { obj, .. } => obj,
|
||||
Self::Insert { obj, .. } => obj,
|
||||
Self::SpliceText { obj, .. } => obj,
|
||||
Self::DeleteMap { obj, .. } => obj,
|
||||
Self::DeleteSeq { obj, .. } => obj,
|
||||
}
|
||||
|
@ -243,11 +381,7 @@ impl TryFrom<Patch> for JsValue {
|
|||
let result = Object::new();
|
||||
match p {
|
||||
Patch::PutMap {
|
||||
path,
|
||||
key,
|
||||
value,
|
||||
conflict,
|
||||
..
|
||||
path, key, value, ..
|
||||
} => {
|
||||
js_set(&result, "action", "put")?;
|
||||
js_set(
|
||||
|
@ -256,15 +390,10 @@ impl TryFrom<Patch> for JsValue {
|
|||
export_path(path.as_slice(), &Prop::Map(key)),
|
||||
)?;
|
||||
js_set(&result, "value", alloc(&value.0).1)?;
|
||||
js_set(&result, "conflict", &JsValue::from_bool(conflict))?;
|
||||
Ok(result.into())
|
||||
}
|
||||
Patch::PutSeq {
|
||||
path,
|
||||
index,
|
||||
value,
|
||||
conflict,
|
||||
..
|
||||
path, index, value, ..
|
||||
} => {
|
||||
js_set(&result, "action", "put")?;
|
||||
js_set(
|
||||
|
@ -273,7 +402,6 @@ impl TryFrom<Patch> for JsValue {
|
|||
export_path(path.as_slice(), &Prop::Seq(index)),
|
||||
)?;
|
||||
js_set(&result, "value", alloc(&value.0).1)?;
|
||||
js_set(&result, "conflict", &JsValue::from_bool(conflict))?;
|
||||
Ok(result.into())
|
||||
}
|
||||
Patch::Insert {
|
||||
|
@ -282,7 +410,7 @@ impl TryFrom<Patch> for JsValue {
|
|||
values,
|
||||
..
|
||||
} => {
|
||||
js_set(&result, "action", "splice")?;
|
||||
js_set(&result, "action", "insert")?;
|
||||
js_set(
|
||||
&result,
|
||||
"path",
|
||||
|
@ -295,6 +423,21 @@ impl TryFrom<Patch> for JsValue {
|
|||
)?;
|
||||
Ok(result.into())
|
||||
}
|
||||
Patch::SpliceText {
|
||||
path,
|
||||
index: (_, index16),
|
||||
value,
|
||||
..
|
||||
} => {
|
||||
js_set(&result, "action", "splice")?;
|
||||
js_set(
|
||||
&result,
|
||||
"path",
|
||||
export_path(path.as_slice(), &Prop::Seq(index16)),
|
||||
)?;
|
||||
js_set(&result, "value", value.to_string())?;
|
||||
Ok(result.into())
|
||||
}
|
||||
Patch::Increment {
|
||||
path, prop, value, ..
|
||||
} => {
|
||||
|
|
|
@ -20,10 +20,6 @@ pub(crate) enum Datatype {
|
|||
}
|
||||
|
||||
impl Datatype {
|
||||
pub(crate) fn is_sequence(&self) -> bool {
|
||||
matches!(self, Self::List | Self::Text)
|
||||
}
|
||||
|
||||
pub(crate) fn is_scalar(&self) -> bool {
|
||||
!matches!(self, Self::Map | Self::Table | Self::List | Self::Text)
|
||||
}
|
||||
|
|
|
@ -104,8 +104,8 @@ describe('Automerge', () => {
|
|||
doc1.putObject("/", "list", "abc");
|
||||
const patches = doc1.popPatches()
|
||||
assert.deepEqual( patches, [
|
||||
{ action: 'put', conflict: false, path: [ 'list' ], value: [] },
|
||||
{ action: 'splice', path: [ 'list', 0 ], values: [ 'a', 'b', 'c' ] }])
|
||||
{ action: 'put', path: [ 'list' ], value: "" },
|
||||
{ action: 'splice', path: [ 'list', 0 ], value: 'abc' }])
|
||||
})
|
||||
|
||||
it('it should allow registering type wrappers', () => {
|
||||
|
@ -140,29 +140,26 @@ describe('Automerge', () => {
|
|||
|
||||
let mat = doc1.materialize("/")
|
||||
|
||||
assert.deepEqual( mat, { notes: "hello world".split("") } )
|
||||
assert.deepEqual( mat, { notes: "hello world" } )
|
||||
|
||||
const doc2 = create()
|
||||
let apply : any = doc2.materialize("/")
|
||||
doc2.enablePatches(true)
|
||||
doc2.registerDatatype("text", (n: Value[]) => new String(n.join("")))
|
||||
apply = doc2.applyPatches(apply)
|
||||
|
||||
doc2.merge(doc1);
|
||||
apply = doc2.applyPatches(apply)
|
||||
assert.deepEqual(_obj(apply), "_root")
|
||||
assert.deepEqual(_obj(apply['notes']), "1@aaaa")
|
||||
assert.deepEqual( apply, { notes: new String("hello world") } )
|
||||
assert.deepEqual( apply, { notes: "hello world" } )
|
||||
|
||||
doc2.splice("/notes", 6, 5, "everyone");
|
||||
apply = doc2.applyPatches(apply)
|
||||
assert.deepEqual( apply, { notes: new String("hello everyone") } )
|
||||
assert.deepEqual( apply, { notes: "hello everyone" } )
|
||||
|
||||
mat = doc2.materialize("/")
|
||||
assert.deepEqual(_obj(mat), "_root")
|
||||
// @ts-ignore
|
||||
assert.deepEqual(_obj(mat.notes), "1@aaaa")
|
||||
assert.deepEqual( mat, { notes: new String("hello everyone") } )
|
||||
assert.deepEqual( mat, { notes: "hello everyone" } )
|
||||
})
|
||||
|
||||
it('should set the OBJECT_ID property on lists, maps, and text objects and not on scalars', () => {
|
||||
|
@ -189,8 +186,8 @@ describe('Automerge', () => {
|
|||
assert.equal(_obj(applied.bytes), null)
|
||||
assert.equal(_obj(applied.counter), null)
|
||||
assert.equal(_obj(applied.date), null)
|
||||
assert.equal(_obj(applied.text), null)
|
||||
|
||||
assert.notEqual(_obj(applied.text), null)
|
||||
assert.notEqual(_obj(applied.list), null)
|
||||
assert.notEqual(_obj(applied.map), null)
|
||||
})
|
||||
|
|
|
@ -118,12 +118,6 @@ describe('Automerge', () => {
|
|||
doc.splice(notes, 6, 5, "everyone")
|
||||
|
||||
assert.deepEqual(doc.text(notes), "Hello everyone")
|
||||
|
||||
const obj = doc.insertObject(notes, 6, { hi: "there" })
|
||||
|
||||
assert.deepEqual(doc.text(notes), "Hello \ufffceveryone")
|
||||
assert.deepEqual(doc.get(notes, 6), obj)
|
||||
assert.deepEqual(doc.get(obj, "hi"), "there")
|
||||
})
|
||||
it('Querying Data (1)', () => {
|
||||
const doc1 = create("aabbcc")
|
||||
|
|
|
@ -222,8 +222,8 @@ describe('Automerge', () => {
|
|||
|
||||
const text = doc.putObject(root, "text", "");
|
||||
doc.splice(text, 0, 0, "hello ")
|
||||
doc.splice(text, 6, 0, ["w", "o", "r", "l", "d"])
|
||||
doc.splice(text, 11, 0, ["!", "?"])
|
||||
doc.splice(text, 6, 0, "world")
|
||||
doc.splice(text, 11, 0, "!?")
|
||||
assert.deepEqual(doc.getWithType(text, 0), ["str", "h"])
|
||||
assert.deepEqual(doc.getWithType(text, 1), ["str", "e"])
|
||||
assert.deepEqual(doc.getWithType(text, 9), ["str", "l"])
|
||||
|
@ -232,13 +232,12 @@ describe('Automerge', () => {
|
|||
assert.deepEqual(doc.getWithType(text, 12), ["str", "?"])
|
||||
})
|
||||
|
||||
it('should be able to insert objects into text', () => {
|
||||
it('should NOT be able to insert objects into text', () => {
|
||||
const doc = create()
|
||||
const text = doc.putObject("/", "text", "Hello world");
|
||||
const obj = doc.insertObject(text, 6, { hello: "world" });
|
||||
assert.deepEqual(doc.text(text), "Hello \ufffcworld");
|
||||
assert.deepEqual(doc.getWithType(text, 6), ["map", obj]);
|
||||
assert.deepEqual(doc.getWithType(obj, "hello"), ["str", "world"]);
|
||||
assert.throws(() => {
|
||||
doc.insertObject(text, 6, { hello: "world" });
|
||||
})
|
||||
})
|
||||
|
||||
it('should be able save all or incrementally', () => {
|
||||
|
@ -374,7 +373,6 @@ describe('Automerge', () => {
|
|||
|
||||
it('recursive sets are possible', () => {
|
||||
const doc = create("aaaa")
|
||||
doc.registerDatatype("text", (n: Value[]) => new String(n.join("")))
|
||||
const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]])
|
||||
const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] })
|
||||
doc.putObject("_root", "info1", "hello world") // 'text' object
|
||||
|
@ -382,13 +380,13 @@ describe('Automerge', () => {
|
|||
const l4 = doc.putObject("_root", "info3", "hello world")
|
||||
assert.deepEqual(doc.materialize(), {
|
||||
"list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]],
|
||||
"info1": new String("hello world"),
|
||||
"info1": "hello world",
|
||||
"info2": "hello world",
|
||||
"info3": new String("hello world"),
|
||||
"info3": "hello world",
|
||||
})
|
||||
assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] })
|
||||
assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]])
|
||||
assert.deepEqual(doc.materialize(l4), new String("hello world"))
|
||||
assert.deepEqual(doc.materialize(l4), "hello world")
|
||||
})
|
||||
|
||||
it('only returns an object id when objects are created', () => {
|
||||
|
@ -477,7 +475,7 @@ describe('Automerge', () => {
|
|||
doc2.enablePatches(true)
|
||||
doc2.loadIncremental(doc1.saveIncremental())
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: ['hello'], value: 'world', conflict: false }
|
||||
{ action: 'put', path: ['hello'], value: 'world' }
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -487,9 +485,9 @@ describe('Automerge', () => {
|
|||
doc2.enablePatches(true)
|
||||
doc2.loadIncremental(doc1.saveIncremental())
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: [ 'birds' ], value: {}, conflict: false },
|
||||
{ action: 'put', path: [ 'birds', 'friday' ], value: {}, conflict: false },
|
||||
{ action: 'put', path: [ 'birds', 'friday', 'robins' ], value: 3, conflict: false},
|
||||
{ action: 'put', path: [ 'birds' ], value: {} },
|
||||
{ action: 'put', path: [ 'birds', 'friday' ], value: {} },
|
||||
{ action: 'put', path: [ 'birds', 'friday', 'robins' ], value: 3},
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -501,7 +499,7 @@ describe('Automerge', () => {
|
|||
doc1.delete('_root', 'favouriteBird')
|
||||
doc2.loadIncremental(doc1.saveIncremental())
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: [ 'favouriteBird' ], value: 'Robin', conflict: false },
|
||||
{ action: 'put', path: [ 'favouriteBird' ], value: 'Robin' },
|
||||
{ action: 'del', path: [ 'favouriteBird' ] }
|
||||
])
|
||||
})
|
||||
|
@ -512,8 +510,8 @@ describe('Automerge', () => {
|
|||
doc2.enablePatches(true)
|
||||
doc2.loadIncremental(doc1.saveIncremental())
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: [ 'birds' ], value: [], conflict: false },
|
||||
{ action: 'splice', path: [ 'birds', 0 ], values: ['Goldfinch', 'Chaffinch'] },
|
||||
{ action: 'put', path: [ 'birds' ], value: [] },
|
||||
{ action: 'insert', path: [ 'birds', 0 ], values: ['Goldfinch', 'Chaffinch'] },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -525,9 +523,9 @@ describe('Automerge', () => {
|
|||
doc2.enablePatches(true)
|
||||
doc2.loadIncremental(doc1.saveIncremental())
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'splice', path: [ 'birds', 0 ], values: [{}] },
|
||||
{ action: 'put', path: [ 'birds', 0, 'species' ], value: 'Goldfinch', conflict: false },
|
||||
{ action: 'put', path: [ 'birds', 0, 'count', ], value: 3, conflict: false }
|
||||
{ action: 'insert', path: [ 'birds', 0 ], values: [{}] },
|
||||
{ action: 'put', path: [ 'birds', 0, 'species' ], value: 'Goldfinch' },
|
||||
{ action: 'put', path: [ 'birds', 0, 'count', ], value: 3 }
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -543,7 +541,7 @@ describe('Automerge', () => {
|
|||
assert.deepEqual(doc1.getWithType('1@aaaa', 1), ['str', 'Greenfinch'])
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'del', path: ['birds', 0] },
|
||||
{ action: 'splice', path: ['birds', 1], values: ['Greenfinch'] }
|
||||
{ action: 'insert', path: ['birds', 1], values: ['Greenfinch'] }
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -566,10 +564,10 @@ describe('Automerge', () => {
|
|||
assert.deepEqual([0, 1, 2, 3].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd'])
|
||||
assert.deepEqual([0, 1, 2, 3].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd'])
|
||||
assert.deepEqual(doc3.popPatches(), [
|
||||
{ action: 'splice', path: ['values', 0], values:['a','b','c','d'] },
|
||||
{ action: 'insert', path: ['values', 0], values:['a','b','c','d'] },
|
||||
])
|
||||
assert.deepEqual(doc4.popPatches(), [
|
||||
{ action: 'splice', path: ['values',0], values:['a','b','c','d'] },
|
||||
{ action: 'insert', path: ['values',0], values:['a','b','c','d'] },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -592,10 +590,10 @@ describe('Automerge', () => {
|
|||
assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f'])
|
||||
assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f'])
|
||||
assert.deepEqual(doc3.popPatches(), [
|
||||
{ action: 'splice', path: ['values', 2], values: ['c','d','e','f'] },
|
||||
{ action: 'insert', path: ['values', 2], values: ['c','d','e','f'] },
|
||||
])
|
||||
assert.deepEqual(doc4.popPatches(), [
|
||||
{ action: 'splice', path: ['values', 2], values: ['c','d','e','f'] },
|
||||
{ action: 'insert', path: ['values', 2], values: ['c','d','e','f'] },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -613,12 +611,11 @@ describe('Automerge', () => {
|
|||
assert.deepEqual(doc4.getWithType('_root', 'bird'), ['str', 'Goldfinch'])
|
||||
assert.deepEqual(doc4.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']])
|
||||
assert.deepEqual(doc3.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Greenfinch', conflict: false },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true },
|
||||
{ action: 'put', path: ['bird'], value: 'Greenfinch' },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch' },
|
||||
])
|
||||
assert.deepEqual(doc4.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch' },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -647,17 +644,13 @@ describe('Automerge', () => {
|
|||
['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc']
|
||||
])
|
||||
assert.deepEqual(doc1.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Chaffinch', conflict: true },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }
|
||||
{ action: 'put', path: ['bird'], value: 'Chaffinch' },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch' }
|
||||
])
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }
|
||||
])
|
||||
assert.deepEqual(doc3.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch' },
|
||||
])
|
||||
assert.deepEqual(doc3.popPatches(), [ ])
|
||||
})
|
||||
|
||||
it('should allow a conflict to be resolved', () => {
|
||||
|
@ -672,9 +665,9 @@ describe('Automerge', () => {
|
|||
doc3.loadIncremental(doc1.saveIncremental())
|
||||
assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']])
|
||||
assert.deepEqual(doc3.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Greenfinch', conflict: false },
|
||||
{ action: 'put', path: ['bird'], value: 'Chaffinch', conflict: true },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false }
|
||||
{ action: 'put', path: ['bird'], value: 'Greenfinch' },
|
||||
{ action: 'put', path: ['bird'], value: 'Chaffinch' },
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch' }
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -694,10 +687,10 @@ describe('Automerge', () => {
|
|||
assert.deepEqual(doc2.getWithType('_root', 'bird'), ['str', 'Goldfinch'])
|
||||
assert.deepEqual(doc2.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']])
|
||||
assert.deepEqual(doc1.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false }
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch' }
|
||||
])
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false }
|
||||
{ action: 'put', path: ['bird'], value: 'Goldfinch' }
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -720,12 +713,11 @@ describe('Automerge', () => {
|
|||
assert.deepEqual(doc4.getWithType('1@aaaa', 0), ['str', 'Redwing'])
|
||||
assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']])
|
||||
assert.deepEqual(doc3.popPatches(), [
|
||||
{ action: 'put', path: ['birds',0], value: 'Song Thrush', conflict: false },
|
||||
{ action: 'put', path: ['birds',0], value: 'Redwing', conflict: true }
|
||||
{ action: 'put', path: ['birds',0], value: 'Song Thrush' },
|
||||
{ action: 'put', path: ['birds',0], value: 'Redwing' }
|
||||
])
|
||||
assert.deepEqual(doc4.popPatches(), [
|
||||
{ action: 'put', path: ['birds',0], value: 'Redwing', conflict: false },
|
||||
{ action: 'put', path: ['birds',0], value: 'Redwing', conflict: true }
|
||||
{ action: 'put', path: ['birds',0], value: 'Redwing' },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -751,15 +743,14 @@ describe('Automerge', () => {
|
|||
assert.deepEqual(doc4.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']])
|
||||
assert.deepEqual(doc3.popPatches(), [
|
||||
{ action: 'del', path: ['birds',0], },
|
||||
{ action: 'put', path: ['birds',1], value: 'Song Thrush', conflict: false },
|
||||
{ action: 'splice', path: ['birds',0], values: ['Ring-necked parakeet'] },
|
||||
{ action: 'put', path: ['birds',2], value: 'Redwing', conflict: true }
|
||||
{ action: 'put', path: ['birds',1], value: 'Song Thrush' },
|
||||
{ action: 'insert', path: ['birds',0], values: ['Ring-necked parakeet'] },
|
||||
{ action: 'put', path: ['birds',2], value: 'Redwing' }
|
||||
])
|
||||
assert.deepEqual(doc4.popPatches(), [
|
||||
{ action: 'put', path: ['birds',0], value: 'Ring-necked parakeet', conflict: false },
|
||||
{ action: 'put', path: ['birds',2], value: 'Redwing', conflict: false },
|
||||
{ action: 'put', path: ['birds',0], value: 'Ring-necked parakeet', conflict: false },
|
||||
{ action: 'put', path: ['birds',2], value: 'Redwing', conflict: true }
|
||||
{ action: 'put', path: ['birds',0], value: 'Ring-necked parakeet' },
|
||||
{ action: 'put', path: ['birds',2], value: 'Redwing' },
|
||||
{ action: 'put', path: ['birds',0], value: 'Ring-necked parakeet' },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -775,14 +766,14 @@ describe('Automerge', () => {
|
|||
doc3.loadIncremental(change2)
|
||||
assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']])
|
||||
assert.deepEqual(doc3.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Robin', conflict: false },
|
||||
{ action: 'put', path: ['bird'], value: 'Wren', conflict: true }
|
||||
{ action: 'put', path: ['bird'], value: 'Robin' },
|
||||
{ action: 'put', path: ['bird'], value: 'Wren' }
|
||||
])
|
||||
doc3.loadIncremental(change3)
|
||||
assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Robin'])
|
||||
assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa']])
|
||||
assert.deepEqual(doc3.popPatches(), [
|
||||
{ action: 'put', path: ['bird'], value: 'Robin', conflict: false }
|
||||
{ action: 'put', path: ['bird'], value: 'Robin' }
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -797,14 +788,11 @@ describe('Automerge', () => {
|
|||
doc2.loadIncremental(change1)
|
||||
assert.deepEqual(doc1.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']])
|
||||
assert.deepEqual(doc1.popPatches(), [
|
||||
{ action: 'put', path: ['birds'], value: {}, conflict: true },
|
||||
{ action: 'put', path: ['birds', 'Sparrowhawk'], value: 1, conflict: false }
|
||||
{ action: 'put', path: ['birds'], value: {} },
|
||||
{ action: 'put', path: ['birds', 'Sparrowhawk'], value: 1 }
|
||||
])
|
||||
assert.deepEqual(doc2.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']])
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: ['birds'], value: {}, conflict: true },
|
||||
{ action: 'splice', path: ['birds',0], values: ['Parakeet'] }
|
||||
])
|
||||
assert.deepEqual(doc2.popPatches(), [])
|
||||
})
|
||||
|
||||
it('should support date objects', () => {
|
||||
|
@ -814,7 +802,7 @@ describe('Automerge', () => {
|
|||
doc2.loadIncremental(doc1.saveIncremental())
|
||||
assert.deepEqual(doc2.getWithType('_root', 'createdAt'), ['timestamp', now])
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: ['createdAt'], value: now, conflict: false }
|
||||
{ action: 'put', path: ['createdAt'], value: now }
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -828,11 +816,11 @@ describe('Automerge', () => {
|
|||
doc1.putObject('_root', 'list', [])
|
||||
|
||||
assert.deepEqual(doc1.popPatches(), [
|
||||
{ action: 'put', path: ['key1'], value: 1, conflict: false },
|
||||
{ action: 'put', path: ['key1'], value: 2, conflict: false },
|
||||
{ action: 'put', path: ['key2'], value: 3, conflict: false },
|
||||
{ action: 'put', path: ['map'], value: {}, conflict: false },
|
||||
{ action: 'put', path: ['list'], value: [], conflict: false },
|
||||
{ action: 'put', path: ['key1'], value: 1 },
|
||||
{ action: 'put', path: ['key1'], value: 2 },
|
||||
{ action: 'put', path: ['key2'], value: 3 },
|
||||
{ action: 'put', path: ['map'], value: {} },
|
||||
{ action: 'put', path: ['list'], value: [] },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -847,8 +835,8 @@ describe('Automerge', () => {
|
|||
doc1.insertObject(list, 2, [])
|
||||
|
||||
assert.deepEqual(doc1.popPatches(), [
|
||||
{ action: 'put', path: ['list'], value: [], conflict: false },
|
||||
{ action: 'splice', path: ['list', 0], values: [2,1,[],{},3] },
|
||||
{ action: 'put', path: ['list'], value: [] },
|
||||
{ action: 'insert', path: ['list', 0], values: [2,1,[],{},3] },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -861,8 +849,8 @@ describe('Automerge', () => {
|
|||
doc1.pushObject(list, [])
|
||||
|
||||
assert.deepEqual(doc1.popPatches(), [
|
||||
{ action: 'put', path: ['list'], value: [], conflict: false },
|
||||
{ action: 'splice', path: ['list',0], values: [1,{},[]] },
|
||||
{ action: 'put', path: ['list'], value: [] },
|
||||
{ action: 'insert', path: ['list',0], values: [1,{},[]] },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -874,8 +862,8 @@ describe('Automerge', () => {
|
|||
doc1.splice(list, 1, 2)
|
||||
|
||||
assert.deepEqual(doc1.popPatches(), [
|
||||
{ action: 'put', path: ['list'], value: [], conflict: false },
|
||||
{ action: 'splice', path: ['list',0], values: [1,4] },
|
||||
{ action: 'put', path: ['list'], value: [] },
|
||||
{ action: 'insert', path: ['list',0], values: [1,4] },
|
||||
])
|
||||
})
|
||||
|
||||
|
@ -886,7 +874,7 @@ describe('Automerge', () => {
|
|||
doc1.increment('_root', 'counter', 4)
|
||||
|
||||
assert.deepEqual(doc1.popPatches(), [
|
||||
{ action: 'put', path: ['counter'], value: 2, conflict: false },
|
||||
{ action: 'put', path: ['counter'], value: 2 },
|
||||
{ action: 'inc', path: ['counter'], value: 4 },
|
||||
])
|
||||
})
|
||||
|
@ -900,8 +888,8 @@ describe('Automerge', () => {
|
|||
doc1.delete('_root', 'key1')
|
||||
doc1.delete('_root', 'key2')
|
||||
assert.deepEqual(doc1.popPatches(), [
|
||||
{ action: 'put', path: ['key1'], value: 1, conflict: false },
|
||||
{ action: 'put', path: ['key2'], value: 2, conflict: false },
|
||||
{ action: 'put', path: ['key1'], value: 1 },
|
||||
{ action: 'put', path: ['key2'], value: 2 },
|
||||
{ action: 'del', path: ['key1'], },
|
||||
{ action: 'del', path: ['key2'], },
|
||||
])
|
||||
|
@ -916,7 +904,7 @@ describe('Automerge', () => {
|
|||
doc2.loadIncremental(doc1.saveIncremental())
|
||||
assert.deepEqual(doc2.getWithType('_root', 'starlings'), ['counter', 3])
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: ['starlings'], value: 2, conflict: false },
|
||||
{ action: 'put', path: ['starlings'], value: 2 },
|
||||
{ action: 'inc', path: ['starlings'], value: 1 }
|
||||
])
|
||||
})
|
||||
|
@ -934,8 +922,8 @@ describe('Automerge', () => {
|
|||
doc2.loadIncremental(doc1.saveIncremental())
|
||||
|
||||
assert.deepEqual(doc2.popPatches(), [
|
||||
{ action: 'put', path: ['list'], value: [], conflict: false },
|
||||
{ action: 'splice', path: ['list',0], values: [1] },
|
||||
{ action: 'put', path: ['list'], value: [] },
|
||||
{ action: 'insert', path: ['list',0], values: [1] },
|
||||
{ action: 'inc', path: ['list',0], value: 2 },
|
||||
{ action: 'inc', path: ['list',0], value: -5 },
|
||||
])
|
||||
|
@ -1940,5 +1928,54 @@ describe('Automerge', () => {
assert.deepStrictEqual(s1.sharedHeads, [c2, c8].sort())
})
})

it('can handle utf16 text', () => {
  const doc = create()
  doc.enablePatches(true)
  let mat : any = doc.materialize("/")

  doc.putObject("/", "width1", "AAAAAA")
  doc.putObject("/", "width2", "🐻🐻🐻🐻🐻🐻")
  doc.putObject("/", "mixed", "A🐻A🐻A🐻")
  mat = doc.applyPatches(mat)

  const remote = load(doc.save())
  remote.enablePatches(true)
  let r_mat : any = remote.materialize("/")

  assert.deepEqual(mat, { width1: "AAAAAA", width2: "🐻🐻🐻🐻🐻🐻", mixed: "A🐻A🐻A🐻" })
  assert.deepEqual(mat.width1.slice(2,4), "AA")
  assert.deepEqual(mat.width2.slice(2,4), "🐻")
  assert.deepEqual(mat.mixed.slice(1,4), "🐻A")

  assert.deepEqual(r_mat, { width1: "AAAAAA", width2: "🐻🐻🐻🐻🐻🐻", mixed: "A🐻A🐻A🐻" })
  assert.deepEqual(r_mat.width1.slice(2,4), "AA")
  assert.deepEqual(r_mat.width2.slice(2,4), "🐻")
  assert.deepEqual(r_mat.mixed.slice(1,4), "🐻A")

  doc.splice("/width1", 2, 2, "🐻")
  doc.splice("/width2", 2, 2, "A🐻A")
  doc.splice("/mixed", 3, 3, "X")

  mat = doc.applyPatches(mat)
  remote.loadIncremental(doc.saveIncremental());
  r_mat = remote.applyPatches(r_mat)

  assert.deepEqual(mat.width1, "AA🐻AA")
  assert.deepEqual(mat.width2, "🐻A🐻A🐻🐻🐻🐻")
  assert.deepEqual(mat.mixed, "A🐻XA🐻")

  assert.deepEqual(r_mat.width1, "AA🐻AA")
  assert.deepEqual(r_mat.width2, "🐻A🐻A🐻🐻🐻🐻")
  assert.deepEqual(r_mat.mixed, "A🐻XA🐻")

  // when indexing in the middle of a multibyte char it indexes at the char before
  doc.splice("/width2", 4, 1, "X")
  mat = doc.applyPatches(mat)
  remote.loadIncremental(doc.saveIncremental());
  r_mat = remote.applyPatches(r_mat)

  assert.deepEqual(mat.width2, "🐻AXA🐻🐻🐻🐻")
})
})
})
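Aside, not part of the changeset: the slices in this test only make sense once Unicode scalar values are distinguished from UTF-16 code units, because JavaScript string indices (and .slice) count the latter. A self-contained sketch of the two counts (plain Rust standard library, no Automerge APIs assumed):

```rust
fn main() {
    let bear = "🐻";
    assert_eq!(bear.chars().count(), 1);        // one Unicode scalar value
    assert_eq!(bear.encode_utf16().count(), 2); // two UTF-16 code units (what JS indices count)
    assert_eq!(bear.len(), 4);                  // four UTF-8 bytes

    // "A🐻A🐻A🐻" is 6 scalar values but 9 UTF-16 code units,
    // which is why mixed.slice(1,4) above lands on "🐻A".
    let mixed = "A🐻A🐻A🐻";
    assert_eq!(mixed.chars().count(), 6);
    assert_eq!(mixed.encode_utf16().count(), 9);
}
```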
|
||||
|
|
|
@ -66,6 +66,17 @@ fn get_changes(doc: &Automerge, patches: Vec<Patch>) {
|
|||
doc.path_to_object(&obj)
|
||||
)
|
||||
}
|
||||
Patch::Splice {
|
||||
obj, index, value, ..
|
||||
} => {
|
||||
println!(
|
||||
"splice '{:?}' at {:?} in obj {:?}, object path {:?}",
|
||||
value,
|
||||
index,
|
||||
obj,
|
||||
doc.path_to_object(&obj)
|
||||
)
|
||||
}
|
||||
Patch::Increment {
|
||||
obj, prop, value, ..
|
||||
} => {
|
||||
|
@ -83,6 +94,12 @@ fn get_changes(doc: &Automerge, patches: Vec<Patch>) {
|
|||
obj,
|
||||
doc.path_to_object(&obj)
|
||||
),
|
||||
Patch::Expose { obj, prop, .. } => println!(
|
||||
"expose {:?} in obj {:?}, object path {:?}",
|
||||
prop,
|
||||
obj,
|
||||
doc.path_to_object(&obj)
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -125,6 +125,10 @@ impl<Obs: Observation> AutoCommitWithObs<Obs> {
    self.doc.get_actor()
}

pub fn set_utf16(&mut self, enabled: bool) {
    self.doc.utf16 = enabled
}

fn ensure_transaction_open(&mut self) {
    if self.transaction.is_none() {
        let args = self.doc.transaction_args();
|
||||
|
@ -491,6 +495,25 @@ impl<Obs: Observation> Transactable for AutoCommitWithObs<Obs> {
|
|||
)
|
||||
}
|
||||
|
||||
fn splice_text<O: AsRef<ExId>>(
|
||||
&mut self,
|
||||
obj: O,
|
||||
pos: usize,
|
||||
del: usize,
|
||||
text: &str,
|
||||
) -> Result<(), AutomergeError> {
|
||||
self.ensure_transaction_open();
|
||||
let (current, tx) = self.transaction.as_mut().unwrap();
|
||||
tx.splice_text(
|
||||
&mut self.doc,
|
||||
current.observer(),
|
||||
obj.as_ref(),
|
||||
pos,
|
||||
del,
|
||||
text,
|
||||
)
|
||||
}
|
||||
|
||||
fn text<O: AsRef<ExId>>(&self, obj: O) -> Result<String, AutomergeError> {
|
||||
self.doc.text(obj)
|
||||
}
|
||||
|
|
|
@ -58,6 +58,7 @@ pub struct Automerge {
|
|||
pub(crate) actor: Actor,
|
||||
/// The maximum operation counter this document has seen.
|
||||
pub(crate) max_op: u64,
|
||||
pub utf16: bool,
|
||||
}
|
||||
|
||||
impl Automerge {
|
||||
|
@ -74,6 +75,7 @@ impl Automerge {
|
|||
saved: Default::default(),
|
||||
actor: Actor::Unused(ActorId::random()),
|
||||
max_op: 0,
|
||||
utf16: false,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -322,9 +324,7 @@ impl Automerge {
|
|||
&self,
|
||||
obj: O,
|
||||
) -> Result<Vec<(ExId, Prop)>, AutomergeError> {
|
||||
let mut path = self.parents(obj.as_ref().clone())?.collect::<Vec<_>>();
|
||||
path.reverse();
|
||||
Ok(path)
|
||||
Ok(self.parents(obj.as_ref().clone())?.path())
|
||||
}
|
||||
|
||||
/// Get the keys of the object `obj`.
|
||||
|
@ -513,11 +513,7 @@ impl Automerge {
|
|||
let query = self.ops.search(&obj, query::ListVals::new());
|
||||
let mut buffer = String::new();
|
||||
for q in &query.ops {
|
||||
if let OpType::Put(ScalarValue::Str(s)) = &q.action {
|
||||
buffer.push_str(s);
|
||||
} else {
|
||||
buffer.push('\u{fffc}');
|
||||
}
|
||||
buffer.push_str(q.to_str());
|
||||
}
|
||||
Ok(buffer)
|
||||
}
|
||||
|
@ -593,7 +589,7 @@ impl Automerge {
|
|||
}
|
||||
Prop::Seq(n) => self
|
||||
.ops
|
||||
.search(&obj, query::Nth::new(n))
|
||||
.search(&obj, query::Nth::new(n, false))
|
||||
.ops
|
||||
.into_iter()
|
||||
.map(|o| (o.value(), self.id_to_exid(o.id)))
|
||||
|
@ -696,6 +692,7 @@ impl Automerge {
|
|||
saved: Default::default(),
|
||||
actor: Actor::Unused(ActorId::random()),
|
||||
max_op,
|
||||
utf16: false,
|
||||
}
|
||||
}
|
||||
storage::Chunk::Change(stored_change) => {
|
||||
|
@ -806,11 +803,11 @@ impl Automerge {
|
|||
self.update_history(change, ops.len());
|
||||
if let Some(observer) = observer {
|
||||
for (obj, op) in ops {
|
||||
self.ops.insert_op_with_observer(&obj, op, *observer);
|
||||
self.insert_op_with_observer(&obj, op, *observer);
|
||||
}
|
||||
} else {
|
||||
for (obj, op) in ops {
|
||||
self.ops.insert_op(&obj, op);
|
||||
self.insert_op(&obj, op);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1242,6 +1239,118 @@ impl Automerge {
|
|||
objects.map(|os| os.iter().filter_map(|o| self.exid_to_obj(o).ok()).collect());
|
||||
self.ops.visualise(objects)
|
||||
}
|
||||
|
||||
pub(crate) fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op {
|
||||
let q = self.ops.search(obj, query::SeekOp::new(&op));
|
||||
|
||||
let succ = q.succ;
|
||||
let pos = q.pos;
|
||||
|
||||
self.ops.add_succ(obj, &succ, &op);
|
||||
|
||||
if !op.is_delete() {
|
||||
self.ops.insert(pos, obj, op.clone());
|
||||
}
|
||||
op
|
||||
}
|
||||
|
||||
pub(crate) fn insert_op_with_observer<Obs: OpObserver>(
|
||||
&mut self,
|
||||
obj: &ObjId,
|
||||
op: Op,
|
||||
observer: &mut Obs,
|
||||
) -> Op {
|
||||
let obj_type = self.ops.object_type(obj);
|
||||
let utf16 = self.utf16 && matches!(obj_type, Some(ObjType::Text));
|
||||
let q = self
|
||||
.ops
|
||||
.search(obj, query::SeekOpWithPatch::new(&op, utf16));
|
||||
|
||||
let query::SeekOpWithPatch {
|
||||
pos,
|
||||
succ,
|
||||
seen,
|
||||
seen8,
|
||||
last_width,
|
||||
values,
|
||||
had_value_before,
|
||||
..
|
||||
} = q;
|
||||
|
||||
let ex_obj = self.ops.id_to_exid(obj.0);
|
||||
|
||||
let key = match op.key {
|
||||
Key::Map(index) => self.ops.m.props[index].clone().into(),
|
||||
Key::Seq(_) => seen.into(),
|
||||
};
|
||||
|
||||
if op.insert {
|
||||
if obj_type == Some(ObjType::Text) {
|
||||
if utf16 {
|
||||
let s = op.to_str();
|
||||
let len8 = s.chars().count();
|
||||
let len16 = s.encode_utf16().count();
|
||||
let index8 = seen8;
|
||||
let index16 = seen;
|
||||
observer.splice_text_utf16(self, ex_obj, (index8, index16), (len8, len16), s);
|
||||
} else {
|
||||
observer.splice_text(self, ex_obj, seen, op.to_str());
|
||||
}
|
||||
} else {
|
||||
let value = (op.value(), self.ops.id_to_exid(op.id));
|
||||
observer.insert(self, ex_obj, seen, value);
|
||||
}
|
||||
} else if op.is_delete() {
|
||||
if let Some(winner) = &values.last() {
|
||||
let value = (winner.value(), self.ops.id_to_exid(winner.id));
|
||||
let conflict = values.len() > 1;
|
||||
observer.expose(self, ex_obj, key, value, conflict);
|
||||
} else if had_value_before {
|
||||
if utf16 {
|
||||
let len16 = last_width;
|
||||
let index8 = seen8;
|
||||
let index16 = seen;
|
||||
observer.delete_utf16(self, ex_obj, (index8, index16), (1, len16));
|
||||
} else {
|
||||
observer.delete(self, ex_obj, key);
|
||||
}
|
||||
}
|
||||
} else if let Some(value) = op.get_increment_value() {
|
||||
// only observe this increment if the counter is visible, i.e. the counter's
|
||||
// create op is in the values
|
||||
//if values.iter().any(|value| op.pred.contains(&value.id)) {
|
||||
if values
|
||||
.last()
|
||||
.map(|value| op.pred.contains(&value.id))
|
||||
.unwrap_or_default()
|
||||
{
|
||||
// we have observed the value
|
||||
observer.increment(self, ex_obj, key, (value, self.ops.id_to_exid(op.id)));
|
||||
}
|
||||
} else {
|
||||
let just_conflict = values
|
||||
.last()
|
||||
.map(|value| self.ops.m.lamport_cmp(op.id, value.id) != Ordering::Greater)
|
||||
.unwrap_or(false);
|
||||
let value = (op.value(), self.ops.id_to_exid(op.id));
|
||||
if op.is_list_op() && !had_value_before {
|
||||
observer.insert(self, ex_obj, seen, value);
|
||||
} else if just_conflict {
|
||||
observer.flag_conflict(self, ex_obj, key);
|
||||
} else {
|
||||
let conflict = !values.is_empty();
|
||||
observer.put(self, ex_obj, key, value, conflict);
|
||||
}
|
||||
}
|
||||
|
||||
self.ops.add_succ(obj, &succ, &op);
|
||||
|
||||
if !op.is_delete() {
|
||||
self.ops.insert(pos, obj, op.clone());
|
||||
}
|
||||
|
||||
op
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Automerge {
|
||||
|
|
|
@ -1318,21 +1318,21 @@ fn compute_list_indexes_correctly_when_list_element_is_split_across_tree_nodes()
|
|||
fn get_parent_objects() {
|
||||
let mut doc = AutoCommit::new();
|
||||
let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap();
|
||||
let list = doc.insert_object(&map, 0, ObjType::List).unwrap();
|
||||
let list = doc.put_object(&map, "b", ObjType::List).unwrap();
|
||||
doc.insert(&list, 0, 2).unwrap();
|
||||
let text = doc.put_object(&list, 0, ObjType::Text).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
doc.parents(&map).unwrap().next(),
|
||||
Some((ROOT, Prop::Map("a".into())))
|
||||
Some((ROOT, Prop::Map("a".into()), true))
|
||||
);
|
||||
assert_eq!(
|
||||
doc.parents(&list).unwrap().next(),
|
||||
Some((map, Prop::Seq(0)))
|
||||
Some((map, Prop::Map("b".into()), true))
|
||||
);
|
||||
assert_eq!(
|
||||
doc.parents(&text).unwrap().next(),
|
||||
Some((list, Prop::Seq(0)))
|
||||
Some((list, Prop::Seq(0), true))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1340,7 +1340,7 @@ fn get_parent_objects() {
|
|||
fn get_path_to_object() {
|
||||
let mut doc = AutoCommit::new();
|
||||
let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap();
|
||||
let list = doc.insert_object(&map, 0, ObjType::List).unwrap();
|
||||
let list = doc.put_object(&map, "b", ObjType::List).unwrap();
|
||||
doc.insert(&list, 0, 2).unwrap();
|
||||
let text = doc.put_object(&list, 0, ObjType::Text).unwrap();
|
||||
|
||||
|
@ -1350,13 +1350,16 @@ fn get_path_to_object() {
|
|||
);
|
||||
assert_eq!(
|
||||
doc.path_to_object(&list).unwrap(),
|
||||
vec![(ROOT, Prop::Map("a".into())), (map.clone(), Prop::Seq(0)),]
|
||||
vec![
|
||||
(ROOT, Prop::Map("a".into())),
|
||||
(map.clone(), Prop::Map("b".into())),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
doc.path_to_object(&text).unwrap(),
|
||||
vec![
|
||||
(ROOT, Prop::Map("a".into())),
|
||||
(map, Prop::Seq(0)),
|
||||
(map, Prop::Map("b".into())),
|
||||
(list, Prop::Seq(0)),
|
||||
]
|
||||
);
|
||||
|
@ -1366,14 +1369,14 @@ fn get_path_to_object() {
|
|||
fn parents_iterator() {
|
||||
let mut doc = AutoCommit::new();
|
||||
let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap();
|
||||
let list = doc.insert_object(&map, 0, ObjType::List).unwrap();
|
||||
let list = doc.put_object(&map, "b", ObjType::List).unwrap();
|
||||
doc.insert(&list, 0, 2).unwrap();
|
||||
let text = doc.put_object(&list, 0, ObjType::Text).unwrap();
|
||||
|
||||
let mut parents = doc.parents(text).unwrap();
|
||||
assert_eq!(parents.next(), Some((list, Prop::Seq(0))));
|
||||
assert_eq!(parents.next(), Some((map, Prop::Seq(0))));
|
||||
assert_eq!(parents.next(), Some((ROOT, Prop::Map("a".into()))));
|
||||
assert_eq!(parents.next(), Some((list, Prop::Seq(0), true)));
|
||||
assert_eq!(parents.next(), Some((map, Prop::Map("b".into()), true)));
|
||||
assert_eq!(parents.next(), Some((ROOT, Prop::Map("a".into()), true)));
|
||||
assert_eq!(parents.next(), None);
|
||||
}
|
||||
|
||||
|
@ -1383,27 +1386,28 @@ fn can_insert_a_grapheme_into_text() {
|
|||
let mut tx = doc.transaction();
|
||||
let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap();
|
||||
let polar_bear = "🐻❄️";
|
||||
tx.insert(&text, 0, polar_bear).unwrap();
|
||||
tx.splice_text(&text, 0, 0, polar_bear).unwrap();
|
||||
tx.commit();
|
||||
let s = doc.text(&text).unwrap();
|
||||
assert_eq!(s, polar_bear);
|
||||
let len = doc.length(&text);
|
||||
assert_eq!(len, 1); // just one grapheme
|
||||
assert_eq!(len, 4); // 4 utf8 chars
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_insert_long_string_into_text() {
|
||||
fn long_strings_spliced_into_text_get_segmented_by_utf8_chars() {
|
||||
let mut doc = Automerge::new();
|
||||
let mut tx = doc.transaction();
|
||||
let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap();
|
||||
let polar_bear = "🐻❄️";
|
||||
let polar_bear_army = polar_bear.repeat(100);
|
||||
tx.insert(&text, 0, &polar_bear_army).unwrap();
|
||||
tx.splice_text(&text, 0, 0, &polar_bear_army).unwrap();
|
||||
tx.commit();
|
||||
let s = doc.text(&text).unwrap();
|
||||
assert_eq!(s, polar_bear_army);
|
||||
let len = doc.length(&text);
|
||||
assert_eq!(len, 1); // many graphemes
|
||||
assert_eq!(len, polar_bear.chars().count() * 100);
|
||||
assert_eq!(len, 400);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use crate::storage::load::Error as LoadError;
|
||||
use crate::types::{ActorId, ScalarValue};
|
||||
use crate::value::DataType;
|
||||
use crate::ChangeHash;
|
||||
use crate::{ChangeHash, ObjType};
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
|
@ -28,6 +28,8 @@ pub enum AutomergeError {
|
|||
InvalidObjId(String),
|
||||
#[error("invalid obj id format `{0}`")]
|
||||
InvalidObjIdFormat(String),
|
||||
#[error("invalid op for object of type `{0}`")]
|
||||
InvalidOp(ObjType),
|
||||
#[error("seq {0} is out of bounds")]
|
||||
InvalidSeq(u64),
|
||||
#[error("invalid type of value, expected `{expected}` but received `{unexpected}`")]
|
||||
|
@ -47,6 +49,12 @@ pub enum AutomergeError {
    NotAnObject,
}

impl PartialEq for AutomergeError {
    fn eq(&self, other: &Self) -> bool {
        std::mem::discriminant(self) == std::mem::discriminant(other)
    }
}
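Aside, not part of the changeset: comparing discriminants, as the new PartialEq impl above does, makes two errors of the same variant equal regardless of their payloads, which is convenient for assertions in tests. A self-contained sketch with a stand-in enum (not the real AutomergeError):

```rust
use std::mem::discriminant;

#[derive(Debug)]
enum Error {
    InvalidSeq(u64),
    NotAnObject,
}

impl PartialEq for Error {
    fn eq(&self, other: &Self) -> bool {
        // Equal when the variants match; payloads are ignored.
        discriminant(self) == discriminant(other)
    }
}

fn main() {
    assert_eq!(Error::InvalidSeq(1), Error::InvalidSeq(99)); // payload ignored
    assert_ne!(Error::InvalidSeq(1), Error::NotAnObject);
}
```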
|
||||
|
||||
#[cfg(feature = "wasm")]
|
||||
impl From<AutomergeError> for wasm_bindgen::JsValue {
|
||||
fn from(err: AutomergeError) -> Self {
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::exid::ExId;
|
||||
use crate::Parents;
|
||||
use crate::Automerge;
|
||||
use crate::Prop;
|
||||
use crate::Value;
|
||||
|
||||
|
@ -7,22 +7,37 @@ use crate::Value;
|
|||
pub trait OpObserver {
|
||||
/// A new value has been inserted into the given object.
|
||||
///
|
||||
/// - `parents`: A parents iterator that can be used to collect path information
|
||||
/// - `doc`: a handle to the doc after the op has been inserted, can be used to query information
|
||||
/// - `objid`: the object that has been inserted into.
|
||||
/// - `index`: the index the new value has been inserted at.
|
||||
/// - `tagged_value`: the value that has been inserted and the id of the operation that did the
|
||||
/// insert.
|
||||
fn insert(
|
||||
&mut self,
|
||||
parents: Parents<'_>,
|
||||
doc: &Automerge,
|
||||
objid: ExId,
|
||||
index: usize,
|
||||
tagged_value: (Value<'_>, ExId),
|
||||
);
|
||||
|
||||
fn splice_text(&mut self, _doc: &Automerge, _objid: ExId, _index: usize, _value: &str) {
|
||||
panic!("splice_text not implemented in observer")
|
||||
}
|
||||
|
||||
fn splice_text_utf16(
|
||||
&mut self,
|
||||
_doc: &Automerge,
|
||||
_objid: ExId,
|
||||
_index: (usize, usize),
|
||||
_len: (usize, usize),
|
||||
_value: &str,
|
||||
) {
|
||||
panic!("splice_text_utf16 not supported in observer")
|
||||
}
|
||||
|
||||
/// A new value has been put into the given object.
|
||||
///
|
||||
/// - `parents`: A parents iterator that can be used to collect path information
|
||||
/// - `doc`: a handle to the doc after the op has been inserted, can be used to query information
|
||||
/// - `objid`: the object that has been put into.
|
||||
/// - `prop`: the prop that the value has been put at.
|
||||
/// - `tagged_value`: the value that has been put into the object and the id of the operation
|
||||
|
@ -30,34 +45,64 @@ pub trait OpObserver {
|
|||
/// - `conflict`: whether this put conflicts with other operations.
|
||||
fn put(
|
||||
&mut self,
|
||||
parents: Parents<'_>,
|
||||
doc: &Automerge,
|
||||
objid: ExId,
|
||||
prop: Prop,
|
||||
tagged_value: (Value<'_>, ExId),
|
||||
conflict: bool,
|
||||
);
|
||||
|
||||
/// When a delete op exposes a previously conflicted value
|
||||
/// Similar to a put op - except that for maps, lists and text, edits
|
||||
/// may already exist and need to be queried
|
||||
///
|
||||
/// - `doc`: a handle to the doc after the op has been inserted, can be used to query information
|
||||
/// - `objid`: the object that has been put into.
|
||||
/// - `prop`: the prop that the value has been put at.
|
||||
/// - `tagged_value`: the value that has been put into the object and the id of the operation
|
||||
/// that did the put.
|
||||
/// - `conflict`: whether this put conflicts with other operations.
|
||||
fn expose(
|
||||
&mut self,
|
||||
doc: &Automerge,
|
||||
objid: ExId,
|
||||
prop: Prop,
|
||||
tagged_value: (Value<'_>, ExId),
|
||||
conflict: bool,
|
||||
);
|
||||
|
||||
/// Flag a new conflict on a value without changing it
|
||||
///
|
||||
/// - `doc`: a handle to the doc after the op has been inserted, can be used to query information
|
||||
/// - `objid`: the object that has been put into.
|
||||
/// - `prop`: the prop that the value has been put at.
|
||||
fn flag_conflict(&mut self, doc: &Automerge, objid: ExId, prop: Prop);
|
||||
|
||||
/// A counter has been incremented.
|
||||
///
|
||||
/// - `parents`: A parents iterator that can be used to collect path information
|
||||
/// - `doc`: a handle to the doc after the op has been inserted, can be used to query information
|
||||
/// - `objid`: the object that contains the counter.
|
||||
/// - `prop`: the prop that the counter is at.
|
||||
/// - `tagged_value`: the amount the counter has been incremented by, and the id of the
|
||||
/// increment operation.
|
||||
fn increment(
|
||||
&mut self,
|
||||
parents: Parents<'_>,
|
||||
objid: ExId,
|
||||
prop: Prop,
|
||||
tagged_value: (i64, ExId),
|
||||
);
|
||||
fn increment(&mut self, doc: &Automerge, objid: ExId, prop: Prop, tagged_value: (i64, ExId));
|
||||
|
||||
/// A value has been deleted.
|
||||
///
|
||||
/// - `parents`: A parents iterator that can be used to collect path information
|
||||
/// - `doc`: a handle to the doc after the op has been inserted, can be used to query information
|
||||
/// - `objid`: the object that has been deleted in.
|
||||
/// - `prop`: the prop of the value that has been deleted.
|
||||
fn delete(&mut self, parents: Parents<'_>, objid: ExId, prop: Prop);
|
||||
fn delete(&mut self, doc: &Automerge, objid: ExId, prop: Prop);
|
||||
|
||||
fn delete_utf16(
|
||||
&mut self,
|
||||
_doc: &Automerge,
|
||||
_objid: ExId,
|
||||
_index: (usize, usize),
|
||||
_len: (usize, usize),
|
||||
) {
|
||||
panic!("delete_utf16 not supported in observer")
|
||||
}
|
||||
|
||||
/// Branch off a new op_observer to be merged back later
|
||||
///
|
||||
|
@ -77,16 +122,18 @@ pub trait OpObserver {
|
|||
impl OpObserver for () {
|
||||
fn insert(
|
||||
&mut self,
|
||||
_parents: Parents<'_>,
|
||||
_doc: &Automerge,
|
||||
_objid: ExId,
|
||||
_index: usize,
|
||||
_tagged_value: (Value<'_>, ExId),
|
||||
) {
|
||||
}
|
||||
|
||||
fn splice_text(&mut self, _doc: &Automerge, _objid: ExId, _index: usize, _value: &str) {}
|
||||
|
||||
fn put(
|
||||
&mut self,
|
||||
_parents: Parents<'_>,
|
||||
_doc: &Automerge,
|
||||
_objid: ExId,
|
||||
_prop: Prop,
|
||||
_tagged_value: (Value<'_>, ExId),
|
||||
|
@ -94,16 +141,28 @@ impl OpObserver for () {
|
|||
) {
|
||||
}
|
||||
|
||||
fn expose(
|
||||
&mut self,
|
||||
_doc: &Automerge,
|
||||
_objid: ExId,
|
||||
_prop: Prop,
|
||||
_tagged_value: (Value<'_>, ExId),
|
||||
_conflict: bool,
|
||||
) {
|
||||
}
|
||||
|
||||
fn flag_conflict(&mut self, _doc: &Automerge, _objid: ExId, _prop: Prop) {}
|
||||
|
||||
fn increment(
|
||||
&mut self,
|
||||
_parents: Parents<'_>,
|
||||
_doc: &Automerge,
|
||||
_objid: ExId,
|
||||
_prop: Prop,
|
||||
_tagged_value: (i64, ExId),
|
||||
) {
|
||||
}
|
||||
|
||||
fn delete(&mut self, _parents: Parents<'_>, _objid: ExId, _prop: Prop) {}
|
||||
fn delete(&mut self, _doc: &Automerge, _objid: ExId, _prop: Prop) {}
|
||||
|
||||
fn merge(&mut self, _other: &Self) {}
|
||||
|
||||
|
@ -125,59 +184,87 @@ impl VecOpObserver {
|
|||
}
|
||||
|
||||
impl OpObserver for VecOpObserver {
|
||||
fn insert(
|
||||
&mut self,
|
||||
mut parents: Parents<'_>,
|
||||
obj: ExId,
|
||||
index: usize,
|
||||
(value, id): (Value<'_>, ExId),
|
||||
) {
|
||||
let path = parents.path();
|
||||
fn insert(&mut self, doc: &Automerge, obj: ExId, index: usize, (value, id): (Value<'_>, ExId)) {
|
||||
if let Ok(mut p) = doc.parents(&obj) {
|
||||
self.patches.push(Patch::Insert {
|
||||
obj,
|
||||
path,
|
||||
path: p.path(),
|
||||
index,
|
||||
value: (value.into_owned(), id),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn splice_text(&mut self, doc: &Automerge, obj: ExId, index: usize, value: &str) {
|
||||
if let Ok(mut p) = doc.parents(&obj) {
|
||||
self.patches.push(Patch::Splice {
|
||||
obj,
|
||||
path: p.path(),
|
||||
index,
|
||||
value: value.to_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn put(
|
||||
&mut self,
|
||||
mut parents: Parents<'_>,
|
||||
doc: &Automerge,
|
||||
obj: ExId,
|
||||
prop: Prop,
|
||||
(value, id): (Value<'_>, ExId),
|
||||
conflict: bool,
|
||||
) {
|
||||
let path = parents.path();
|
||||
if let Ok(mut p) = doc.parents(&obj) {
|
||||
self.patches.push(Patch::Put {
|
||||
obj,
|
||||
path,
|
||||
path: p.path(),
|
||||
prop,
|
||||
value: (value.into_owned(), id),
|
||||
conflict,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn increment(
|
||||
fn expose(
|
||||
&mut self,
|
||||
mut parents: Parents<'_>,
|
||||
doc: &Automerge,
|
||||
obj: ExId,
|
||||
prop: Prop,
|
||||
tagged_value: (i64, ExId),
|
||||
(value, id): (Value<'_>, ExId),
|
||||
conflict: bool,
|
||||
) {
|
||||
let path = parents.path();
|
||||
if let Ok(mut p) = doc.parents(&obj) {
|
||||
self.patches.push(Patch::Expose {
|
||||
obj,
|
||||
path: p.path(),
|
||||
prop,
|
||||
value: (value.into_owned(), id),
|
||||
conflict,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn flag_conflict(&mut self, mut _doc: &Automerge, _obj: ExId, _prop: Prop) {}
|
||||
|
||||
fn increment(&mut self, doc: &Automerge, obj: ExId, prop: Prop, tagged_value: (i64, ExId)) {
|
||||
if let Ok(mut p) = doc.parents(&obj) {
|
||||
self.patches.push(Patch::Increment {
|
||||
obj,
|
||||
path,
|
||||
path: p.path(),
|
||||
prop,
|
||||
value: tagged_value,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn delete(&mut self, mut parents: Parents<'_>, obj: ExId, prop: Prop) {
|
||||
let path = parents.path();
|
||||
self.patches.push(Patch::Delete { obj, path, prop })
|
||||
fn delete(&mut self, doc: &Automerge, obj: ExId, prop: Prop) {
|
||||
if let Ok(mut p) = doc.parents(&obj) {
|
||||
self.patches.push(Patch::Delete {
|
||||
obj,
|
||||
path: p.path(),
|
||||
prop,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn merge(&mut self, other: &Self) {
|
||||
|
@ -205,7 +292,20 @@ pub enum Patch {
|
|||
/// Whether this put conflicts with another.
|
||||
conflict: bool,
|
||||
},
|
||||
/// Inserting a new element into a list/text
|
||||
/// Exposing (via a delete) a previously conflicted value at a prop in a map, or at a list element
|
||||
Expose {
|
||||
/// path to the object
|
||||
path: Vec<(ExId, Prop)>,
|
||||
/// The object that was put into.
|
||||
obj: ExId,
|
||||
/// The prop that the new value was put at.
|
||||
prop: Prop,
|
||||
/// The value that was put, and the id of the operation that put it there.
|
||||
value: (Value<'static>, ExId),
|
||||
/// Whether this put conflicts with another.
|
||||
conflict: bool,
|
||||
},
|
||||
/// Inserting a new element into a list
|
||||
Insert {
|
||||
/// path to the object
|
||||
path: Vec<(ExId, Prop)>,
|
||||
|
@ -216,6 +316,17 @@ pub enum Patch {
|
|||
/// The value that was inserted, and the id of the operation that inserted it there.
|
||||
value: (Value<'static>, ExId),
|
||||
},
|
||||
/// Splicing a text object
|
||||
Splice {
|
||||
/// path to the object
|
||||
path: Vec<(ExId, Prop)>,
|
||||
/// The object that was inserted into.
|
||||
obj: ExId,
|
||||
/// The index that the new value was inserted at.
|
||||
index: usize,
|
||||
/// The value that was spliced
|
||||
value: String,
|
||||
},
|
||||
/// Incrementing a counter.
|
||||
Increment {
|
||||
/// path to the object
|
||||
|
|
|
@ -3,7 +3,7 @@ use crate::exid::ExId;
|
|||
use crate::indexed_cache::IndexedCache;
|
||||
use crate::op_tree::{self, OpTree};
|
||||
use crate::parents::Parents;
|
||||
use crate::query::{self, OpIdSearch, TreeQuery};
|
||||
use crate::query::{self, OpIdVisSearch, TreeQuery};
|
||||
use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpIds, OpType, Prop};
|
||||
use crate::{ObjType, OpObserver};
|
||||
use fxhash::FxBuildHasher;
|
||||
|
@ -73,10 +73,12 @@ impl OpSetInternal {
|
|||
Parents { obj, ops: self }
|
||||
}
|
||||
|
||||
pub(crate) fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key)> {
|
||||
pub(crate) fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key, bool)> {
|
||||
let parent = self.trees.get(obj)?.parent?;
|
||||
let key = self.search(&parent, OpIdSearch::new(obj.0)).key().unwrap();
|
||||
Some((parent, key))
|
||||
let query = self.search(&parent, OpIdVisSearch::new(obj.0));
|
||||
let key = query.key().unwrap();
|
||||
let visible = query.visible;
|
||||
Some((parent, key, visible))
|
||||
}
|
||||
|
||||
pub(crate) fn export_key(&self, obj: ObjId, key: Key) -> Prop {
|
||||
|
@ -169,7 +171,7 @@ impl OpSetInternal {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn replace<F>(&mut self, obj: &ObjId, index: usize, f: F)
|
||||
pub(crate) fn change_vis<F>(&mut self, obj: &ObjId, index: usize, f: F)
|
||||
where
|
||||
F: Fn(&mut Op),
|
||||
{
|
||||
|
@ -179,15 +181,10 @@ impl OpSetInternal {
|
|||
}
|
||||
|
||||
/// Add `op` as a successor to each op at `op_indices` in `obj`
|
||||
pub(crate) fn add_succ<I: Iterator<Item = usize>>(
|
||||
&mut self,
|
||||
obj: &ObjId,
|
||||
op_indices: I,
|
||||
op: &Op,
|
||||
) {
|
||||
pub(crate) fn add_succ(&mut self, obj: &ObjId, op_indices: &[usize], op: &Op) {
|
||||
if let Some(tree) = self.trees.get_mut(obj) {
|
||||
for i in op_indices {
|
||||
tree.internal.update(i, |old_op| {
|
||||
tree.internal.update(*i, |old_op| {
|
||||
old_op.add_succ(op, |left, right| self.m.lamport_cmp(*left, *right))
|
||||
});
|
||||
}
|
||||
|
@ -231,96 +228,6 @@ impl OpSetInternal {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op {
|
||||
let q = self.search(obj, query::SeekOp::new(&op));
|
||||
|
||||
let succ = q.succ;
|
||||
let pos = q.pos;
|
||||
|
||||
self.add_succ(obj, succ.iter().copied(), &op);
|
||||
|
||||
if !op.is_delete() {
|
||||
self.insert(pos, obj, op.clone());
|
||||
}
|
||||
op
|
||||
}
|
||||
|
||||
pub(crate) fn insert_op_with_observer<Obs: OpObserver>(
|
||||
&mut self,
|
||||
obj: &ObjId,
|
||||
op: Op,
|
||||
observer: &mut Obs,
|
||||
) -> Op {
|
||||
let q = self.search(obj, query::SeekOpWithPatch::new(&op));
|
||||
|
||||
let query::SeekOpWithPatch {
|
||||
pos,
|
||||
succ,
|
||||
seen,
|
||||
values,
|
||||
had_value_before,
|
||||
..
|
||||
} = q;
|
||||
|
||||
let ex_obj = self.id_to_exid(obj.0);
|
||||
let parents = self.parents(*obj);
|
||||
|
||||
let key = match op.key {
|
||||
Key::Map(index) => self.m.props[index].clone().into(),
|
||||
Key::Seq(_) => seen.into(),
|
||||
};
|
||||
|
||||
if op.insert {
|
||||
let value = (op.value(), self.id_to_exid(op.id));
|
||||
observer.insert(parents, ex_obj, seen, value);
|
||||
} else if op.is_delete() {
|
||||
if let Some(winner) = &values.last() {
|
||||
let value = (winner.value(), self.id_to_exid(winner.id));
|
||||
let conflict = values.len() > 1;
|
||||
observer.put(parents, ex_obj, key, value, conflict);
|
||||
} else if had_value_before {
|
||||
observer.delete(parents, ex_obj, key);
|
||||
}
|
||||
} else if let Some(value) = op.get_increment_value() {
|
||||
// only observe this increment if the counter is visible, i.e. the counter's
|
||||
// create op is in the values
|
||||
//if values.iter().any(|value| op.pred.contains(&value.id)) {
|
||||
if values
|
||||
.last()
|
||||
.map(|value| op.pred.contains(&value.id))
|
||||
.unwrap_or_default()
|
||||
{
|
||||
// we have observed the value
|
||||
observer.increment(parents, ex_obj, key, (value, self.id_to_exid(op.id)));
|
||||
}
|
||||
} else {
|
||||
let winner = if let Some(last_value) = values.last() {
|
||||
if self.m.lamport_cmp(op.id, last_value.id) == Ordering::Greater {
|
||||
&op
|
||||
} else {
|
||||
last_value
|
||||
}
|
||||
} else {
|
||||
&op
|
||||
};
|
||||
let value = (winner.value(), self.id_to_exid(winner.id));
|
||||
if op.is_list_op() && !had_value_before {
|
||||
observer.insert(parents, ex_obj, seen, value);
|
||||
} else {
|
||||
let conflict = !values.is_empty();
|
||||
observer.put(parents, ex_obj, key, value, conflict);
|
||||
}
|
||||
}
|
||||
|
||||
self.add_succ(obj, succ.iter().copied(), &op);
|
||||
|
||||
if !op.is_delete() {
|
||||
self.insert(pos, obj, op.clone());
|
||||
}
|
||||
|
||||
op
|
||||
}
|
||||
|
||||
pub(crate) fn object_type(&self, id: &ObjId) -> Option<ObjType> {
|
||||
self.trees.get(id).map(|tree| tree.objtype)
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ use crate::{
|
|||
op_tree::OpTreeInternal,
|
||||
storage::load::{DocObserver, LoadedObject},
|
||||
types::{ObjId, Op},
|
||||
OpObserver,
|
||||
Automerge, OpObserver,
|
||||
};
|
||||
|
||||
/// An opset builder which creates an optree for each object as it finishes loading, inserting the
|
||||
|
@ -78,10 +78,10 @@ impl<'a, O: OpObserver> DocObserver for ObservedOpSetBuilder<'a, O> {
|
|||
}
|
||||
|
||||
fn finish(self, _metadata: super::OpSetMetadata) -> Self::Output {
|
||||
let mut opset = OpSet::new();
|
||||
let mut opset = Automerge::new();
|
||||
for (obj, op) in self.ops {
|
||||
opset.insert_op_with_observer(&obj, op, self.observer);
|
||||
}
|
||||
opset
|
||||
opset.ops
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ use std::{
|
|||
pub(crate) use crate::op_set::OpSetMetadata;
|
||||
use crate::{
|
||||
clock::Clock,
|
||||
query::{self, Index, QueryResult, ReplaceArgs, TreeQuery},
|
||||
query::{self, ChangeVisibility, Index, QueryResult, TreeQuery},
|
||||
};
|
||||
use crate::{
|
||||
types::{ObjId, Op, OpId},
|
||||
|
@ -618,24 +618,20 @@ impl OpTreeNode {
|
|||
/// Update the operation at the given index using the provided function.
|
||||
///
|
||||
/// This handles updating the indices after the update.
|
||||
pub(crate) fn update<F>(&mut self, index: usize, f: F) -> ReplaceArgs
|
||||
pub(crate) fn update<F>(&mut self, index: usize, f: F) -> ChangeVisibility
|
||||
where
|
||||
F: FnOnce(&mut Op),
|
||||
{
|
||||
if self.is_leaf() {
|
||||
let new_element = self.elements.get_mut(index).unwrap();
|
||||
let old_id = new_element.id;
|
||||
let old_visible = new_element.visible();
|
||||
let old_vis = new_element.visible();
|
||||
f(new_element);
|
||||
let replace_args = ReplaceArgs {
|
||||
old_id,
|
||||
new_id: new_element.id,
|
||||
old_visible,
|
||||
new_visible: new_element.visible(),
|
||||
new_key: new_element.elemid_or_key(),
|
||||
};
|
||||
self.index.replace(&replace_args);
|
||||
replace_args
|
||||
self.index.change_vis(ChangeVisibility {
|
||||
old_vis,
|
||||
new_vis: new_element.visible(),
|
||||
key: new_element.elemid_or_key(),
|
||||
utf16_len: new_element.width(true),
|
||||
})
|
||||
} else {
|
||||
let mut cumulative_len = 0;
|
||||
let len = self.len();
|
||||
|
@ -646,23 +642,18 @@ impl OpTreeNode {
|
|||
}
|
||||
Ordering::Equal => {
|
||||
let new_element = self.elements.get_mut(child_index).unwrap();
|
||||
let old_id = new_element.id;
|
||||
let old_visible = new_element.visible();
|
||||
let old_vis = new_element.visible();
|
||||
f(new_element);
|
||||
let replace_args = ReplaceArgs {
|
||||
old_id,
|
||||
new_id: new_element.id,
|
||||
old_visible,
|
||||
new_visible: new_element.visible(),
|
||||
new_key: new_element.elemid_or_key(),
|
||||
};
|
||||
self.index.replace(&replace_args);
|
||||
return replace_args;
|
||||
return self.index.change_vis(ChangeVisibility {
|
||||
old_vis,
|
||||
new_vis: new_element.visible(),
|
||||
key: new_element.elemid_or_key(),
|
||||
utf16_len: new_element.width(true),
|
||||
});
|
||||
}
|
||||
Ordering::Greater => {
|
||||
let replace_args = child.update(index - cumulative_len, f);
|
||||
self.index.replace(&replace_args);
|
||||
return replace_args;
|
||||
let vis_args = child.update(index - cumulative_len, f);
|
||||
return self.index.change_vis(vis_args);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,23 +10,36 @@ pub struct Parents<'a> {

impl<'a> Parents<'a> {
    pub fn path(&mut self) -> Vec<(ExId, Prop)> {
        let mut path = self.collect::<Vec<_>>();
        let mut path = self.map(|(id, prop, _)| (id, prop)).collect::<Vec<_>>();
        path.reverse();
        path
    }

    pub fn visible_path(&mut self) -> Option<Vec<(ExId, Prop)>> {
        let mut path = Vec::new();
        for (id, prop, vis) in self {
            if !vis {
                return None;
            }
            path.push((id, prop))
        }
        path.reverse();
        Some(path)
    }
}
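Aside, not part of the changeset: the distinction introduced above is that path() keeps every ancestor step while visible_path() returns None as soon as any ancestor link is not visible. A self-contained sketch of those two behaviours over plain tuples (stand-in names and types, no Automerge APIs assumed):

```rust
// Stand-in for a parent step: (object id, prop, visible). The real iterator
// yields (ExId, Prop, bool); these names are illustrative only.
type Step = (&'static str, usize, bool);

fn path(steps: &[Step]) -> Vec<(&'static str, usize)> {
    // Keep every step, dropping only the visibility flag, then reverse to
    // get a root-to-object order.
    let mut p: Vec<_> = steps.iter().map(|&(id, prop, _)| (id, prop)).collect();
    p.reverse();
    p
}

fn visible_path(steps: &[Step]) -> Option<Vec<(&'static str, usize)>> {
    // Bail out with None the moment any ancestor link is not visible.
    let mut p = Vec::new();
    for &(id, prop, vis) in steps {
        if !vis {
            return None;
        }
        p.push((id, prop));
    }
    p.reverse();
    Some(p)
}

fn main() {
    let steps = [("list", 0, true), ("map", 1, false), ("_root", 0, true)];
    assert_eq!(path(&steps).len(), 3);
    assert_eq!(visible_path(&steps), None); // one hidden ancestor hides the whole path
}
```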
|
||||
|
||||
impl<'a> Iterator for Parents<'a> {
|
||||
type Item = (ExId, Prop);
|
||||
type Item = (ExId, Prop, bool);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.obj.is_root() {
|
||||
None
|
||||
} else if let Some((obj, key)) = self.ops.parent_object(&self.obj) {
|
||||
} else if let Some((obj, key, visible)) = self.ops.parent_object(&self.obj) {
|
||||
self.obj = obj;
|
||||
Some((
|
||||
self.ops.id_to_exid(self.obj.0),
|
||||
self.ops.export_key(self.obj, key),
|
||||
visible,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
|
|
|
@ -20,6 +20,7 @@ mod map_range_at;
|
|||
mod nth;
|
||||
mod nth_at;
|
||||
mod opid;
|
||||
mod opid_vis;
|
||||
mod prop;
|
||||
mod prop_at;
|
||||
mod seek_op;
|
||||
|
@ -40,6 +41,7 @@ pub(crate) use map_range_at::MapRangeAt;
|
|||
pub(crate) use nth::Nth;
|
||||
pub(crate) use nth_at::NthAt;
|
||||
pub(crate) use opid::OpIdSearch;
|
||||
pub(crate) use opid_vis::OpIdVisSearch;
|
||||
pub(crate) use prop::Prop;
|
||||
pub(crate) use prop_at::PropAt;
|
||||
pub(crate) use seek_op::SeekOp;
|
||||
|
@ -47,12 +49,11 @@ pub(crate) use seek_op_with_patch::SeekOpWithPatch;
|
|||
|
||||
// use a struct for the args for clarity as they are passed up the update chain in the optree
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct ReplaceArgs {
|
||||
pub(crate) old_id: OpId,
|
||||
pub(crate) new_id: OpId,
|
||||
pub(crate) old_visible: bool,
|
||||
pub(crate) new_visible: bool,
|
||||
pub(crate) new_key: Key,
|
||||
pub(crate) struct ChangeVisibility {
|
||||
pub(crate) old_vis: bool,
|
||||
pub(crate) new_vis: bool,
|
||||
pub(crate) key: Key,
|
||||
pub(crate) utf16_len: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
|
@ -100,6 +101,7 @@ pub(crate) enum QueryResult {
|
|||
pub(crate) struct Index {
|
||||
/// The map of visible keys to the number of visible operations for that key.
|
||||
pub(crate) visible: HashMap<Key, usize, FxBuildHasher>,
|
||||
pub(crate) visible16: usize,
|
||||
/// Set of opids found in this node and below.
|
||||
pub(crate) ops: HashSet<OpId, FxBuildHasher>,
|
||||
}
|
||||
|
@ -108,53 +110,65 @@ impl Index {
|
|||
pub(crate) fn new() -> Self {
|
||||
Index {
|
||||
visible: Default::default(),
|
||||
visible16: 0,
|
||||
ops: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the number of visible elements in this index.
pub(crate) fn visible_len(&self) -> usize {
pub(crate) fn visible_len(&self, utf16: bool) -> usize {
    if utf16 {
        self.visible16
    } else {
        self.visible.len()
    }
}

pub(crate) fn has_visible(&self, seen: &Key) -> bool {
    self.visible.contains_key(seen)
}
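Aside, not part of the changeset: visible_len(utf16) can answer in either unit because the index keeps a cumulative UTF-16 width (visible16) next to the per-key visibility map, and only the first visible op for a key contributes to that width. A self-contained sketch of that bookkeeping (stand-in types, standard library only):

```rust
use std::collections::HashMap;

#[derive(Default)]
struct VisibleIndex {
    visible: HashMap<&'static str, usize>, // key -> number of visible ops for that key
    visible16: usize,                      // total width of visible keys in UTF-16 code units
}

impl VisibleIndex {
    fn insert(&mut self, key: &'static str, text: &str) {
        // Only a key becoming visible for the first time adds to the UTF-16 total,
        // so extra (conflicting) ops on the same key do not inflate the length.
        let n = self.visible.entry(key).or_insert(0);
        if *n == 0 {
            self.visible16 += text.encode_utf16().count();
        }
        *n += 1;
    }

    fn visible_len(&self, utf16: bool) -> usize {
        if utf16 { self.visible16 } else { self.visible.len() }
    }
}

fn main() {
    let mut idx = VisibleIndex::default();
    idx.insert("a", "🐻"); // 1 element, 2 UTF-16 units
    idx.insert("b", "A");  // 1 element, 1 UTF-16 unit
    assert_eq!(idx.visible_len(false), 2);
    assert_eq!(idx.visible_len(true), 3);
}
```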
|
||||
|
||||
pub(crate) fn replace(
|
||||
&mut self,
|
||||
ReplaceArgs {
|
||||
old_id,
|
||||
new_id,
|
||||
old_visible,
|
||||
new_visible,
|
||||
new_key,
|
||||
}: &ReplaceArgs,
|
||||
) {
|
||||
if old_id != new_id {
|
||||
self.ops.remove(old_id);
|
||||
self.ops.insert(*new_id);
|
||||
}
|
||||
|
||||
match (new_visible, old_visible, new_key) {
|
||||
(false, true, key) => match self.visible.get(key).copied() {
|
||||
pub(crate) fn change_vis(&mut self, change_vis: ChangeVisibility) -> ChangeVisibility {
|
||||
let ChangeVisibility {
|
||||
old_vis,
|
||||
new_vis,
|
||||
key,
|
||||
utf16_len,
|
||||
} = &change_vis;
|
||||
match (old_vis, new_vis) {
|
||||
(true, false) => match self.visible.get(key).copied() {
|
||||
Some(n) if n == 1 => {
|
||||
self.visible.remove(key);
|
||||
self.visible16 -= *utf16_len;
|
||||
}
|
||||
Some(n) => {
|
||||
self.visible.insert(*key, n - 1);
|
||||
}
|
||||
None => panic!("remove overun in index"),
|
||||
},
|
||||
(true, false, key) => *self.visible.entry(*key).or_default() += 1,
|
||||
(false, true) => {
|
||||
if let Some(n) = self.visible.get(key) {
|
||||
self.visible.insert(*key, n + 1);
|
||||
} else {
|
||||
self.visible.insert(*key, 1);
|
||||
self.visible16 += *utf16_len;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
change_vis
|
||||
}
|
||||
|
||||
pub(crate) fn insert(&mut self, op: &Op) {
|
||||
self.ops.insert(op.id);
|
||||
if op.visible() {
|
||||
*self.visible.entry(op.elemid_or_key()).or_default() += 1;
|
||||
let key = op.elemid_or_key();
|
||||
if let Some(n) = self.visible.get(&key) {
|
||||
self.visible.insert(key, n + 1);
|
||||
} else {
|
||||
self.visible.insert(key, 1);
|
||||
self.visible16 += op.width(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -165,6 +179,7 @@ impl Index {
|
|||
match self.visible.get(&key).copied() {
|
||||
Some(n) if n == 1 => {
|
||||
self.visible.remove(&key);
|
||||
self.visible16 -= op.width(true);
|
||||
}
|
||||
Some(n) => {
|
||||
self.visible.insert(key, n - 1);
|
||||
|
@ -178,9 +193,13 @@ impl Index {
|
|||
for id in &other.ops {
|
||||
self.ops.insert(*id);
|
||||
}
|
||||
for (elem, n) in other.visible.iter() {
|
||||
*self.visible.entry(*elem).or_default() += n;
|
||||
for (elem, other_len) in other.visible.iter() {
|
||||
self.visible
|
||||
.entry(*elem)
|
||||
.and_modify(|len| *len += *other_len)
|
||||
.or_insert(*other_len);
|
||||
}
|
||||
self.visible16 += other.visible16;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -38,7 +38,7 @@ impl<'a> TreeQuery<'a> for ElemIdPos {
|
|||
QueryResult::Descend
|
||||
} else {
|
||||
// not in this node, try the next one
|
||||
self.pos += child.index.visible_len();
|
||||
self.pos += child.index.visible_len(false);
|
||||
QueryResult::Next
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,6 +10,9 @@ pub(crate) struct InsertNth {
target: usize,
/// the number of visible operations seen
seen: usize,
seen8: usize,
last_width: usize,
utf16: bool,
//pub pos: usize,
/// the number of operations (including non-visible) that we have seen
n: usize,

@ -22,15 +25,21 @@ pub(crate) struct InsertNth {
}

impl InsertNth {
pub(crate) fn new(target: usize) -> Self {
pub(crate) fn new(target: usize, utf16: bool) -> Self {
let (valid, last_valid_insert) = if target == 0 {
(Some(0), Some(Key::Seq(HEAD)))
} else {
(None, None)
};
InsertNth {
// seen and target are both in the "native" units (utf8/utf16)
// if in utf8 mode seen8 just shadows the value
// if in utf16 mode seen will be utf16 and seen8 will be utf8
target,
seen: 0,
seen8: 0,
last_width: 0,
utf16,
n: 0,
valid,
last_seen: None,

@ -39,6 +48,15 @@ impl InsertNth {
}
}

pub(crate) fn index_utf8(&self) -> usize {
self.seen8
}

pub(crate) fn index_utf16(&self) -> usize {
// if in utf8 mode this just returns utf8
self.seen
}

pub(crate) fn pos(&self) -> usize {
self.valid.unwrap_or(self.n)
}

@ -46,26 +64,18 @@ impl InsertNth {
pub(crate) fn key(&self) -> Result<Key, AutomergeError> {
self.last_valid_insert
.ok_or(AutomergeError::InvalidIndex(self.target))
//if self.target == 0 {
/*
if self.last_insert.is_none() {
Ok(HEAD.into())
} else if self.seen == self.target && self.last_insert.is_some() {
Ok(Key::Seq(self.last_insert.unwrap()))
} else {
Err(AutomergeError::InvalidIndex(self.target))
}
*/
}
}

impl<'a> TreeQuery<'a> for InsertNth {
fn query_node(&mut self, child: &OpTreeNode) -> QueryResult {
// if this node has some visible elements then we may find our target within
let mut num_vis = child.index.visible_len();
let mut num_vis = child.index.visible_len(self.utf16);
let mut num_vis8 = child.index.visible_len(false);
if let Some(last_seen) = self.last_seen {
if child.index.has_visible(&last_seen) {
num_vis -= 1;
num_vis8 -= 1;
}
}

@ -76,6 +86,7 @@ impl<'a> TreeQuery<'a> for InsertNth {
// our target is not in this node so try the next one
self.n += child.len();
self.seen += num_vis;
self.seen8 += num_vis8;

// We have updated seen by the number of visible elements in this index, before we skip it.
// We also need to keep track of the last elemid that we have seen (and counted as seen).

@ -103,7 +114,9 @@ impl<'a> TreeQuery<'a> for InsertNth {
if self.seen >= self.target {
return QueryResult::Finish;
}
self.seen += 1;
self.last_width = element.width(self.utf16);
self.seen += self.last_width;
self.seen8 += 1;
self.last_seen = Some(element.elemid_or_key());
self.last_valid_insert = self.last_seen
}
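InsertNth now tracks the same position in two units: `seen` advances by `element.width(self.utf16)` (a UTF-16 offset when utf16 mode is on), while `seen8` advances by one per visible element. A quick standard-library illustration of why the two diverge once text contains characters outside the Basic Multilingual Plane (this assumes one list element per character, which is how splice_text inserts text):

    fn main() {
        let text = "a😀b"; // '😀' needs two UTF-16 code units
        let elements = text.chars().count(); // what seen8 would count
        let utf16_units: usize = text.chars().map(char::len_utf16).sum(); // what seen counts in utf16 mode

        assert_eq!(elements, 3);
        assert_eq!(utf16_units, 4);
    }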
@ -5,17 +5,21 @@ use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Len {
pub(crate) len: usize,
utf16: bool,
}

impl Len {
pub(crate) fn new() -> Self {
Len { len: 0 }
Len {
len: 0,
utf16: false,
}
}
}

impl<'a> TreeQuery<'a> for Len {
fn query_node(&mut self, child: &OpTreeNode) -> QueryResult {
self.len = child.index.visible_len();
self.len = child.index.visible_len(self.utf16);
QueryResult::Finish
}
}
@ -1,13 +1,17 @@
use crate::error::AutomergeError;
use crate::op_set::OpSet;
use crate::op_tree::OpTreeNode;
use crate::query::{QueryResult, TreeQuery};
use crate::types::{Key, Op};
use crate::types::{Key, Op, OpIds};
use std::fmt::Debug;

#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Nth<'a> {
target: usize,
seen: usize,
seen8: usize,
utf16: bool,
last_width: usize,
/// last_seen is the target elemid of the last `seen` operation.
/// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes.
last_seen: Option<Key>,

@ -17,10 +21,13 @@ pub(crate) struct Nth<'a> {
}

impl<'a> Nth<'a> {
pub(crate) fn new(target: usize) -> Self {
pub(crate) fn new(target: usize, utf16: bool) -> Self {
Nth {
target,
seen: 0,
seen8: 0,
last_width: 1,
utf16,
last_seen: None,
ops: vec![],
ops_pos: vec![],

@ -28,6 +35,10 @@ impl<'a> Nth<'a> {
}
}

pub(crate) fn pred(&self, ops: &OpSet) -> OpIds {
ops.m.sorted_opids(self.ops.iter().map(|o| o.id))
}

/// Get the key
pub(crate) fn key(&self) -> Result<Key, AutomergeError> {
// the query collects the ops so we can use that to get the key they all use

@ -37,14 +48,28 @@ impl<'a> Nth<'a> {
Err(AutomergeError::InvalidIndex(self.target))
}
}

pub(crate) fn index(&self) -> usize {
self.seen - self.last_width
}

pub(crate) fn index_utf16(&self) -> usize {
self.index()
}

pub(crate) fn index_utf8(&self) -> usize {
self.seen8 - 1
}
}

impl<'a> TreeQuery<'a> for Nth<'a> {
fn query_node(&mut self, child: &OpTreeNode) -> QueryResult {
let mut num_vis = child.index.visible_len();
let mut num_vis = child.index.visible_len(self.utf16);
let mut num_vis8 = child.index.visible_len(false);
if let Some(last_seen) = self.last_seen {
if child.index.has_visible(&last_seen) {
num_vis -= 1;
num_vis8 -= 1;
}
}

@ -54,6 +79,7 @@ impl<'a> TreeQuery<'a> for Nth<'a> {
// skip this node as no useful ops in it
self.pos += child.len();
self.seen += num_vis;
self.seen8 += num_vis8;

// We have updated seen by the number of visible elements in this index, before we skip it.
// We also need to keep track of the last elemid that we have seen (and counted as seen).

@ -79,11 +105,13 @@ impl<'a> TreeQuery<'a> for Nth<'a> {
}
let visible = element.visible();
if visible && self.last_seen.is_none() {
self.seen += 1;
self.last_width = element.width(self.utf16);
self.seen += self.last_width;
self.seen8 += 1;
// we have a new visible element
self.last_seen = Some(element.elemid_or_key())
}
if self.seen == self.target + 1 && visible {
if self.seen > self.target && visible {
self.ops.push(element);
self.ops_pos.push(self.pos);
}
@ -1,6 +1,6 @@
use crate::op_tree::OpTreeNode;
use crate::query::{QueryResult, TreeQuery};
use crate::types::{ElemId, Key, Op, OpId};
use crate::types::{Key, Op, OpId};

/// Search for an OpId in a tree.
/// Returns the index of the operation in the tree.

@ -30,10 +30,6 @@ impl OpIdSearch {
None
}
}

pub(crate) fn key(&self) -> &Option<Key> {
&self.key
}
}

impl<'a> TreeQuery<'a> for OpIdSearch {

@ -49,11 +45,6 @@ impl<'a> TreeQuery<'a> for OpIdSearch {
fn query_element(&mut self, element: &Op) -> QueryResult {
if element.id == self.target {
self.found = true;
if element.insert {
self.key = Some(Key::Seq(ElemId(element.id)));
} else {
self.key = Some(element.key);
}
QueryResult::Finish
} else {
self.pos += 1;
62
rust/automerge/src/query/opid_vis.rs
Normal file
@ -0,0 +1,62 @@
use crate::op_tree::OpTreeNode;
use crate::query::{QueryResult, TreeQuery};
use crate::types::{Key, Op, OpId};

/// Search for an OpId in a tree.
/// Returns the index of the operation in the tree.
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct OpIdVisSearch {
target: OpId,
found: bool,
pub(crate) visible: bool,
key: Option<Key>,
}

impl OpIdVisSearch {
pub(crate) fn new(target: OpId) -> Self {
OpIdVisSearch {
target,
found: false,
visible: true,
key: None,
}
}

pub(crate) fn key(&self) -> &Option<Key> {
&self.key
}
}

impl<'a> TreeQuery<'a> for OpIdVisSearch {
fn query_node(&mut self, child: &OpTreeNode) -> QueryResult {
if child.index.ops.contains(&self.target) {
QueryResult::Descend
} else {
QueryResult::Next
}
}

fn query_element(&mut self, element: &Op) -> QueryResult {
if element.id == self.target {
self.found = true;
self.key = Some(element.elemid_or_key());
if element.visible() {
QueryResult::Next
} else {
self.visible = false;
QueryResult::Finish
}
} else if self.found {
if self.key != Some(element.elemid_or_key()) {
QueryResult::Finish
} else if element.visible() {
self.visible = false;
QueryResult::Finish
} else {
QueryResult::Next
}
} else {
QueryResult::Next
}
}
}
@ -45,7 +45,7 @@ impl<'a> TreeQuery<'a> for Prop<'a> {
{
if self.pos + child.len() >= start {
// skip empty nodes
if child.index.visible_len() == 0 {
if child.index.visible_len(false) == 0 {
if self.pos + child.len() >= optree_len {
self.pos = optree_len;
QueryResult::Finish
@ -14,6 +14,7 @@ pub(crate) struct SeekOp<'a> {
pub(crate) succ: Vec<usize>,
/// whether a position has been found
found: bool,
utf16: bool,
/// The found start position of the key if there is one yet (for map objects).
start: Option<usize>,
}

@ -25,6 +26,7 @@ impl<'a> SeekOp<'a> {
succ: vec![],
pos: 0,
found: false,
utf16: false,
start: None,
}
}

@ -70,7 +72,7 @@ impl<'a> TreeQuery<'a> for SeekOp<'a> {
if let Some(start) = self.start {
if self.pos + child.len() >= start {
// skip empty nodes
if child.index.visible_len() == 0 {
if child.index.visible_len(false) == 0 {
self.pos += child.len();
QueryResult::Next
} else {
@ -10,7 +10,10 @@ pub(crate) struct SeekOpWithPatch<'a> {
pub(crate) pos: usize,
pub(crate) succ: Vec<usize>,
found: bool,
utf16: bool,
pub(crate) seen: usize,
pub(crate) seen8: usize,
pub(crate) last_width: usize,
last_seen: Option<Key>,
pub(crate) values: Vec<&'a Op>,
pub(crate) had_value_before: bool,

@ -19,13 +22,16 @@ pub(crate) struct SeekOpWithPatch<'a> {
}

impl<'a> SeekOpWithPatch<'a> {
pub(crate) fn new(op: &Op) -> Self {
pub(crate) fn new(op: &Op, utf16: bool) -> Self {
SeekOpWithPatch {
op: op.clone(),
succ: vec![],
pos: 0,
found: false,
utf16,
seen: 0,
seen8: 0,
last_width: 0,
last_seen: None,
values: vec![],
had_value_before: false,

@ -57,7 +63,8 @@ impl<'a> SeekOpWithPatch<'a> {
self.last_seen = None
}
if e.visible() && self.last_seen.is_none() {
self.seen += 1;
self.seen += e.width(self.utf16);
self.seen8 += 1;
self.last_seen = Some(e.elemid_or_key())
}
}

@ -101,7 +108,8 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> {
// elements it contains. However, it could happen that a visible element is
// split across two tree nodes. To avoid double-counting in this situation, we
// subtract one if the last visible element also appears in this tree node.
let mut num_vis = child.index.visible_len();
let mut num_vis = child.index.visible_len(self.utf16);
let mut num_vis8 = child.index.visible_len(false);
if num_vis > 0 {
// FIXME: I think this is wrong: we should subtract one only if this
// subtree contains a *visible* (i.e. empty succs) operation for the list

@ -110,9 +118,12 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> {
if let Some(last_seen) = self.last_seen {
if child.index.has_visible(&last_seen) {
num_vis -= 1;
num_vis8 -= 1;
}
}
self.seen += num_vis;
self.seen8 += num_vis8;
//self.seen += child.index.visible16;

// FIXME: this is also wrong: `last_seen` needs to be the elemId of the
// last *visible* list element in this subtree, but I think this returns

@ -130,7 +141,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> {
if let Some(start) = self.start {
if self.pos + child.len() >= start {
// skip empty nodes
if child.index.visible_len() == 0 {
if child.index.visible_len(self.utf16) == 0 {
self.pos += child.len();
QueryResult::Next
} else {

@ -173,6 +184,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> {
self.values.push(e);
}
self.succ.push(self.pos);
self.last_width = e.width(self.utf16);

if e.visible() {
self.had_value_before = true;

@ -218,6 +230,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> {
self.values.push(e);
}
self.succ.push(self.pos);
self.last_width = e.width(self.utf16);
}
if e.visible() {
self.had_value_before = true;

@ -235,6 +248,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> {
self.values.push(e);
}
self.succ.push(self.pos);
self.last_width = e.width(self.utf16);
}

// If the new op is an insertion, skip over any existing list elements whose elemId is
@ -4,7 +4,7 @@ use crate::automerge::Actor;
use crate::exid::ExId;
use crate::query::{self, OpIdSearch};
use crate::storage::Change as StoredChange;
use crate::types::{Key, ObjId, OpId};
use crate::types::{Key, ObjId, OpId, OpIds};
use crate::{op_tree::OpSetMetadata, types::Op, Automerge, Change, ChangeHash, OpObserver, Prop};
use crate::{AutomergeError, ObjType, OpType, ScalarValue};

@ -16,7 +16,7 @@ pub(crate) struct TransactionInner {
time: i64,
message: Option<String>,
deps: Vec<ChangeHash>,
operations: Vec<(ObjId, Prop, Op)>,
operations: Vec<(ObjId, Op)>,
}

/// Arguments required to create a new transaction

@ -117,8 +117,6 @@ impl TransactionInner {
use crate::storage::{change::PredOutOfOrder, convert::op_as_actor_id};

let actor = metadata.actors.get(self.actor).clone();
let ops = self.operations.iter().map(|o| (&o.0, &o.2));
//let (ops, other_actors) = encode_change_ops(ops, actor.clone(), actors, props);
let deps = self.deps.clone();
let stored = match StoredChange::builder()
.with_actor(actor)

@ -128,7 +126,8 @@ impl TransactionInner {
.with_dependencies(deps)
.with_timestamp(self.time)
.build(
ops.into_iter()
self.operations
.iter()
.map(|(obj, op)| op_as_actor_id(obj, op, metadata)),
) {
Ok(s) => s,

@ -152,10 +151,10 @@ impl TransactionInner {
pub(crate) fn rollback(self, doc: &mut Automerge) -> usize {
let num = self.pending_ops();
// remove in reverse order so sets are removed before makes etc...
for (obj, _prop, op) in self.operations.into_iter().rev() {
for (obj, op) in self.operations.into_iter().rev() {
for pred_id in &op.pred {
if let Some(p) = doc.ops.search(&obj, OpIdSearch::new(*pred_id)).index() {
doc.ops.replace(&obj, p, |o| o.remove_succ(&op));
doc.ops.change_vis(&obj, p, |o| o.remove_succ(&op));
}
}
if let Some(pos) = doc.ops.search(&obj, OpIdSearch::new(op.id)).index() {

@ -196,6 +195,15 @@ impl TransactionInner {
let obj = doc.exid_to_obj(ex_obj)?;
let value = value.into();
let prop = prop.into();
let obj_type = doc
.ops
.object_type(&obj)
.ok_or(AutomergeError::NotAnObject)?;
match (&prop, obj_type) {
(Prop::Map(_), ObjType::Map) => Ok(()),
(Prop::Seq(_), ObjType::List) => Ok(()),
_ => Err(AutomergeError::InvalidOp(obj_type)),
}?;
self.local_op(doc, op_observer, obj, prop, value.into())?;
Ok(())
}

@ -223,6 +231,15 @@ impl TransactionInner {
) -> Result<ExId, AutomergeError> {
let obj = doc.exid_to_obj(ex_obj)?;
let prop = prop.into();
let obj_type = doc
.ops
.object_type(&obj)
.ok_or(AutomergeError::NotAnObject)?;
match (&prop, obj_type) {
(Prop::Map(_), ObjType::Map) => Ok(()),
(Prop::Seq(_), ObjType::List) => Ok(()),
_ => Err(AutomergeError::InvalidOp(obj_type)),
}?;
let id = self
.local_op(doc, op_observer, obj, prop, value.into())?
.unwrap();

@ -234,6 +251,28 @@ impl TransactionInner {
OpId(self.start_op.get() + self.pending_ops() as u64, self.actor)
}

fn next_insert(&mut self, key: Key, value: ScalarValue) -> Op {
Op {
id: self.next_id(),
action: OpType::Put(value),
key,
succ: Default::default(),
pred: Default::default(),
insert: true,
}
}

fn next_delete(&mut self, key: Key, pred: OpIds) -> Op {
Op {
id: self.next_id(),
action: OpType::Delete,
key,
succ: Default::default(),
pred,
insert: false,
}
}

#[allow(clippy::too_many_arguments)]
fn insert_local_op<Obs: OpObserver>(
&mut self,

@ -245,7 +284,7 @@ impl TransactionInner {
obj: ObjId,
succ_pos: &[usize],
) {
doc.ops.add_succ(&obj, succ_pos.iter().copied(), &op);
doc.ops.add_succ(&obj, succ_pos, &op);

if !op.is_delete() {
doc.ops.insert(pos, &obj, op.clone());

@ -263,6 +302,13 @@ impl TransactionInner {
value: V,
) -> Result<(), AutomergeError> {
let obj = doc.exid_to_obj(ex_obj)?;
let obj_type = doc
.ops
.object_type(&obj)
.ok_or(AutomergeError::NotAnObject)?;
if obj_type != ObjType::List {
return Err(AutomergeError::InvalidOp(obj_type));
}
let value = value.into();
tracing::trace!(obj=?obj, value=?value, "inserting value");
self.do_insert(doc, op_observer, obj, index, value.into())?;

@ -278,6 +324,13 @@ impl TransactionInner {
value: ObjType,
) -> Result<ExId, AutomergeError> {
let obj = doc.exid_to_obj(ex_obj)?;
let obj_type = doc
.ops
.object_type(&obj)
.ok_or(AutomergeError::NotAnObject)?;
if obj_type != ObjType::List {
return Err(AutomergeError::InvalidOp(obj_type));
}
let id = self.do_insert(doc, op_observer, obj, index, value.into())?;
let id = doc.id_to_exid(id);
Ok(id)

@ -293,7 +346,7 @@ impl TransactionInner {
) -> Result<OpId, AutomergeError> {
let id = self.next_id();

let query = doc.ops.search(&obj, query::InsertNth::new(index));
let query = doc.ops.search(&obj, query::InsertNth::new(index, false));

let key = query.key()?;

@ -384,7 +437,7 @@ impl TransactionInner {
index: usize,
action: OpType,
) -> Result<Option<OpId>, AutomergeError> {
let query = doc.ops.search(&obj, query::Nth::new(index));
let query = doc.ops.search(&obj, query::Nth::new(index, false));

let id = self.next_id();
let pred = doc.ops.m.sorted_opids(query.ops.iter().map(|o| o.id));

@ -438,7 +491,24 @@ impl TransactionInner {
) -> Result<(), AutomergeError> {
let obj = doc.exid_to_obj(ex_obj)?;
let prop = prop.into();
let obj_type = doc
.ops
.object_type(&obj)
.ok_or(AutomergeError::NotAnObject)?;
if obj_type == ObjType::Text {
let pos = prop.to_index().ok_or(AutomergeError::InvalidOp(obj_type))?;
self.inner_splice(
doc,
op_observer,
obj,
pos,
1,
vec![],
SpliceType::Text("", doc.utf16),
)?;
} else {
self.local_op(doc, op_observer, obj, prop, OpType::Delete)?;
}
Ok(())
}

@ -447,30 +517,170 @@ impl TransactionInner {
pub(crate) fn splice<Obs: OpObserver>(
&mut self,
doc: &mut Automerge,
mut op_observer: Option<&mut Obs>,
op_observer: Option<&mut Obs>,
ex_obj: &ExId,
mut pos: usize,
pos: usize,
del: usize,
vals: impl IntoIterator<Item = ScalarValue>,
) -> Result<(), AutomergeError> {
let obj = doc.exid_to_obj(ex_obj)?;
let obj_type = doc
.ops
.object_type(&obj)
.ok_or(AutomergeError::NotAnObject)?;
if obj_type != ObjType::List {
return Err(AutomergeError::InvalidOp(obj_type));
}
let vals = vals.into_iter().collect();
self.inner_splice(doc, op_observer, obj, pos, del, vals, SpliceType::List)
}

/// Splice string into a text object
pub(crate) fn splice_text<Obs: OpObserver>(
&mut self,
doc: &mut Automerge,
op_observer: Option<&mut Obs>,
ex_obj: &ExId,
pos: usize,
del: usize,
text: &str,
) -> Result<(), AutomergeError> {
let obj = doc.exid_to_obj(ex_obj)?;
let obj_type = doc
.ops
.object_type(&obj)
.ok_or(AutomergeError::NotAnObject)?;
if obj_type != ObjType::Text {
return Err(AutomergeError::InvalidOp(obj_type));
}
let vals = text.chars().map(ScalarValue::from).collect();
self.inner_splice(
doc,
op_observer,
obj,
pos,
del,
vals,
SpliceType::Text(text, doc.utf16),
)
}

#[allow(clippy::too_many_arguments)]
fn inner_splice<Obs: OpObserver>(
&mut self,
doc: &mut Automerge,
mut op_observer: Option<&mut Obs>,
obj: ObjId,
mut index: usize,
mut del: usize,
vals: Vec<ScalarValue>,
splice_type: SpliceType<'_>,
) -> Result<(), AutomergeError> {
let ex_obj = doc.ops.id_to_exid(obj.0);
let utf16 = splice_type.is_utf16();
let mut index8 = index;
let mut index16 = index;
let mut del16 = 0;

// delete `del` items - performing the query for each one
let mut i = 0;
while i < del {
// TODO: could do this with a single custom query
let query = doc.ops.search(&obj, query::Nth::new(index, utf16));

// adjust index if we happen to be on a half character
index = query.index();

if i == 0 {
// set here incase no inserts
index8 = query.index_utf8();
index16 = query.index_utf16();
}

let step = if let Some(op) = query.ops.last() {
op.width(utf16)
} else {
// truncate the delete if past the end
del = i;
break;
};

let op = self.next_delete(query.key()?, query.pred(&doc.ops));

doc.ops.add_succ(&obj, &query.ops_pos, &op);

self.operations.push((obj, op));

del16 += step;

i += step;
}

if del > 0 {
if let Some(obs) = op_observer.as_mut() {
if utf16 {
obs.delete_utf16(doc, ex_obj.clone(), (index8, index16), (del, del16));
} else {
// TODO: obs.delete() should take a length
for _ in 0..del {
// This unwrap and rewrap of the option is necessary to appeas the borrow checker :(
if let Some(obs) = op_observer.as_mut() {
self.local_op(doc, Some(*obs), obj, pos.into(), OpType::Delete)?;
} else {
self.local_op::<Obs>(doc, None, obj, pos.into(), OpType::Delete)?;
obs.delete(doc, ex_obj.clone(), Prop::Seq(index));
}
}
for v in vals {
// As above this unwrap and rewrap of the option is necessary to appeas the borrow checker :(
if let Some(obs) = op_observer.as_mut() {
self.do_insert(doc, Some(*obs), obj, pos, v.clone().into())?;
} else {
self.do_insert::<Obs>(doc, None, obj, pos, v.clone().into())?;
}
}

// do the insert query for the first item and then
// insert the remaining ops one after the other
if !vals.is_empty() {
let query = doc.ops.search(&obj, query::InsertNth::new(index, utf16));
let mut pos = query.pos();
let mut key = query.key()?;

// set here incase no deletes
index8 = query.index_utf8();
index16 = query.index_utf16();
let len8 = vals.len();

for v in &vals {
let op = self.next_insert(key, v.clone());

doc.ops.insert(pos, &obj, op.clone());

pos += 1;
key = op.id.into();

self.operations.push((obj, op));
}

// handle the observer
if let Some(obs) = op_observer.as_mut() {
match splice_type {
SpliceType::List => {
let start = self.operations.len() - len8;
for (offset, v) in vals.iter().enumerate() {
let op = &self.operations[start + offset].1;
let value = (v.clone().into(), doc.ops.id_to_exid(op.id));
obs.insert(doc, ex_obj.clone(), index + offset, value)
}
}
SpliceType::Text(text, utf16) => {
if utf16 {
let len16 = text.encode_utf16().count();
obs.splice_text_utf16(
doc,
ex_obj,
(index8, index16),
(len8, len16),
text,
)
} else {
obs.splice_text(doc, ex_obj, index8, text)
}
}
}
}
}

Ok(())
}

@ -485,28 +695,41 @@ impl TransactionInner {
// TODO - id_to_exid should be a noop if not used - change type to Into<ExId>?
if let Some(op_observer) = op_observer {
let ex_obj = doc.ops.id_to_exid(obj.0);
let parents = doc.ops.parents(obj);
if op.insert {
let obj_type = doc.ops.object_type(&obj);
match (obj_type, prop) {
(Some(ObjType::List), Prop::Seq(index)) => {
let value = (op.value(), doc.ops.id_to_exid(op.id));
match prop {
Prop::Map(_) => panic!("insert into a map"),
Prop::Seq(index) => op_observer.insert(parents, ex_obj, index, value),
op_observer.insert(doc, ex_obj, index, value)
}
(Some(ObjType::Text), Prop::Seq(index)) => {
// FIXME
op_observer.splice_text(doc, ex_obj, index, op.to_str())
}
// this should be a warning - not a panic
_ => panic!("insert into a map"),
}
} else if op.is_delete() {
op_observer.delete(parents, ex_obj, prop.clone());
op_observer.delete(doc, ex_obj, prop);
} else if let Some(value) = op.get_increment_value() {
op_observer.increment(
parents,
ex_obj,
prop.clone(),
(value, doc.ops.id_to_exid(op.id)),
);
op_observer.increment(doc, ex_obj, prop, (value, doc.ops.id_to_exid(op.id)));
} else {
let value = (op.value(), doc.ops.id_to_exid(op.id));
op_observer.put(parents, ex_obj, prop.clone(), value, false);
op_observer.put(doc, ex_obj, prop, value, false);
}
}
self.operations.push((obj, prop, op));
self.operations.push((obj, op));
}
}

enum SpliceType<'a> {
List,
Text(&'a str, bool /* utf16 */),
}

impl<'a> SpliceType<'a> {
fn is_utf16(&self) -> bool {
matches!(self, SpliceType::Text(_, true))
}
}
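With splice_text now a first-class operation that validates the target's object type, text edits go through a string API rather than per-character splices. A hedged usage sketch against the public AutoCommit API, based on the calls exercised by the tests later in this diff (object names and values are illustrative):

    use automerge::{transaction::Transactable, AutoCommit, AutomergeError, ObjType, ROOT};

    fn main() -> Result<(), AutomergeError> {
        let mut doc = AutoCommit::new();

        // Text objects are edited with splice_text...
        let text = doc.put_object(&ROOT, "note", ObjType::Text)?;
        doc.splice_text(&text, 0, 0, "hello")?;
        doc.splice_text(&text, 5, 0, " world")?;

        // ...while plain lists still use splice with scalar values.
        let list = doc.put_object(&ROOT, "tags", ObjType::List)?;
        doc.splice(&list, 0, 0, vec!["a".into(), "b".into()])?;

        // Calling the text API on a list is now rejected instead of silently accepted.
        assert!(doc.splice_text(&list, 0, 0, "nope").is_err());
        Ok(())
    }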
@ -191,6 +191,16 @@ impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> {
self.do_tx(|tx, doc, obs| tx.splice(doc, obs, obj.as_ref(), pos, del, vals))
}

fn splice_text<O: AsRef<ExId>>(
&mut self,
obj: O,
pos: usize,
del: usize,
text: &str,
) -> Result<(), AutomergeError> {
self.do_tx(|tx, doc, obs| tx.splice_text(doc, obs, obj.as_ref(), pos, del, text))
}

fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_, '_> {
self.doc.keys(obj)
}
@ -91,10 +91,7 @@ pub trait Transactable {
pos: usize,
del: usize,
text: &str,
) -> Result<(), AutomergeError> {
let vals = text.chars().map(|c| c.into());
self.splice(obj, pos, del, vals)
}
) -> Result<(), AutomergeError>;

/// Get the keys of the given object, it should be a map.
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_, '_>;

@ -193,9 +190,7 @@ pub trait Transactable {
fn parents<O: AsRef<ExId>>(&self, obj: O) -> Result<Parents<'_>, AutomergeError>;

fn path_to_object<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<(ExId, Prop)>, AutomergeError> {
let mut path = self.parents(obj.as_ref().clone())?.collect::<Vec<_>>();
path.reverse();
Ok(path)
Ok(self.parents(obj.as_ref().clone())?.path())
}

/// The heads this transaction will be based on
@ -393,6 +393,15 @@ pub enum Prop {
Seq(usize),
}

impl Prop {
pub(crate) fn to_index(&self) -> Option<usize> {
match self {
Prop::Map(_) => None,
Prop::Seq(n) => Some(*n),
}
}
}

impl Display for Prop {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {

@ -491,6 +500,26 @@ impl Op {
}
}

pub(crate) fn width(&self, utf16: bool) -> usize {
if utf16 {
if let OpType::Put(ScalarValue::Str(s)) = &self.action {
s.encode_utf16().count()
} else {
1 // "\u{fffc}".to_owned().encode_utf16().count()
}
} else {
1
}
}

pub(crate) fn to_str(&self) -> &str {
if let OpType::Put(ScalarValue::Str(s)) = &self.action {
s
} else {
"\u{fffc}"
}
}

pub(crate) fn visible(&self) -> bool {
if self.is_inc() {
false
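Op::width is what makes the utf16 flag matter: in utf8 mode every list element counts as 1, while in utf16 mode a string element contributes its UTF-16 code-unit count. A small standalone illustration of that counting rule (the helper below is hypothetical, mirroring the logic above without the crate-private Op type):

    // Hypothetical stand-in for Op::width: an element is either a string put or
    // something else (object marker, non-string scalar), which always counts as 1.
    fn width(element_str: Option<&str>, utf16: bool) -> usize {
        match (utf16, element_str) {
            (true, Some(s)) => s.encode_utf16().count(),
            _ => 1,
        }
    }

    fn main() {
        assert_eq!(width(Some("a"), true), 1);
        assert_eq!(width(Some("😀"), false), 1); // utf8 mode: one element
        assert_eq!(width(Some("😀"), true), 2);  // utf16 mode: two code units
        assert_eq!(width(None, true), 1);        // non-string elements count as 1
    }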
@ -1123,8 +1123,7 @@ fn test_merging_test_conflicts_then_saving_and_loading() {

let mut doc1 = new_doc_with_actor(actor1);
let text = doc1.put_object(ROOT, "text", ObjType::Text).unwrap();
doc1.splice(&text, 0, 0, "hello".chars().map(|c| c.to_string().into()))
.unwrap();
doc1.splice_text(&text, 0, 0, "hello").unwrap();

let mut doc2 = AutoCommit::load(&doc1.save()).unwrap();
doc2.set_actor(actor2);

@ -1133,11 +1132,10 @@ fn test_merging_test_conflicts_then_saving_and_loading() {
"text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"o"}]},
}};

doc2.splice(&text, 4, 1, Vec::new()).unwrap();
doc2.splice(&text, 4, 0, vec!["!".into()]).unwrap();
doc2.splice(&text, 5, 0, vec![" ".into()]).unwrap();
doc2.splice(&text, 6, 0, "world".chars().map(|c| c.into()))
.unwrap();
doc2.splice_text(&text, 4, 1, "").unwrap();
doc2.splice_text(&text, 4, 0, "!").unwrap();
doc2.splice_text(&text, 5, 0, " ").unwrap();
doc2.splice_text(&text, 6, 0, "world").unwrap();

assert_doc!(
doc2.document(),

@ -1373,3 +1371,29 @@ fn simple_bad_saveload() {
let bytes = doc.save();
Automerge::load(&bytes).unwrap();
}

#[test]
fn ops_on_wrong_objets() -> Result<(), AutomergeError> {
let mut doc = AutoCommit::new();
let list = doc.put_object(&automerge::ROOT, "list", ObjType::List)?;
doc.insert(&list, 0, "a")?;
doc.insert(&list, 1, "b")?;
let e1 = doc.put(&list, "a", "AAA");
assert_eq!(e1, Err(AutomergeError::InvalidOp(ObjType::List)));
let e2 = doc.splice_text(&list, 0, 0, "hello world");
assert_eq!(e2, Err(AutomergeError::InvalidOp(ObjType::List)));
let map = doc.put_object(&automerge::ROOT, "map", ObjType::Map)?;
doc.put(&map, "a", "AAA")?;
doc.put(&map, "b", "BBB")?;
let e3 = doc.insert(&map, 0, "b");
assert_eq!(e3, Err(AutomergeError::InvalidOp(ObjType::Map)));
let e4 = doc.splice_text(&map, 0, 0, "hello world");
assert_eq!(e4, Err(AutomergeError::InvalidOp(ObjType::Map)));
let text = doc.put_object(&automerge::ROOT, "text", ObjType::Text)?;
doc.splice_text(&text, 0, 0, "hello world")?;
let e5 = doc.put(&text, "a", "AAA");
assert_eq!(e5, Err(AutomergeError::InvalidOp(ObjType::Text)));
let e6 = doc.insert(&text, 0, "b");
assert_eq!(e6, Err(AutomergeError::InvalidOp(ObjType::Text)));
Ok(())
}
@ -1,12 +1,9 @@
// Apply the paper editing trace to an Automerge.Text object, one char at a time
const { edits, finalText } = require('./editing-trace')
const Automerge = require('../automerge-js')
const wasm_api = require('../automerge-wasm')

Automerge.use(wasm_api)
const Automerge = require('../../javascript')

const start = new Date()
let state = Automerge.from({text: new Automerge.Text()})
let state = Automerge.from({text: ""})

state = Automerge.change(state, doc => {
for (let i = 0; i < edits.length; i++) {

@ -14,14 +11,13 @@ state = Automerge.change(state, doc => {
console.log(`Processed ${i} edits in ${new Date() - start} ms`)
}
let edit = edits[i]
if (edit[1] > 0) doc.text.deleteAt(edit[0], edit[1])
if (edit.length > 2) doc.text.insertAt(edit[0], ...edit.slice(2))
Automerge.splice(doc, 'text', ... edit)
}
})

let _ = Automerge.save(state)
console.log(`Done in ${new Date() - start} ms`)

if (state.text.join('') !== finalText) {
if (state.text !== finalText) {
throw new RangeError('ERROR: final text did not match expectation')
}