Compare commits

...

18 commits
main ... patch2

Author SHA1 Message Date
Orion Henry
0db2943b23 Merge remote-tracking branch 'origin/main' into patch2 2022-06-07 15:11:41 +02:00
Orion Henry
68ed77a5c6 merge OpObserver into AutoCommit 2022-06-07 14:46:19 +02:00
Orion Henry
76a172a838 replaced path with parents 2022-06-04 21:16:31 -04:00
Orion Henry
9460d5948e patch2 wip 2022-05-27 08:52:36 -07:00
Orion Henry
df8cae8a2b README 2022-05-23 19:25:23 +02:00
Orion Henry
3a44ccd52d clean up lint, simplify package, hand write an index.d.ts 2022-05-23 19:04:31 +02:00
Orion Henry
07f5678a2b linting in wasm 2022-05-22 13:54:59 -04:00
Orion Henry
d638a41a6c record type 2022-05-22 13:53:11 -04:00
Orion Henry
bd35361354 fixed typescript errors, pull wasm dep (mostly) out 2022-05-22 13:53:11 -04:00
Scott Trinh
d2fba6bf04 Use an UnknownObject type alias 2022-05-22 13:53:11 -04:00
Orion Henry
fd02585d2a removed a bunch of lint errors 2022-05-22 13:53:11 -04:00
Orion Henry
515a2eb94b removing some ts errors 2022-05-22 13:53:11 -04:00
Orion Henry
5e1bdb79ed eslint --fix 2022-05-22 13:53:11 -04:00
Orion Henry
1cf8f80ba4 pull wasm out of deps 2022-05-22 13:53:11 -04:00
Orion Henry
226bbeb023 tslint to eslint 2022-05-22 13:53:11 -04:00
Orion Henry
1eec70f116 example webpack for js 2022-05-22 13:53:11 -04:00
Orion Henry
4f898b67b3 able to build npm package 2022-05-22 13:53:11 -04:00
Orion Henry
551f6e1343 convert automerge-js to typescript 2022-05-22 13:53:11 -04:00
69 changed files with 1950 additions and 1673 deletions

View file

@ -137,7 +137,7 @@ pub unsafe extern "C" fn AMcommit(
if let Some(time) = time.as_ref() {
options.set_time(*time);
}
to_result(doc.commit_with::<()>(options))
to_result(doc.commit_with(options))
}
/// \memberof AMdoc

View file

@ -0,0 +1,2 @@
dist
examples

View file

@ -0,0 +1,11 @@
// ESLint configuration for the TypeScript sources in this package.
module.exports = {
  root: true, // stop ESLint from searching parent directories for more configs
  parser: '@typescript-eslint/parser', // parse TypeScript syntax
  plugins: [
    '@typescript-eslint',
  ],
  extends: [
    // baseline recommended rules, plus the TypeScript-aware rule set
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
  ],
};

View file

@ -1,2 +1,4 @@
/node_modules
/yarn.lock
dist
index.d.ts

10
automerge-js/LICENSE Normal file
View file

@ -0,0 +1,10 @@
MIT License
Copyright 2022, Ink & Switch LLC
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

27
automerge-js/README.md Normal file
View file

@ -0,0 +1,27 @@
## Automerge JS
This is a reimplementation of Automerge as a JavaScript wrapper around the "automerge-wasm" package.
This package is in alpha and feedback is welcome.
The primary differences between using this package and "automerge" are as follows:
1. The low-level API needs to be plugged in via the `use` function. The only current implementation is "automerge-wasm", but another could be used in theory.
```js
import * as Automerge from "automerge-js"
import * as wasm_api from "automerge-wasm"
// browsers require an async wasm load - see automerge-wasm docs
Automerge.use(wasm_api)
```
2. There is no front-end back-end split, and no patch format or patch observer. These concepts don't make sense with the wasm implementation.
3. The basic `Doc<T>` object is now a Proxy object and will behave differently in a repl environment.
4. The 'Text' class is currently very slow and needs to be re-worked.
Beyond this please refer to the Automerge [README](http://github.com/automerge/automerge/) for further information.

View file

@ -0,0 +1,8 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"target": "es6",
"module": "es6",
"outDir": "../dist/mjs"
}
}

View file

@ -0,0 +1,5 @@
yarn.lock
node_modules
public/*.wasm
public/main.js
dist

View file

@ -0,0 +1,21 @@
{
"name": "webpack-automerge-example",
"version": "0.1.0",
"description": "",
"private": true,
"scripts": {
"build": "webpack",
"start": "serve public",
"test": "node dist/node.js"
},
"author": "",
"dependencies": {
"automerge-js": "file:automerge-js-0.1.0.tgz"
},
"devDependencies": {
"serve": "^13.0.2",
"webpack": "^5.72.1",
"webpack-cli": "^4.9.2",
"webpack-node-externals": "^3.0.0"
}
}

View file

@ -0,0 +1,10 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>Simple Webpack for automerge-wasm</title>
</head>
<body>
<script src="main.js"></script>
</body>
</html>

View file

@ -0,0 +1,22 @@
import * as Automerge from "automerge-js"
import init from "automerge-wasm"
// hello world code that will run correctly on web or node
init().then((api) => {
  // plug the low-level wasm API into the JS wrapper before any other call
  Automerge.use(api)
  let doc = Automerge.init()
  doc = Automerge.change(doc, (d) => d.hello = "from automerge-js")
  const result = JSON.stringify(doc)
  if (typeof document !== 'undefined') {
    // browser: render the serialized document into the page
    const element = document.createElement('div');
    element.innerHTML = JSON.stringify(result)
    document.body.appendChild(element);
  } else {
    // server: just print it
    console.log("node:", result)
  }
})

View file

@ -0,0 +1,35 @@
const path = require('path');
const nodeExternals = require('webpack-node-externals');
// the most basic webpack config for node or web targets for automerge-wasm
const serverConfig = {
  // basic setup for bundling a node package
  target: 'node',
  externals: [nodeExternals()], // don't bundle anything from node_modules
  externalsPresets: { node: true }, // treat node built-ins (fs, path, ...) as external
  entry: './src/index.js',
  output: {
    filename: 'node.js',
    path: path.resolve(__dirname, 'dist'),
  },
  mode: "development", // or production
};
const clientConfig = {
  target: 'web',
  entry: './src/index.js',
  output: {
    filename: 'main.js',
    path: path.resolve(__dirname, 'public'),
  },
  mode: "development", // or production
  performance: { // we don't want the wasm blob to generate size warnings
    hints: false,
    maxEntrypointSize: 512000,
    maxAssetSize: 512000
  }
};
module.exports = [serverConfig, clientConfig];

View file

@ -1,13 +1,46 @@
{
"name": "automerge-js",
"collaborators": [
"Orion Henry <orion@inkandswitch.com>",
"Martin Kleppmann"
],
"version": "0.1.0",
"main": "src/index.js",
"description": "Reimplementation of `automerge` on top of the automerge-wasm backend",
"homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js",
"repository": "github:automerge/automerge-rs",
"files": [
"README.md",
"LICENSE",
"package.json",
"index.d.ts",
"dist/constants.js",
"dist/types.js",
"dist/numbers.js",
"dist/index.js",
"dist/uuid.js",
"dist/counter.js",
"dist/low_level.js",
"dist/text.js",
"dist/proxies.js"
],
"types": "index.d.ts",
"main": "./dist/index.js",
"license": "MIT",
"scripts": {
"test": "mocha --bail --full-trace"
"lint": "eslint src",
"build": "tsc",
"test": "ts-mocha test/*.ts"
},
"devDependencies": {
"mocha": "^9.1.1"
"@types/expect": "^24.3.0",
"@types/mocha": "^9.1.1",
"@types/uuid": "^8.3.4",
"@typescript-eslint/eslint-plugin": "^5.25.0",
"@typescript-eslint/parser": "^5.25.0",
"eslint": "^8.15.0",
"mocha": "^10.0.0",
"ts-mocha": "^10.0.0",
"typescript": "^4.6.4"
},
"dependencies": {
"automerge-wasm": "file:../automerge-wasm",

View file

@ -1,18 +0,0 @@
// Properties of the document root object
//const OPTIONS = Symbol('_options') // object containing options passed to init()
//const CACHE = Symbol('_cache') // map from objectId to immutable object
const STATE = Symbol('_state') // the underlying automerge-wasm document handle
const HEADS = Symbol('_heads') // heads captured while a change is in flight (set => doc is out of date)
const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string; "_root" for the root)
const READ_ONLY = Symbol('_readOnly') // false only on the mutable proxies handed to change callbacks
const FROZEN = Symbol('_frozen') // true once the document has been superseded and must not be reused
// Properties of all Automerge objects
//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string)
//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts
//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback
//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element
module.exports = {
  STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN
}

View file

@ -0,0 +1,15 @@
// Properties of the document root object
//const OPTIONS = Symbol('_options') // object containing options passed to init()
//const CACHE = Symbol('_cache') // map from objectId to immutable object
export const STATE = Symbol('_state') // the underlying automerge-wasm document handle
export const HEADS = Symbol('_heads') // heads captured while a change is in flight (set => doc is out of date)
export const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string; "_root" for the root)
export const READ_ONLY = Symbol('_readOnly') // false only on the mutable proxies handed to change callbacks
export const FROZEN = Symbol('_frozen') // true once the document has been superseded and must not be reused
// Properties of all Automerge objects
//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string)
//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts
//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback
//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element

View file

@ -1,12 +1,14 @@
import { Automerge, ObjID, Prop } from "./types"
/**
* The most basic CRDT: an integer value that can be changed only by
* incrementing and decrementing. Since addition of integers is commutative,
* the value trivially converges.
*/
class Counter {
constructor(value) {
export class Counter {
value : number;
constructor(value?: number) {
this.value = value || 0
Object.freeze(this)
}
/**
@ -17,7 +19,7 @@ class Counter {
* concatenating it with another string, as in `x + ''`.
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf
*/
valueOf() {
valueOf() : number {
return this.value
}
@ -26,7 +28,7 @@ class Counter {
* this method is called e.g. when you do `['value: ', x].join('')` or when
* you use string interpolation: `value: ${x}`.
*/
toString() {
toString() : string {
return this.valueOf().toString()
}
@ -34,7 +36,7 @@ class Counter {
* Returns the counter value, so that a JSON serialization of an Automerge
* document represents the counter simply as an integer.
*/
toJSON() {
toJSON() : number {
return this.value
}
}
@ -44,11 +46,24 @@ class Counter {
* callback.
*/
class WriteableCounter extends Counter {
context: Automerge
path: string[]
objectId: ObjID
key: Prop
constructor(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) {
super(value)
this.context = context
this.path = path
this.objectId = objectId
this.key = key
}
/**
* Increases the value of the counter by `delta`. If `delta` is not given,
* increases the value of the counter by 1.
*/
increment(delta) {
increment(delta: number) : number {
delta = typeof delta === 'number' ? delta : 1
this.context.increment(this.objectId, this.key, delta)
this.value += delta
@ -59,7 +74,7 @@ class WriteableCounter extends Counter {
* Decreases the value of the counter by `delta`. If `delta` is not given,
* decreases the value of the counter by 1.
*/
decrement(delta) {
decrement(delta: number) : number {
return this.increment(typeof delta === 'number' ? -delta : -1)
}
}
@ -71,14 +86,8 @@ class WriteableCounter extends Counter {
* the property name (key in map, or index in list) where the counter is
* located.
*/
function getWriteableCounter(value, context, path, objectId, key) {
const instance = Object.create(WriteableCounter.prototype)
instance.value = value
instance.context = context
instance.path = path
instance.objectId = objectId
instance.key = key
return instance
export function getWriteableCounter(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) {
return new WriteableCounter(value, context, path, objectId, key)
}
module.exports = { Counter, getWriteableCounter }
//module.exports = { Counter, getWriteableCounter }

View file

@ -1,372 +0,0 @@
const AutomergeWASM = require("automerge-wasm")
const uuid = require('./uuid')
let { rootProxy, listProxy, textProxy, mapProxy } = require("./proxies")
let { Counter } = require("./counter")
let { Text } = require("./text")
let { Int, Uint, Float64 } = require("./numbers")
let { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } = require("./constants")
function init(actor) {
if (typeof actor != 'string') {
actor = null
}
const state = AutomergeWASM.create(actor)
return rootProxy(state, true);
}
function clone(doc) {
const state = doc[STATE].clone()
return rootProxy(state, true);
}
function free(doc) {
return doc[STATE].free()
}
function from(data, actor) {
let doc1 = init(actor)
let doc2 = change(doc1, (d) => Object.assign(d, data))
return doc2
}
function change(doc, options, callback) {
if (callback === undefined) {
// FIXME implement options
callback = options
options = {}
}
if (typeof options === "string") {
options = { message: options }
}
if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
throw new RangeError("must be the document root");
}
if (doc[FROZEN] === true) {
throw new RangeError("Attempting to use an outdated Automerge document")
}
if (!!doc[HEADS] === true) {
throw new RangeError("Attempting to change an out of date document");
}
if (doc[READ_ONLY] === false) {
throw new RangeError("Calls to Automerge.change cannot be nested")
}
const state = doc[STATE]
const heads = state.getHeads()
try {
doc[HEADS] = heads
doc[FROZEN] = true
let root = rootProxy(state);
callback(root)
if (state.pendingOps() === 0) {
doc[FROZEN] = false
doc[HEADS] = undefined
return doc
} else {
state.commit(options.message, options.time)
return rootProxy(state, true);
}
} catch (e) {
//console.log("ERROR: ",e)
doc[FROZEN] = false
doc[HEADS] = undefined
state.rollback()
throw e
}
}
function emptyChange(doc, options) {
if (options === undefined) {
options = {}
}
if (typeof options === "string") {
options = { message: options }
}
if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
throw new RangeError("must be the document root");
}
if (doc[FROZEN] === true) {
throw new RangeError("Attempting to use an outdated Automerge document")
}
if (doc[READ_ONLY] === false) {
throw new RangeError("Calls to Automerge.change cannot be nested")
}
const state = doc[STATE]
state.commit(options.message, options.time)
return rootProxy(state, true);
}
function load(data, actor) {
const state = AutomergeWASM.load(data, actor)
return rootProxy(state, true);
}
function save(doc) {
const state = doc[STATE]
return state.save()
}
// Merge all changes from `remote` that `local` does not yet have, returning
// a new (proxied) document that reflects both histories.
function merge(local, remote) {
  // doc[HEADS] holds an array of heads while the document is out of date;
  // its *presence* marks that state. The previous `local[HEADS] === true`
  // compared the array to `true` and could never fire — test truthiness
  // instead (this matches the TypeScript rewrite of this function).
  if (!!local[HEADS] === true) {
    throw new RangeError("Attempting to change an out of date document");
  }
  const localState = local[STATE]
  const heads = localState.getHeads()
  const remoteState = remote[STATE]
  const changes = localState.getChangesAdded(remoteState)
  localState.applyChanges(changes)
  local[HEADS] = heads
  return rootProxy(localState, true)
}
function getActorId(doc) {
const state = doc[STATE]
return state.getActorId()
}
function conflictAt(context, objectId, prop) {
let values = context.getAll(objectId, prop)
if (values.length <= 1) {
return
}
let result = {}
for (const conflict of values) {
const datatype = conflict[0]
const value = conflict[1]
switch (datatype) {
case "map":
result[value] = mapProxy(context, value, [ prop ], true)
break;
case "list":
result[value] = listProxy(context, value, [ prop ], true)
break;
case "text":
result[value] = textProxy(context, value, [ prop ], true)
break;
//case "table":
//case "cursor":
case "str":
case "uint":
case "int":
case "f64":
case "boolean":
case "bytes":
case "null":
result[conflict[2]] = value
break;
case "counter":
result[conflict[2]] = new Counter(value)
break;
case "timestamp":
result[conflict[2]] = new Date(value)
break;
default:
throw RangeError(`datatype ${datatype} unimplemented`)
}
}
return result
}
function getConflicts(doc, prop) {
const state = doc[STATE]
const objectId = doc[OBJECT_ID]
return conflictAt(state, objectId, prop)
}
function getLastLocalChange(doc) {
const state = doc[STATE]
try {
return state.getLastLocalChange()
} catch (e) {
return
}
}
function getObjectId(doc) {
return doc[OBJECT_ID]
}
function getChanges(oldState, newState) {
const o = oldState[STATE]
const n = newState[STATE]
const heads = oldState[HEADS]
return n.getChanges(heads || o.getHeads())
}
function getAllChanges(doc) {
const state = doc[STATE]
return state.getChanges([])
}
function applyChanges(doc, changes) {
if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
throw new RangeError("must be the document root");
}
if (doc[FROZEN] === true) {
throw new RangeError("Attempting to use an outdated Automerge document")
}
if (doc[READ_ONLY] === false) {
throw new RangeError("Calls to Automerge.change cannot be nested")
}
const state = doc[STATE]
const heads = state.getHeads()
state.applyChanges(changes)
doc[HEADS] = heads
return [rootProxy(state, true)];
}
function getHistory(doc) {
const actor = getActorId(doc)
const history = getAllChanges(doc)
return history.map((change, index) => ({
get change () {
return decodeChange(change)
},
get snapshot () {
const [state] = applyChanges(init(), history.slice(0, index + 1))
return state
}
})
)
}
// Deep structural equality: two values are equal when they are strictly
// equal primitives, or objects with identical sorted key sets whose
// corresponding values are recursively equal.
// Fix: `val1`/`val2` were referenced but never declared as parameters, so
// every call threw a ReferenceError. The object check is inlined because
// no `isObject` helper is defined in this file.
function equals(val1, val2) {
  const isObj = (o) => typeof o === 'object' && o !== null
  if (!isObj(val1) || !isObj(val2)) return val1 === val2
  const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort()
  if (keys1.length !== keys2.length) return false
  for (let i = 0; i < keys1.length; i++) {
    if (keys1[i] !== keys2[i]) return false
    if (!equals(val1[keys1[i]], val2[keys2[i]])) return false
  }
  return true
}
function encodeSyncMessage(msg) {
return AutomergeWASM.encodeSyncMessage(msg)
}
function decodeSyncMessage(msg) {
return AutomergeWASM.decodeSyncMessage(msg)
}
function encodeSyncState(state) {
return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state))
}
function decodeSyncState(state) {
return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state))
}
function generateSyncMessage(doc, inState) {
const state = doc[STATE]
const syncState = AutomergeWASM.importSyncState(inState)
const message = state.generateSyncMessage(syncState)
const outState = AutomergeWASM.exportSyncState(syncState)
return [ outState, message ]
}
function receiveSyncMessage(doc, inState, message) {
const syncState = AutomergeWASM.importSyncState(inState)
if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
throw new RangeError("must be the document root");
}
if (doc[FROZEN] === true) {
throw new RangeError("Attempting to use an outdated Automerge document")
}
if (!!doc[HEADS] === true) {
throw new RangeError("Attempting to change an out of date document");
}
if (doc[READ_ONLY] === false) {
throw new RangeError("Calls to Automerge.change cannot be nested")
}
const state = doc[STATE]
const heads = state.getHeads()
state.receiveSyncMessage(syncState, message)
const outState = AutomergeWASM.exportSyncState(syncState)
doc[HEADS] = heads
return [rootProxy(state, true), outState, null];
}
// Create a fresh sync state and export it to its plain-JS representation.
// Fix: the underlying initSyncState() takes no arguments — the stray
// `change` (a module-level function) previously passed here was bogus.
function initSyncState() {
  return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState())
}
function encodeChange(change) {
return AutomergeWASM.encodeChange(change)
}
function decodeChange(data) {
return AutomergeWASM.decodeChange(data)
}
function encodeSyncMessage(change) {
return AutomergeWASM.encodeSyncMessage(change)
}
function decodeSyncMessage(data) {
return AutomergeWASM.decodeSyncMessage(data)
}
function getMissingDeps(doc, heads) {
const state = doc[STATE]
return state.getMissingDeps(heads)
}
function getHeads(doc) {
const state = doc[STATE]
return doc[HEADS] || state.getHeads()
}
function dump(doc) {
const state = doc[STATE]
state.dump()
}
// Recursively convert a document (or any value inside one) into plain
// JavaScript data: arrays and Text map element-wise, objects copy key by
// key, scalars and Uint8Array pass through unchanged.
function toJS(doc) {
  if (typeof doc === "object") {
    if (doc instanceof Uint8Array) {
      return doc
    }
    if (doc === null) {
      return doc
    }
    if (doc instanceof Array) {
      return doc.map((a) => toJS(a))
    }
    if (doc instanceof Text) {
      return doc.map((a) => toJS(a))
    }
    const tmp = {}
    // Fix: `index` was assigned without a declaration, leaking a global
    // (and throwing in strict mode / ES modules).
    for (const index in doc) {
      tmp[index] = toJS(doc[index])
    }
    return tmp
  } else {
    return doc
  }
}
module.exports = {
init, from, change, emptyChange, clone, free,
load, save, merge, getChanges, getAllChanges, applyChanges,
getLastLocalChange, getObjectId, getActorId, getConflicts,
encodeChange, decodeChange, equals, getHistory, getHeads, uuid,
generateSyncMessage, receiveSyncMessage, initSyncState,
decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState,
getMissingDeps,
dump, Text, Counter, Int, Uint, Float64, toJS,
}
// deprecated
// Frontend, setDefaultBackend, Backend
// more...
/*
for (let name of ['getObjectId', 'getObjectById',
'setActorId',
'Text', 'Table', 'Counter', 'Observable' ]) {
module.exports[name] = Frontend[name]
}
*/

382
automerge-js/src/index.ts Normal file
View file

@ -0,0 +1,382 @@
export { uuid } from './uuid'
import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies"
import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants"
import { Counter } from "./types"
export { Text, Counter, Int, Uint, Float64 } from "./types"
import { ApiHandler, LowLevelApi, UseApi } from "./low_level"
import { ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "./types"
import { SyncState, SyncMessage, DecodedSyncMessage, AutomergeValue } from "./types"
export type ChangeOptions = { message?: string, time?: number }
export type Doc<T> = { readonly [P in keyof T]: Doc<T[P]> }
export type ChangeFn<T> = (doc: T) => void
export interface State<T> {
change: DecodedChange
snapshot: T
}
// Plug in a concrete low-level (wasm) implementation. Must be called
// before any other function in this module is used.
export function use(api: LowLevelApi) {
  UseApi(api)
}
// Internal accessors for the hidden symbol-keyed properties that the
// proxies store on every document (see ./constants).
// The wasm document handle; throws when `doc` is not a document root.
function _state<T>(doc: Doc<T>) : Automerge {
  const state = Reflect.get(doc,STATE)
  if (state == undefined) {
    throw new RangeError("must be the document root")
  }
  return state
}
// True when the document has been superseded and must not be reused.
function _frozen<T>(doc: Doc<T>) : boolean {
  return Reflect.get(doc,FROZEN) === true
}
// Heads captured while a change is in flight; undefined when up to date.
function _heads<T>(doc: Doc<T>) : Heads | undefined {
  return Reflect.get(doc,HEADS)
}
// The object ID of this proxy ("_root" for the document root).
function _obj<T>(doc: Doc<T>) : ObjID {
  return Reflect.get(doc,OBJECT_ID)
}
// False only on the mutable proxies handed to change callbacks.
function _readonly<T>(doc: Doc<T>) : boolean {
  return Reflect.get(doc,READ_ONLY) === true
}
export function init<T>(actor?: ActorId) : Doc<T>{
if (typeof actor !== "string") {
actor = undefined
}
const state = ApiHandler.create(actor)
return rootProxy(state, true);
}
export function clone<T>(doc: Doc<T>) : Doc<T> {
const state = _state(doc).clone()
return rootProxy(state, true);
}
export function free<T>(doc: Doc<T>) {
return _state(doc).free()
}
export function from<T>(initialState: T | Doc<T>, actor?: ActorId): Doc<T> {
return change(init(actor), (d) => Object.assign(d, initialState))
}
export function change<T>(doc: Doc<T>, options: string | ChangeOptions | ChangeFn<T>, callback?: ChangeFn<T>): Doc<T> {
if (typeof options === 'function') {
return _change(doc, {}, options)
} else if (typeof callback === 'function') {
if (typeof options === "string") {
options = { message: options }
}
return _change(doc, options, callback)
} else {
throw RangeError("Invalid args for change")
}
}
function _change<T>(doc: Doc<T>, options: ChangeOptions, callback: ChangeFn<T>): Doc<T> {
if (typeof callback !== "function") {
throw new RangeError("invalid change function");
}
if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") {
throw new RangeError("must be the document root");
}
if (_frozen(doc) === true) {
throw new RangeError("Attempting to use an outdated Automerge document")
}
if (!!_heads(doc) === true) {
throw new RangeError("Attempting to change an out of date document");
}
if (_readonly(doc) === false) {
throw new RangeError("Calls to Automerge.change cannot be nested")
}
const state = _state(doc)
const heads = state.getHeads()
try {
Reflect.set(doc,HEADS,heads)
Reflect.set(doc,FROZEN,true)
const root : T = rootProxy(state);
callback(root)
if (state.pendingOps() === 0) {
Reflect.set(doc,FROZEN,false)
Reflect.set(doc,HEADS,undefined)
return doc
} else {
state.commit(options.message, options.time)
return rootProxy(state, true);
}
} catch (e) {
//console.log("ERROR: ",e)
Reflect.set(doc,FROZEN,false)
Reflect.set(doc,HEADS,undefined)
state.rollback()
throw e
}
}
export function emptyChange<T>(doc: Doc<T>, options: ChangeOptions) {
if (options === undefined) {
options = {}
}
if (typeof options === "string") {
options = { message: options }
}
if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") {
throw new RangeError("must be the document root");
}
if (_frozen(doc) === true) {
throw new RangeError("Attempting to use an outdated Automerge document")
}
if (_readonly(doc) === false) {
throw new RangeError("Calls to Automerge.change cannot be nested")
}
const state = _state(doc)
state.commit(options.message, options.time)
return rootProxy(state, true);
}
// Load a document from its binary (saved) form, optionally with an
// explicit actor ID. `actor` is now optional to match the underlying
// ApiHandler.load(data, actor?) signature and the old JS API — it was
// previously (incorrectly) required. Backward compatible for all callers.
export function load<T>(data: Uint8Array, actor?: ActorId) : Doc<T> {
  const state = ApiHandler.load(data, actor)
  return rootProxy(state, true);
}
export function save<T>(doc: Doc<T>) : Uint8Array {
const state = _state(doc)
return state.save()
}
export function merge<T>(local: Doc<T>, remote: Doc<T>) : Doc<T> {
if (!!_heads(local) === true) {
throw new RangeError("Attempting to change an out of date document");
}
const localState = _state(local)
const heads = localState.getHeads()
const remoteState = _state(remote)
const changes = localState.getChangesAdded(remoteState)
localState.applyChanges(changes)
Reflect.set(local,HEADS,heads)
return rootProxy(localState, true)
}
export function getActorId<T>(doc: Doc<T>) : ActorId {
const state = _state(doc)
return state.getActorId()
}
type Conflicts = { [key: string]: AutomergeValue }
function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : Conflicts | undefined {
const values = context.getAll(objectId, prop)
if (values.length <= 1) {
return
}
const result : Conflicts = {}
for (const fullVal of values) {
switch (fullVal[0]) {
case "map":
result[fullVal[1]] = mapProxy(context, fullVal[1], [ prop ], true)
break;
case "list":
result[fullVal[1]] = listProxy(context, fullVal[1], [ prop ], true)
break;
case "text":
result[fullVal[1]] = textProxy(context, fullVal[1], [ prop ], true)
break;
//case "table":
//case "cursor":
case "str":
case "uint":
case "int":
case "f64":
case "boolean":
case "bytes":
case "null":
result[fullVal[2]] = fullVal[1]
break;
case "counter":
result[fullVal[2]] = new Counter(fullVal[1])
break;
case "timestamp":
result[fullVal[2]] = new Date(fullVal[1])
break;
default:
throw RangeError(`datatype ${fullVal[0]} unimplemented`)
}
}
return result
}
export function getConflicts<T>(doc: Doc<T>, prop: Prop) : Conflicts | undefined {
const state = _state(doc)
const objectId = _obj(doc)
return conflictAt(state, objectId, prop)
}
export function getLastLocalChange<T>(doc: Doc<T>) : Change | undefined {
const state = _state(doc)
try {
return state.getLastLocalChange()
} catch (e) {
return
}
}
export function getObjectId<T>(doc: Doc<T>) : ObjID {
return _obj(doc)
}
export function getChanges<T>(oldState: Doc<T>, newState: Doc<T>) : Change[] {
const o = _state(oldState)
const n = _state(newState)
const heads = _heads(oldState)
return n.getChanges(heads || o.getHeads())
}
export function getAllChanges<T>(doc: Doc<T>) : Change[] {
const state = _state(doc)
return state.getChanges([])
}
export function applyChanges<T>(doc: Doc<T>, changes: Change[]) : [Doc<T>] {
if (doc === undefined || _obj(doc) !== "_root") {
throw new RangeError("must be the document root");
}
if (_frozen(doc) === true) {
throw new RangeError("Attempting to use an outdated Automerge document")
}
if (_readonly(doc) === false) {
throw new RangeError("Calls to Automerge.change cannot be nested")
}
const state = _state(doc)
const heads = state.getHeads()
state.applyChanges(changes)
Reflect.set(doc,HEADS,heads)
return [rootProxy(state, true)];
}
export function getHistory<T>(doc: Doc<T>) : State<T>[] {
const history = getAllChanges(doc)
return history.map((change, index) => ({
get change () {
return decodeChange(change)
},
get snapshot () {
const [state] = applyChanges(init(), history.slice(0, index + 1))
return <T>state
}
})
)
}
// FIXME : no tests
// Deep structural equality: non-objects fall back to strict equality;
// objects are equal when their sorted key sets match and every
// corresponding value is (recursively) equal.
export function equals(val1: unknown, val2: unknown) : boolean {
  if (!isObject(val1) || !isObject(val2)) return val1 === val2
  const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort()
  if (keys1.length !== keys2.length) return false
  for (let i = 0; i < keys1.length; i++) {
    if (keys1[i] !== keys2[i]) return false
    if (!equals(val1[keys1[i]], val2[keys2[i]])) return false
  }
  return true
}
export function encodeSyncState(state: SyncState) : Uint8Array {
return ApiHandler.encodeSyncState(ApiHandler.importSyncState(state))
}
export function decodeSyncState(state: Uint8Array) : SyncState {
return ApiHandler.exportSyncState(ApiHandler.decodeSyncState(state))
}
export function generateSyncMessage<T>(doc: Doc<T>, inState: SyncState) : [ SyncState, SyncMessage | null ] {
const state = _state(doc)
const syncState = ApiHandler.importSyncState(inState)
const message = state.generateSyncMessage(syncState)
const outState = ApiHandler.exportSyncState(syncState)
return [ outState, message ]
}
export function receiveSyncMessage<T>(doc: Doc<T>, inState: SyncState, message: SyncMessage) : [ Doc<T>, SyncState, null ] {
const syncState = ApiHandler.importSyncState(inState)
if (doc === undefined || _obj(doc) !== "_root") {
throw new RangeError("must be the document root");
}
if (_frozen(doc) === true) {
throw new RangeError("Attempting to use an outdated Automerge document")
}
if (!!_heads(doc) === true) {
throw new RangeError("Attempting to change an out of date document");
}
if (_readonly(doc) === false) {
throw new RangeError("Calls to Automerge.change cannot be nested")
}
const state = _state(doc)
const heads = state.getHeads()
state.receiveSyncMessage(syncState, message)
Reflect.set(doc,HEADS,heads)
const outState = ApiHandler.exportSyncState(syncState)
return [rootProxy(state, true), outState, null];
}
export function initSyncState() : SyncState {
return ApiHandler.exportSyncState(ApiHandler.initSyncState())
}
export function encodeChange(change: DecodedChange) : Change {
return ApiHandler.encodeChange(change)
}
export function decodeChange(data: Change) : DecodedChange {
return ApiHandler.decodeChange(data)
}
export function encodeSyncMessage(message: DecodedSyncMessage) : SyncMessage {
return ApiHandler.encodeSyncMessage(message)
}
export function decodeSyncMessage(message: SyncMessage) : DecodedSyncMessage {
return ApiHandler.decodeSyncMessage(message)
}
export function getMissingDeps<T>(doc: Doc<T>, heads: Heads) : Heads {
const state = _state(doc)
return state.getMissingDeps(heads)
}
export function getHeads<T>(doc: Doc<T>) : Heads {
const state = _state(doc)
return _heads(doc) || state.getHeads()
}
export function dump<T>(doc: Doc<T>) {
const state = _state(doc)
state.dump()
}
// FIXME - return T?
// Materialize the whole document into plain JS values via the wasm
// backend, honoring a pinned set of heads when the doc is out of date.
export function toJS<T>(doc: Doc<T>) : MaterializeValue {
  let state = _state(doc)
  let heads = _heads(doc)
  return state.materialize("_root", heads)
}
/**
 * Type guard: true when `obj` is a non-null value of typeof "object"
 * (includes arrays and dates; excludes null and all primitives).
 */
function isObject(obj: unknown) : obj is Record<string,unknown> {
  return obj !== null && typeof obj === 'object'
}

View file

@ -0,0 +1,26 @@
import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "automerge-wasm"
import { API as LowLevelApi } from "automerge-wasm"
export { API as LowLevelApi } from "automerge-wasm"
export function UseApi(api: LowLevelApi) {
for (const k in api) {
ApiHandler[k] = api[k]
}
}
/* eslint-disable */
export const ApiHandler : LowLevelApi = {
create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") },
load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") },
encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called") },
decodeChange(change: Change): DecodedChange { throw new RangeError("Automerge.use() not called") },
initSyncState(): SyncState { throw new RangeError("Automerge.use() not called") },
encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { throw new RangeError("Automerge.use() not called") },
decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { throw new RangeError("Automerge.use() not called") },
encodeSyncState(state: SyncState): Uint8Array { throw new RangeError("Automerge.use() not called") },
decodeSyncState(data: Uint8Array): SyncState { throw new RangeError("Automerge.use() not called") },
exportSyncState(state: SyncState): JsSyncState { throw new RangeError("Automerge.use() not called") },
importSyncState(state: JsSyncState): SyncState { throw new RangeError("Automerge.use() not called") },
}
/* eslint-enable */

View file

@ -1,7 +1,9 @@
// Convenience classes to allow users to strictly specify the number type they want
class Int {
constructor(value) {
export class Int {
value: number;
constructor(value: number) {
if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) {
throw new RangeError(`Value ${value} cannot be a uint`)
}
@ -10,8 +12,10 @@ class Int {
}
}
class Uint {
constructor(value) {
export class Uint {
value: number;
constructor(value: number) {
if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) {
throw new RangeError(`Value ${value} cannot be a uint`)
}
@ -20,8 +24,10 @@ class Uint {
}
}
class Float64 {
constructor(value) {
export class Float64 {
value: number;
constructor(value: number) {
if (typeof value !== 'number') {
throw new RangeError(`Value ${value} cannot be a float64`)
}
@ -30,4 +36,3 @@ class Float64 {
}
}
module.exports = { Int, Uint, Float64 }

View file

@ -1,9 +1,10 @@
const AutomergeWASM = require("automerge-wasm")
const { Int, Uint, Float64 } = require("./numbers");
const { Counter, getWriteableCounter } = require("./counter");
const { Text } = require("./text");
const { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } = require("./constants")
import { Automerge, Heads, ObjID } from "./types"
import { Int, Uint, Float64 } from "./numbers"
import { Counter, getWriteableCounter } from "./counter"
import { Text } from "./text"
import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } from "./constants"
import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue, Prop } from "./types"
function parseListIndex(key) {
if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10)
@ -17,9 +18,9 @@ function parseListIndex(key) {
return key
}
function valueAt(target, prop) {
function valueAt(target, prop: Prop) : AutomergeValue | undefined {
const { context, objectId, path, readonly, heads} = target
let value = context.get(objectId, prop, heads)
const value = context.get(objectId, prop, heads)
if (value === undefined) {
return
}
@ -97,8 +98,8 @@ function import_value(value) {
}
const MapHandler = {
get (target, key) {
const { context, objectId, path, readonly, frozen, heads, cache } = target
get (target, key) : AutomergeValue {
const { context, objectId, readonly, frozen, heads, cache } = target
if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] }
if (key === OBJECT_ID) return objectId
if (key === READ_ONLY) return readonly
@ -112,20 +113,20 @@ const MapHandler = {
},
set (target, key, val) {
let { context, objectId, path, readonly, frozen} = target
const { context, objectId, path, readonly, frozen} = target
target.cache = {} // reset cache on set
if (val && val[OBJECT_ID]) {
throw new RangeError('Cannot create a reference to an existing document object')
}
if (key === FROZEN) {
target.frozen = val
return
return true
}
if (key === HEADS) {
target.heads = val
return
return true
}
let [ value, datatype ] = import_value(val)
const [ value, datatype ] = import_value(val)
if (frozen) {
throw new RangeError("Attempting to use an outdated Automerge document")
}
@ -133,27 +134,30 @@ const MapHandler = {
throw new RangeError(`Object property "${key}" cannot be modified`)
}
switch (datatype) {
case "list":
case "list": {
const list = context.putObject(objectId, key, [])
const proxyList = listProxy(context, list, [ ... path, key ], readonly );
for (let i = 0; i < value.length; i++) {
proxyList[i] = value[i]
}
break;
case "text":
break
}
case "text": {
const text = context.putObject(objectId, key, "", "text")
const proxyText = textProxy(context, text, [ ... path, key ], readonly );
for (let i = 0; i < value.length; i++) {
proxyText[i] = value.get(i)
}
break;
case "map":
break
}
case "map": {
const map = context.putObject(objectId, key, {})
const proxyMap = mapProxy(context, map, [ ... path, key ], readonly );
for (const key in value) {
proxyMap[key] = value[key]
}
break;
}
default:
context.put(objectId, key, value, datatype)
}
@ -161,7 +165,7 @@ const MapHandler = {
},
deleteProperty (target, key) {
const { context, objectId, path, readonly, frozen } = target
const { context, objectId, readonly } = target
target.cache = {} // reset cache on delete
if (readonly) {
throw new RangeError(`Object property "${key}" cannot be modified`)
@ -176,7 +180,7 @@ const MapHandler = {
},
getOwnPropertyDescriptor (target, key) {
const { context, objectId } = target
// const { context, objectId } = target
const value = this.get(target, key)
if (typeof value !== 'undefined') {
return {
@ -194,9 +198,9 @@ const MapHandler = {
const ListHandler = {
get (target, index) {
const {context, objectId, path, readonly, frozen, heads } = target
const {context, objectId, readonly, frozen, heads } = target
index = parseListIndex(index)
if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } }
if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
if (index === OBJECT_ID) return objectId
if (index === READ_ONLY) return readonly
@ -224,18 +228,18 @@ const ListHandler = {
},
set (target, index, val) {
let {context, objectId, path, readonly, frozen } = target
const {context, objectId, path, readonly, frozen } = target
index = parseListIndex(index)
if (val && val[OBJECT_ID]) {
throw new RangeError('Cannot create a reference to an existing document object')
}
if (index === FROZEN) {
target.frozen = val
return
return true
}
if (index === HEADS) {
target.heads = val
return
return true
}
if (typeof index == "string") {
throw new RangeError('list index must be a number')
@ -248,7 +252,7 @@ const ListHandler = {
throw new RangeError(`Object property "${index}" cannot be modified`)
}
switch (datatype) {
case "list":
case "list": {
let list
if (index >= context.length(objectId)) {
list = context.insertObject(objectId, index, [])
@ -258,7 +262,8 @@ const ListHandler = {
const proxyList = listProxy(context, list, [ ... path, index ], readonly);
proxyList.splice(0,0,...value)
break;
case "text":
}
case "text": {
let text
if (index >= context.length(objectId)) {
text = context.insertObject(objectId, index, "", "text")
@ -268,7 +273,8 @@ const ListHandler = {
const proxyText = textProxy(context, text, [ ... path, index ], readonly);
proxyText.splice(0,0,...value)
break;
case "map":
}
case "map": {
let map
if (index >= context.length(objectId)) {
map = context.insertObject(objectId, index, {})
@ -280,6 +286,7 @@ const ListHandler = {
proxyMap[key] = value[key]
}
break;
}
default:
if (index >= context.length(objectId)) {
context.insert(objectId, index, value, datatype)
@ -310,23 +317,23 @@ const ListHandler = {
},
getOwnPropertyDescriptor (target, index) {
const {context, objectId, path, readonly, frozen, heads} = target
const {context, objectId, heads} = target
if (index === 'length') return {writable: true, value: context.length(objectId, heads) }
if (index === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId}
index = parseListIndex(index)
let value = valueAt(target, index)
const value = valueAt(target, index)
return { configurable: true, enumerable: true, value }
},
getPrototypeOf(target) { return Object.getPrototypeOf([]) },
ownKeys (target) {
const {context, objectId, heads } = target
let keys = []
getPrototypeOf(target) { return Object.getPrototypeOf(target) },
ownKeys (/*target*/) : string[] {
const keys : string[] = []
// uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array
// but not uncommenting it causes for (i in list) {} to not enumerate values properly
//const {context, objectId, heads } = target
//for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) }
keys.push("length");
return keys
@ -336,10 +343,10 @@ const ListHandler = {
const TextHandler = Object.assign({}, ListHandler, {
get (target, index) {
// FIXME this is a one line change from ListHandler.get()
const {context, objectId, path, readonly, frozen, heads } = target
const {context, objectId, readonly, frozen, heads } = target
index = parseListIndex(index)
if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } }
if (index === OBJECT_ID) return objectId
if (index === READ_ONLY) return readonly
if (index === FROZEN) return frozen
@ -363,29 +370,30 @@ const TextHandler = Object.assign({}, ListHandler, {
return textMethods(target)[index] || listMethods(target)[index]
}
},
getPrototypeOf(target) {
getPrototypeOf(/*target*/) {
return Object.getPrototypeOf(new Text())
},
})
function mapProxy(context, objectId, path, readonly, heads) {
export function mapProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : MapValue {
return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler)
}
function listProxy(context, objectId, path, readonly, heads) {
let target = []
export function listProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : ListValue {
const target = []
Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}})
return new Proxy(target, ListHandler)
}
function textProxy(context, objectId, path, readonly, heads) {
let target = []
export function textProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : TextValue {
const target = []
Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}})
return new Proxy(target, TextHandler)
}
function rootProxy(context, readonly) {
return mapProxy(context, "_root", [], readonly)
export function rootProxy<T>(context: Automerge, readonly?: boolean) : T {
/* eslint-disable-next-line */
return <any>mapProxy(context, "_root", [], !!readonly)
}
function listMethods(target) {
@ -400,18 +408,20 @@ function listMethods(target) {
return this
},
fill(val, start, end) {
// FIXME
let list = context.getObject(objectId)
let [value, datatype] = valueAt(target, index)
for (let index = parseListIndex(start || 0); index < parseListIndex(end || list.length); index++) {
context.put(objectId, index, value, datatype)
fill(val: ScalarValue, start: number, end: number) {
// FIXME needs tests
const [value, datatype] = import_value(val)
start = parseListIndex(start || 0)
end = parseListIndex(end || context.length(objectId))
for (let i = start; i < end; i++) {
context.put(objectId, i, value, datatype)
}
return this
},
indexOf(o, start = 0) {
indexOf(/*o, start = 0*/) {
// FIXME
/*
const id = o[OBJECT_ID]
if (id) {
const list = context.getObject(objectId)
@ -424,6 +434,7 @@ function listMethods(target) {
} else {
return context.indexOf(objectId, o, start)
}
*/
},
insertAt(index, ...values) {
@ -432,17 +443,17 @@ function listMethods(target) {
},
pop() {
let length = context.length(objectId)
const length = context.length(objectId)
if (length == 0) {
return undefined
}
let last = valueAt(target, length - 1)
const last = valueAt(target, length - 1)
context.delete(objectId, length - 1)
return last
},
push(...values) {
let len = context.length(objectId)
const len = context.length(objectId)
this.splice(len, 0, ...values)
return context.length(objectId)
},
@ -457,7 +468,7 @@ function listMethods(target) {
splice(index, del, ...vals) {
index = parseListIndex(index)
del = parseListIndex(del)
for (let val of vals) {
for (const val of vals) {
if (val && val[OBJECT_ID]) {
throw new RangeError('Cannot create a reference to an existing document object')
}
@ -468,32 +479,37 @@ function listMethods(target) {
if (readonly) {
throw new RangeError("Sequence object cannot be modified outside of a change block")
}
let result = []
const result : AutomergeValue[] = []
for (let i = 0; i < del; i++) {
let value = valueAt(target, index)
result.push(value)
const value = valueAt(target, index)
if (value !== undefined) {
result.push(value)
}
context.delete(objectId, index)
}
const values = vals.map((val) => import_value(val))
for (let [value,datatype] of values) {
for (const [value,datatype] of values) {
switch (datatype) {
case "list":
case "list": {
const list = context.insertObject(objectId, index, [])
const proxyList = listProxy(context, list, [ ... path, index ], readonly);
proxyList.splice(0,0,...value)
break;
case "text":
}
case "text": {
const text = context.insertObject(objectId, index, "", "text")
const proxyText = textProxy(context, text, [ ... path, index ], readonly);
proxyText.splice(0,0,...value)
break;
case "map":
}
case "map": {
const map = context.insertObject(objectId, index, {})
const proxyMap = mapProxy(context, map, [ ... path, index ], readonly);
for (const key in value) {
proxyMap[key] = value[key]
}
break;
}
default:
context.insert(objectId, index, value, datatype)
}
@ -508,10 +524,10 @@ function listMethods(target) {
},
entries() {
let i = 0;
const i = 0;
const iterator = {
next: () => {
let value = valueAt(target, i)
const value = valueAt(target, i)
if (value === undefined) {
return { value: undefined, done: true }
} else {
@ -524,10 +540,10 @@ function listMethods(target) {
keys() {
let i = 0;
let len = context.length(objectId, heads)
const len = context.length(objectId, heads)
const iterator = {
next: () => {
let value = undefined
let value : undefined | number = undefined
if (i < len) { value = i; i++ }
return { value, done: true }
}
@ -536,10 +552,10 @@ function listMethods(target) {
},
values() {
let i = 0;
const i = 0;
const iterator = {
next: () => {
let value = valueAt(target, i)
const value = valueAt(target, i)
if (value === undefined) {
return { value: undefined, done: true }
} else {
@ -553,18 +569,18 @@ function listMethods(target) {
// Read-only methods that can delegate to the JavaScript built-in implementations
// FIXME - super slow
for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
'slice', 'some', 'toLocaleString', 'toString']) {
methods[method] = (...args) => {
const list = []
while (true) {
let value = valueAt(target, list.length)
if (value == undefined) {
break
const list : AutomergeValue = []
let value
do {
value = valueAt(target, list.length)
if (value !== undefined) {
list.push(value)
}
list.push(value)
}
} while (value !== undefined)
return list[method](...args)
}
@ -574,21 +590,21 @@ function listMethods(target) {
}
function textMethods(target) {
const {context, objectId, path, readonly, frozen, heads } = target
const {context, objectId, heads } = target
const methods = {
set (index, value) {
set (index: number, value) {
return this[index] = value
},
get (index) {
get (index: number) : AutomergeValue {
return this[index]
},
toString () {
toString () : string {
return context.text(objectId, heads).replace(//g,'')
},
toSpans () {
let spans = []
toSpans () : AutomergeValue[] {
const spans : AutomergeValue[] = []
let chars = ''
let length = this.length
const length = context.length(objectId)
for (let i = 0; i < length; i++) {
const value = this[i]
if (typeof value === 'string') {
@ -606,12 +622,10 @@ function textMethods(target) {
}
return spans
},
toJSON () {
toJSON () : string {
return this.toString()
}
}
return methods
}
module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler }

View file

@ -1,39 +1,36 @@
const { OBJECT_ID } = require('./constants')
const { isObject } = require('../src/common')
import { Value } from "./types"
class Text {
constructor (text) {
const instance = Object.create(Text.prototype)
export class Text {
elems: Value[]
constructor (text?: string | string[]) {
//const instance = Object.create(Text.prototype)
if (typeof text === 'string') {
instance.elems = [...text]
this.elems = [...text]
} else if (Array.isArray(text)) {
instance.elems = text
this.elems = text
} else if (text === undefined) {
instance.elems = []
this.elems = []
} else {
throw new TypeError(`Unsupported initial value for Text: ${text}`)
}
return instance
}
get length () {
get length () : number {
return this.elems.length
}
get (index) {
get (index: number) : Value {
return this.elems[index]
}
getElemId (index) {
return undefined
}
/**
* Iterates over the text elements character by character, including any
* inline objects.
*/
[Symbol.iterator] () {
let elems = this.elems, index = -1
const elems = this.elems
let index = -1
return {
next () {
index += 1
@ -50,7 +47,7 @@ class Text {
* Returns the content of the Text object as a simple string, ignoring any
* non-character elements.
*/
toString() {
toString() : string {
// Concatting to a string is faster than creating an array and then
// .join()ing for small (<100KB) arrays.
// https://jsperf.com/join-vs-loop-w-type-test
@ -68,8 +65,8 @@ class Text {
* For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans:
* => ['ab', {x: 3}, 'cd']
*/
toSpans() {
let spans = []
toSpans() : Value[] {
const spans : Value[] = []
let chars = ''
for (const elem of this.elems) {
if (typeof elem === 'string') {
@ -92,21 +89,21 @@ class Text {
* Returns the content of the Text object as a simple string, so that the
* JSON serialization of an Automerge document represents text nicely.
*/
toJSON() {
toJSON() : string {
return this.toString()
}
/**
* Updates the list item at position `index` to a new value `value`.
*/
set (index, value) {
set (index: number, value: Value) {
this.elems[index] = value
}
/**
* Inserts new list items `values` starting at position `index`.
*/
insertAt(index, ...values) {
insertAt(index: number, ...values: Value[]) {
this.elems.splice(index, 0, ... values)
}
@ -114,14 +111,20 @@ class Text {
* Deletes `numDelete` list items starting at position `index`.
* if `numDelete` is not given, one item is deleted.
*/
deleteAt(index, numDelete = 1) {
deleteAt(index: number, numDelete = 1) {
this.elems.splice(index, numDelete)
}
map<T>(callback: (e: Value) => T) {
this.elems.map(callback)
}
}
// Read-only methods that can delegate to the JavaScript built-in array
for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
'indexOf', 'join', 'lastIndexOf', 'reduce', 'reduceRight',
'slice', 'some', 'toLocaleString']) {
Text.prototype[method] = function (...args) {
const array = [...this]
@ -129,4 +132,3 @@ for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach',
}
}
module.exports = { Text }

18
automerge-js/src/types.ts Normal file
View file

@ -0,0 +1,18 @@
export { Actor as ActorId, Value, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-wasm"
export { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-wasm"
export { Text } from "./text"
export { Counter } from "./counter"
export { Int, Uint, Float64 } from "./numbers"
export type UnknownObject = Record<string | number | symbol, unknown>;
export type Dictionary<T> = Record<string, T>;
import { Counter } from "./counter"
export type AutomergeValue = ScalarValue | { [key: string]: AutomergeValue } | Array<AutomergeValue>
export type MapValue = { [key: string]: AutomergeValue }
export type ListValue = Array<AutomergeValue>
export type TextValue = Array<AutomergeValue>
export type ScalarValue = string | number | null | boolean | Date | Counter | Uint8Array

View file

@ -1,16 +0,0 @@
const { v4: uuid } = require('uuid')
function defaultFactory() {
return uuid().replace(/-/g, '')
}
let factory = defaultFactory
function makeUuid() {
return factory()
}
makeUuid.setFactory = newFactory => { factory = newFactory }
makeUuid.reset = () => { factory = defaultFactory }
module.exports = makeUuid

21
automerge-js/src/uuid.ts Normal file
View file

@ -0,0 +1,21 @@
import { v4 } from 'uuid'
function defaultFactory() {
return v4().replace(/-/g, '')
}
let factory = defaultFactory
interface UUIDFactory extends Function {
setFactory(f: typeof factory): void;
reset(): void;
}
export const uuid : UUIDFactory = () => {
return factory()
}
uuid.setFactory = newFactory => { factory = newFactory }
uuid.reset = () => { factory = defaultFactory }

View file

@ -1,7 +1,9 @@
import * as assert from 'assert'
import * as util from 'util'
import * as Automerge from '../src'
import * as AutomergeWASM from "automerge-wasm"
const assert = require('assert')
const util = require('util')
const Automerge = require('..')
Automerge.use(AutomergeWASM)
describe('Automerge', () => {
describe('basics', () => {

View file

@ -1,7 +1,10 @@
const assert = require('assert')
const { checkEncoded } = require('./helpers')
const Automerge = require('..')
const { encodeChange, decodeChange } = Automerge
import * as assert from 'assert'
import { checkEncoded } from './helpers'
import * as Automerge from '../src'
import { encodeChange, decodeChange } from '../src'
import * as AutomergeWASM from "automerge-wasm"
Automerge.use(AutomergeWASM)
describe('change encoding', () => {
it('should encode text edits', () => {

View file

@ -1,5 +1,5 @@
const assert = require('assert')
const { Encoder } = require('../src/encoding')
import * as assert from 'assert'
import { Encoder } from './legacy/encoding'
// Assertion that succeeds if the first argument deepStrictEquals at least one of the
// subsequent arguments (but we don't care which one)

View file

@ -222,21 +222,34 @@ function encodeOperationAction(op, columns) {
}
/**
* Encodes the integer `value` into the two columns `valLen` and `valRaw`,
* with the datatype tag set to `typeTag`. If `typeTag` is zero, it is set
* automatically to signed or unsigned depending on the sign of the value.
* Values with non-zero type tags are always encoded as signed integers.
 * Given the datatype for a number, determine the typeTag and the value to encode.
 * If no datatype is given, the type is guessed from the value itself.
*/
function encodeInteger(value, typeTag, columns) {
let numBytes
if (value < 0 || typeTag > 0) {
numBytes = columns.valRaw.appendInt53(value)
if (!typeTag) typeTag = VALUE_TYPE.LEB128_INT
} else {
numBytes = columns.valRaw.appendUint53(value)
typeTag = VALUE_TYPE.LEB128_UINT
function getNumberTypeAndValue(op) {
switch (op.datatype) {
case "counter":
return [ VALUE_TYPE.COUNTER, op.value ]
case "timestamp":
return [ VALUE_TYPE.TIMESTAMP, op.value ]
case "uint":
return [ VALUE_TYPE.LEB128_UINT, op.value ]
case "int":
return [ VALUE_TYPE.LEB128_INT, op.value ]
case "float64": {
const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64)
view64.setFloat64(0, op.value, true)
return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ]
}
default:
// increment operators get resolved here ...
if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) {
return [ VALUE_TYPE.LEB128_INT, op.value ]
} else {
const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64)
view64.setFloat64(0, op.value, true)
return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ]
}
}
columns.valLen.appendValue(numBytes << 4 | typeTag)
}
/**
@ -256,33 +269,23 @@ function encodeValue(op, columns) {
} else if (ArrayBuffer.isView(op.value)) {
const numBytes = columns.valRaw.appendRawBytes(new Uint8Array(op.value.buffer))
columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.BYTES)
} else if (op.datatype === 'counter' && typeof op.value === 'number') {
encodeInteger(op.value, VALUE_TYPE.COUNTER, columns)
} else if (op.datatype === 'timestamp' && typeof op.value === 'number') {
encodeInteger(op.value, VALUE_TYPE.TIMESTAMP, columns)
} else if (typeof op.value === 'number') {
let [typeTag, value] = getNumberTypeAndValue(op)
let numBytes
if (typeTag === VALUE_TYPE.LEB128_UINT) {
numBytes = columns.valRaw.appendUint53(value)
} else if (typeTag === VALUE_TYPE.IEEE754) {
numBytes = columns.valRaw.appendRawBytes(value)
} else {
numBytes = columns.valRaw.appendInt53(value)
}
columns.valLen.appendValue(numBytes << 4 | typeTag)
} else if (typeof op.datatype === 'number' && op.datatype >= VALUE_TYPE.MIN_UNKNOWN &&
op.datatype <= VALUE_TYPE.MAX_UNKNOWN && op.value instanceof Uint8Array) {
const numBytes = columns.valRaw.appendRawBytes(op.value)
columns.valLen.appendValue(numBytes << 4 | op.datatype)
} else if (op.datatype) {
throw new RangeError(`Unknown datatype ${op.datatype} for value ${op.value}`)
} else if (typeof op.value === 'number') {
if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) {
encodeInteger(op.value, 0, columns)
} else {
// Encode number in 32-bit float if this can be done without loss of precision
const buf32 = new ArrayBuffer(4), view32 = new DataView(buf32)
view32.setFloat32(0, op.value, true) // true means little-endian
if (view32.getFloat32(0, true) === op.value) {
columns.valRaw.appendRawBytes(new Uint8Array(buf32))
columns.valLen.appendValue(4 << 4 | VALUE_TYPE.IEEE754)
} else {
const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64)
view64.setFloat64(0, op.value, true) // true means little-endian
columns.valRaw.appendRawBytes(new Uint8Array(buf64))
columns.valLen.appendValue(8 << 4 | VALUE_TYPE.IEEE754)
}
}
} else {
throw new RangeError(`Unsupported value in operation: ${op.value}`)
}
@ -305,15 +308,13 @@ function decodeValue(sizeTag, bytes) {
return {value: utf8ToString(bytes)}
} else {
if (sizeTag % 16 === VALUE_TYPE.LEB128_UINT) {
return {value: new Decoder(bytes).readUint53()}
return {value: new Decoder(bytes).readUint53(), datatype: "uint"}
} else if (sizeTag % 16 === VALUE_TYPE.LEB128_INT) {
return {value: new Decoder(bytes).readInt53()}
return {value: new Decoder(bytes).readInt53(), datatype: "int"}
} else if (sizeTag % 16 === VALUE_TYPE.IEEE754) {
const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength)
if (bytes.byteLength === 4) {
return {value: view.getFloat32(0, true)} // true means little-endian
} else if (bytes.byteLength === 8) {
return {value: view.getFloat64(0, true)}
if (bytes.byteLength === 8) {
return {value: view.getFloat64(0, true), datatype: "float64"}
} else {
throw new RangeError(`Invalid length for floating point number: ${bytes.byteLength}`)
}
@ -363,8 +364,8 @@ function decodeValueColumns(columns, colIndex, actorIds, result) {
* Encodes an array of operations in a set of columns. The operations need to
* be parsed with `parseAllOpIds()` beforehand. If `forDocument` is true, we use
* the column structure of a whole document, otherwise we use the column
* structure for an individual change. Returns an array of `{id, name, encoder}`
* objects.
* structure for an individual change. Returns an array of
* `{columnId, columnName, encoder}` objects.
*/
function encodeOps(ops, forDocument) {
const columns = {
@ -429,9 +430,17 @@ function encodeOps(ops, forDocument) {
let columnList = []
for (let {columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) {
if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]})
if (columns[columnName]) columnList.push({columnId, columnName, encoder: columns[columnName]})
}
return columnList.sort((a, b) => a.columnId - b.columnId)
}
function validDatatype(value, datatype) {
if (datatype === undefined) {
return (typeof value === 'string' || typeof value === 'boolean' || value === null)
} else {
return typeof value === 'number'
}
return columnList.sort((a, b) => a.id - b.id)
}
function expandMultiOps(ops, startOp, actor) {
@ -441,8 +450,10 @@ function expandMultiOps(ops, startOp, actor) {
if (op.action === 'set' && op.values && op.insert) {
if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty')
let lastElemId = op.elemId
const datatype = op.datatype
for (const value of op.values) {
expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, value, pred: [], insert: true})
if (!validDatatype(value, datatype)) throw new RangeError(`Decode failed: bad value/datatype association (${value},${datatype})`)
expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, datatype, value, pred: [], insert: true})
lastElemId = `${opNum}@${actor}`
opNum += 1
}
@ -616,7 +627,7 @@ function encodeColumnInfo(encoder, columns) {
const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0)
encoder.appendUint53(nonEmptyColumns.length)
for (let column of nonEmptyColumns) {
encoder.appendUint53(column.id)
encoder.appendUint53(column.columnId)
encoder.appendUint53(column.encoder.buffer.byteLength)
}
}
@ -696,17 +707,6 @@ function decodeContainerHeader(decoder, computeHash) {
return header
}
/**
* Returns the checksum of a change (bytes 4 to 7) as a 32-bit unsigned integer.
*/
function getChangeChecksum(change) {
if (change[0] !== MAGIC_BYTES[0] || change[1] !== MAGIC_BYTES[1] ||
change[2] !== MAGIC_BYTES[2] || change[3] !== MAGIC_BYTES[3]) {
throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83')
}
return ((change[4] << 24) | (change[5] << 16) | (change[6] << 8) | change[7]) >>> 0
}
function encodeChange(changeObj) {
const { changes, actorIds } = parseAllOpIds([changeObj], true)
const change = changes[0]
@ -868,76 +868,6 @@ function sortOpIds(a, b) {
return 0
}
function groupDocumentOps(changes) {
let byObjectId = {}, byReference = {}, objectType = {}
for (let change of changes) {
for (let i = 0; i < change.ops.length; i++) {
const op = change.ops[i], opId = `${op.id.counter}@${op.id.actorId}`
const objectId = (op.obj === '_root') ? '_root' : `${op.obj.counter}@${op.obj.actorId}`
if (op.action.startsWith('make')) {
objectType[opId] = op.action
if (op.action === 'makeList' || op.action === 'makeText') {
byReference[opId] = {'_head': []}
}
}
let key
if (objectId === '_root' || objectType[objectId] === 'makeMap' || objectType[objectId] === 'makeTable') {
key = op.key
} else if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') {
if (op.insert) {
key = opId
const ref = (op.elemId === '_head') ? '_head' : `${op.elemId.counter}@${op.elemId.actorId}`
byReference[objectId][ref].push(opId)
byReference[objectId][opId] = []
} else {
key = `${op.elemId.counter}@${op.elemId.actorId}`
}
} else {
throw new RangeError(`Unknown object type for object ${objectId}`)
}
if (!byObjectId[objectId]) byObjectId[objectId] = {}
if (!byObjectId[objectId][key]) byObjectId[objectId][key] = {}
byObjectId[objectId][key][opId] = op
op.succ = []
for (let pred of op.pred) {
const predId = `${pred.counter}@${pred.actorId}`
if (!byObjectId[objectId][key][predId]) {
throw new RangeError(`No predecessor operation ${predId}`)
}
byObjectId[objectId][key][predId].succ.push(op.id)
}
}
}
let ops = []
for (let objectId of Object.keys(byObjectId).sort(sortOpIds)) {
let keys = []
if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') {
let stack = ['_head']
while (stack.length > 0) {
const key = stack.pop()
if (key !== '_head') keys.push(key)
for (let opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId)
}
} else {
// FIXME JavaScript sorts based on UTF-16 encoding. We should change this to use the UTF-8
// encoding instead (the sort order will be different beyond the basic multilingual plane)
keys = Object.keys(byObjectId[objectId]).sort()
}
for (let key of keys) {
for (let opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) {
const op = byObjectId[objectId][key][opId]
if (op.action !== 'del') ops.push(op)
}
}
}
return ops
}
/**
* Takes a set of operations `ops` loaded from an encoded document, and
* reconstructs the changes that they originally came from.
@ -1012,57 +942,6 @@ function groupChangeOps(changes, ops) {
}
}
function encodeDocumentChanges(changes) {
const columns = { // see DOCUMENT_COLUMNS
actor : new RLEEncoder('uint'),
seq : new DeltaEncoder(),
maxOp : new DeltaEncoder(),
time : new DeltaEncoder(),
message : new RLEEncoder('utf8'),
depsNum : new RLEEncoder('uint'),
depsIndex : new DeltaEncoder(),
extraLen : new RLEEncoder('uint'),
extraRaw : new Encoder()
}
let indexByHash = {} // map from change hash to its index in the changes array
let heads = {} // change hashes that are not a dependency of any other change
for (let i = 0; i < changes.length; i++) {
const change = changes[i]
indexByHash[change.hash] = i
heads[change.hash] = true
columns.actor.appendValue(change.actorNum)
columns.seq.appendValue(change.seq)
columns.maxOp.appendValue(change.startOp + change.ops.length - 1)
columns.time.appendValue(change.time)
columns.message.appendValue(change.message)
columns.depsNum.appendValue(change.deps.length)
for (let dep of change.deps) {
if (typeof indexByHash[dep] !== 'number') {
throw new RangeError(`Unknown dependency hash: ${dep}`)
}
columns.depsIndex.appendValue(indexByHash[dep])
if (heads[dep]) delete heads[dep]
}
if (change.extraBytes) {
columns.extraLen.appendValue(change.extraBytes.byteLength << 4 | VALUE_TYPE.BYTES)
columns.extraRaw.appendRawBytes(change.extraBytes)
} else {
columns.extraLen.appendValue(VALUE_TYPE.BYTES) // zero-length byte array
}
}
let changesColumns = []
for (let {columnName, columnId} of DOCUMENT_COLUMNS) {
changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]})
}
changesColumns.sort((a, b) => a.id - b.id)
return { changesColumns, heads: Object.keys(heads).sort() }
}
function decodeDocumentChanges(changes, expectedHeads) {
let heads = {} // change hashes that are not a dependency of any other change
for (let i = 0; i < changes.length; i++) {
@ -1101,13 +980,8 @@ function decodeDocumentChanges(changes, expectedHeads) {
}
}
/**
* Transforms a list of changes into a binary representation of the document state.
*/
function encodeDocument(binaryChanges) {
const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false)
const { changesColumns, heads } = encodeDocumentChanges(changes)
const opsColumns = encodeOps(groupDocumentOps(changes), true)
function encodeDocumentHeader(doc) {
const { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes } = doc
for (let column of changesColumns) deflateColumn(column)
for (let column of opsColumns) deflateColumn(column)
@ -1124,6 +998,8 @@ function encodeDocument(binaryChanges) {
encodeColumnInfo(encoder, opsColumns)
for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer)
for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer)
for (let index of headsIndexes) encoder.appendUint53(index)
if (extraBytes) encoder.appendRawBytes(extraBytes)
}).bytes
}
@ -1138,7 +1014,7 @@ function decodeDocumentHeader(buffer) {
for (let i = 0; i < numActors; i++) {
actorIds.push(decoder.readHexString())
}
const heads = [], numHeads = decoder.readUint53()
const heads = [], headsIndexes = [], numHeads = decoder.readUint53()
for (let i = 0; i < numHeads; i++) {
heads.push(bytesToHexString(decoder.readRawBytes(32)))
}
@ -1153,9 +1029,12 @@ function decodeDocumentHeader(buffer) {
opsColumns[i].buffer = decoder.readRawBytes(opsColumns[i].bufferLen)
inflateColumn(opsColumns[i])
}
if (!decoder.done) {
for (let i = 0; i < numHeads; i++) headsIndexes.push(decoder.readUint53())
}
const extraBytes = decoder.readRawBytes(decoder.buf.byteLength - decoder.offset)
return { changesColumns, opsColumns, actorIds, heads, extraBytes }
return { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes }
}
function decodeDocument(buffer) {
@ -1173,7 +1052,7 @@ function decodeDocument(buffer) {
function deflateColumn(column) {
if (column.encoder.buffer.byteLength >= DEFLATE_MIN_SIZE) {
column.encoder = {buffer: pako.deflateRaw(column.encoder.buffer)}
column.id |= COLUMN_TYPE_DEFLATE
column.columnId |= COLUMN_TYPE_DEFLATE
}
}
@ -1187,229 +1066,9 @@ function inflateColumn(column) {
}
}
/**
* Takes all the operations for the same property (i.e. the same key in a map, or the same list
* element) and mutates the object patch to reflect the current value(s) of that property. There
* might be multiple values in the case of a conflict. `objects` is a map from objectId to the
* patch for that object. `property` contains `objId`, `key`, a list of `ops`, and `index` (the
* current list index if the object is a list). Returns true if one or more values are present,
* or false if the property has been deleted.
*/
function addPatchProperty(objects, property) {
let values = {}, counter = null
for (let op of property.ops) {
// Apply counters and their increments regardless of the number of successor operations
if (op.actionName === 'set' && op.value.datatype === 'counter') {
if (!counter) counter = {opId: op.opId, value: 0, succ: {}}
counter.value += op.value.value
for (let succId of op.succ) counter.succ[succId] = true
} else if (op.actionName === 'inc') {
if (!counter) throw new RangeError(`inc operation ${op.opId} without a counter`)
counter.value += op.value.value
delete counter.succ[op.opId]
for (let succId of op.succ) counter.succ[succId] = true
} else if (op.succ.length === 0) { // Ignore any ops that have been overwritten
if (op.actionName.startsWith('make')) {
values[op.opId] = objects[op.opId]
} else if (op.actionName === 'set') {
values[op.opId] = {value: op.value.value, type: 'value'}
if (op.value.datatype) {
values[op.opId].datatype = op.value.datatype
}
} else if (op.actionName === 'link') {
// NB. This assumes that the ID of the child object is greater than the ID of the current
// object. This is true as long as link operations are only used to redo undone make*
// operations, but it will cease to be true once subtree moves are allowed.
if (!op.childId) throw new RangeError(`link operation ${op.opId} without a childId`)
values[op.opId] = objects[op.childId]
} else {
throw new RangeError(`Unexpected action type: ${op.actionName}`)
}
}
}
// If the counter had any successor operation that was not an increment, that means the counter
// must have been deleted, so we omit it from the patch.
if (counter && Object.keys(counter.succ).length === 0) {
values[counter.opId] = {type: 'value', value: counter.value, datatype: 'counter'}
}
if (Object.keys(values).length > 0) {
let obj = objects[property.objId]
if (obj.type === 'map' || obj.type === 'table') {
obj.props[property.key] = values
} else if (obj.type === 'list' || obj.type === 'text') {
makeListEdits(obj, values, property.key, property.index)
}
return true
} else {
return false
}
}
/**
* When constructing a patch to instantiate a loaded document, this function adds the edits to
* insert one list element. Usually there is one value, but in the case of a conflict there may be
* several values. `elemId` is the ID of the list element, and `index` is the list index at which
* the value(s) should be placed.
*/
function makeListEdits(list, values, elemId, index) {
let firstValue = true
const opIds = Object.keys(values).sort((id1, id2) => compareParsedOpIds(parseOpId(id1), parseOpId(id2)))
for (const opId of opIds) {
if (firstValue) {
list.edits.push({action: 'insert', value: values[opId], elemId, opId, index})
} else {
list.edits.push({action: 'update', value: values[opId], opId, index})
}
firstValue = false
}
}
/**
* Recursively walks the patch tree, calling appendEdit on every list edit in order to consense
* consecutive sequences of insertions into multi-inserts.
*/
function condenseEdits(diff) {
if (diff.type === 'list' || diff.type === 'text') {
diff.edits.forEach(e => condenseEdits(e.value))
let newEdits = diff.edits
diff.edits = []
for (const edit of newEdits) appendEdit(diff.edits, edit)
} else if (diff.type === 'map' || diff.type === 'table') {
for (const prop of Object.keys(diff.props)) {
for (const opId of Object.keys(diff.props[prop])) {
condenseEdits(diff.props[prop][opId])
}
}
}
}
/**
* Appends a list edit operation (insert, update, remove) to an array of existing operations. If the
* last existing operation can be extended (as a multi-op), we do that.
*/
function appendEdit(existingEdits, nextEdit) {
if (existingEdits.length === 0) {
existingEdits.push(nextEdit)
return
}
let lastEdit = existingEdits[existingEdits.length - 1]
if (lastEdit.action === 'insert' && nextEdit.action === 'insert' &&
lastEdit.index === nextEdit.index - 1 &&
lastEdit.value.type === 'value' && nextEdit.value.type === 'value' &&
lastEdit.elemId === lastEdit.opId && nextEdit.elemId === nextEdit.opId &&
opIdDelta(lastEdit.elemId, nextEdit.elemId, 1)) {
lastEdit.action = 'multi-insert'
lastEdit.values = [lastEdit.value.value, nextEdit.value.value]
delete lastEdit.value
delete lastEdit.opId
} else if (lastEdit.action === 'multi-insert' && nextEdit.action === 'insert' &&
lastEdit.index + lastEdit.values.length === nextEdit.index &&
nextEdit.value.type === 'value' && nextEdit.elemId === nextEdit.opId &&
opIdDelta(lastEdit.elemId, nextEdit.elemId, lastEdit.values.length)) {
lastEdit.values.push(nextEdit.value.value)
} else if (lastEdit.action === 'remove' && nextEdit.action === 'remove' &&
lastEdit.index === nextEdit.index) {
lastEdit.count += nextEdit.count
} else {
existingEdits.push(nextEdit)
}
}
/**
* Returns true if the two given operation IDs have the same actor ID, and the counter of `id2` is
* exactly `delta` greater than the counter of `id1`.
*/
function opIdDelta(id1, id2, delta = 1) {
const parsed1 = parseOpId(id1), parsed2 = parseOpId(id2)
return parsed1.actorId === parsed2.actorId && parsed1.counter + delta === parsed2.counter
}
/**
* Parses the document (in compressed binary format) given as `documentBuffer`
* and returns a patch that can be sent to the frontend to instantiate the
* current state of that document.
*/
function constructPatch(documentBuffer) {
const { opsColumns, actorIds } = decodeDocumentHeader(documentBuffer)
const col = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce(
(acc, col) => Object.assign(acc, {[col.columnName]: col.decoder}), {})
let objects = {_root: {objectId: '_root', type: 'map', props: {}}}
let property = null
while (!col.idActor.done) {
const opId = `${col.idCtr.readValue()}@${actorIds[col.idActor.readValue()]}`
const action = col.action.readValue(), actionName = ACTIONS[action]
if (action % 2 === 0) { // even-numbered actions are object creation
const type = OBJECT_TYPE[actionName] || 'unknown'
if (type === 'list' || type === 'text') {
objects[opId] = {objectId: opId, type, edits: []}
} else {
objects[opId] = {objectId: opId, type, props: {}}
}
}
const objActor = col.objActor.readValue(), objCtr = col.objCtr.readValue()
const objId = objActor === null ? '_root' : `${objCtr}@${actorIds[objActor]}`
let obj = objects[objId]
if (!obj) throw new RangeError(`Operation for nonexistent object: ${objId}`)
const keyActor = col.keyActor.readValue(), keyCtr = col.keyCtr.readValue()
const keyStr = col.keyStr.readValue(), insert = !!col.insert.readValue()
const chldActor = col.chldActor.readValue(), chldCtr = col.chldCtr.readValue()
const childId = chldActor === null ? null : `${chldCtr}@${actorIds[chldActor]}`
const sizeTag = col.valLen.readValue()
const rawValue = col.valRaw.readRawBytes(sizeTag >> 4)
const value = decodeValue(sizeTag, rawValue)
const succNum = col.succNum.readValue()
let succ = []
for (let i = 0; i < succNum; i++) {
succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`)
}
if (!actionName || obj.type === 'unknown') continue
let key
if (obj.type === 'list' || obj.type === 'text') {
if (keyCtr === null || (keyCtr === 0 && !insert)) {
throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`)
}
key = insert ? opId : `${keyCtr}@${actorIds[keyActor]}`
} else {
if (keyStr === null) {
throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`)
}
key = keyStr
}
if (!property || property.objId !== objId || property.key !== key) {
let index = 0
if (property) {
index = property.index
if (addPatchProperty(objects, property)) index += 1
if (property.objId !== objId) index = 0
}
property = {objId, key, index, ops: []}
}
property.ops.push({opId, actionName, value, childId, succ})
}
if (property) addPatchProperty(objects, property)
condenseEdits(objects._root)
return objects._root
}
module.exports = {
COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS,
COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS, DOCUMENT_COLUMNS,
encoderByColumnId, decoderByColumnId, makeDecoders, decodeValue,
splitContainers, encodeChange, decodeChangeColumns, decodeChange, decodeChangeMeta, decodeChanges,
decodeDocumentHeader, encodeDocument, decodeDocument,
getChangeChecksum, appendEdit, constructPatch
encodeDocumentHeader, decodeDocumentHeader, decodeDocument
}

View file

@ -41,6 +41,15 @@ function equalBytes(array1, array2) {
return true
}
module.exports = {
isObject, copyObject, parseOpId, equalBytes
/**
* Creates an array containing the value `null` repeated `length` times.
*/
function createArrayOfNulls(length) {
const array = new Array(length)
for (let i = 0; i < length; i++) array[i] = null
return array
}
module.exports = {
isObject, copyObject, parseOpId, equalBytes, createArrayOfNulls
}

View file

@ -16,11 +16,10 @@
* last sync to disk), and we fall back to sending the entire document in this case.
*/
//const Backend = require('./backend')
const Backend = {} //require('./backend')
const Backend = null //require('./backend')
const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding')
const { decodeChangeMeta } = require('./columnar')
const { copyObject } = require('../src/common')
const { copyObject } = require('./common')
const HASH_SIZE = 32 // 256 bits = 32 bytes
const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification

View file

@ -1,9 +1,10 @@
const assert = require('assert')
//const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')
const Automerge = require('../src')
const { assertEqualsOneOf } = require('./helpers')
const { decodeChange } = require('../src/columnar')
//const { decodeChange } = Automerge
import * as assert from 'assert'
import * as Automerge from '../src'
import { assertEqualsOneOf } from './helpers'
import { decodeChange } from './legacy/columnar'
import * as AutomergeWASM from "automerge-wasm"
Automerge.use(AutomergeWASM)
const UUID_PATTERN = /^[0-9a-f]{32}$/
const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/
@ -810,11 +811,12 @@ describe('Automerge', () => {
})
describe('concurrent use', () => {
let s1, s2, s3
let s1, s2, s3, s4
beforeEach(() => {
s1 = Automerge.init()
s2 = Automerge.init()
s3 = Automerge.init()
s4 = Automerge.init()
})
it('should merge concurrent updates of different properties', () => {

View file

@ -1,8 +1,11 @@
const assert = require('assert')
const Automerge = require('..');
const { BloomFilter } = require('../src/sync')
const { decodeChangeMeta } = require('../src/columnar')
const { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } = Automerge
import * as assert from 'assert'
import * as Automerge from '../src'
import { BloomFilter } from './legacy/sync'
import { decodeChangeMeta } from './legacy/columnar'
import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src"
import * as AutomergeWASM from "automerge-wasm"
Automerge.use(AutomergeWASM)
function inspect(a) {
const util = require("util");
@ -240,6 +243,7 @@ describe('Data sync protocol', () => {
it('should assume sent changes were recieved until we hear otherwise', () => {
let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
let s1 = initSyncState(), message = null
let s2
n1 = Automerge.change(n1, {time: 0}, doc => doc.items = [])
;[n1, n2, s1, s2 ] = sync(n1, n2)

View file

@ -1,6 +1,9 @@
const assert = require('assert')
const Automerge = require('..')
const { assertEqualsOneOf } = require('./helpers')
import * as assert from 'assert'
import * as Automerge from '../src'
import { assertEqualsOneOf } from './helpers'
import * as AutomergeWASM from "automerge-wasm"
Automerge.use(AutomergeWASM)
function attributeStateToAttributes(accumulatedAttributes) {
const attributes = {}
@ -600,7 +603,8 @@ describe('Automerge.Text', () => {
applyDeltaDocToAutomergeText(delta, doc)
})
assert.strictEqual(s2.text.join(''), 'Hello reader')
//assert.strictEqual(s2.text.join(''), 'Hello reader')
assert.strictEqual(s2.text.toString(), 'Hello reader')
})
it('should apply an insert with control characters', () => {

View file

@ -1,5 +1,8 @@
const assert = require('assert')
const Automerge = require('..')
import * as assert from 'assert'
import * as Automerge from '../src'
import * as AutomergeWASM from "automerge-wasm"
Automerge.use(AutomergeWASM)
const uuid = Automerge.uuid

View file

@ -0,0 +1,22 @@
{
"compilerOptions": {
"target": "es2016",
"sourceMap": false,
"declaration": false,
"resolveJsonModule": true,
"module": "commonjs",
"moduleResolution": "node",
"noImplicitAny": false,
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"noFallthroughCasesInSwitch": true,
"skipLibCheck": false,
"outDir": "./dist"
},
"include": [ "src/**/*" ],
"exclude": [
"./dist/**/*",
"./node_modules"
]
}

3
automerge-js/tslint.json Normal file
View file

@ -0,0 +1,3 @@
{
"extends": "tslint:recommended"
}

View file

@ -0,0 +1,3 @@
web
nodejs
examples

View file

@ -0,0 +1,11 @@
module.exports = {
root: true,
parser: '@typescript-eslint/parser',
plugins: [
'@typescript-eslint',
],
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
],
};

View file

@ -10,12 +10,12 @@
},
"author": "",
"dependencies": {
"automerge-wasm": "^0.1.2"
"automerge-wasm": "file:automerge-wasm-0.1.3.tgz"
},
"devDependencies": {
"serve": "^13.0.2",
"webpack": "^5.72.1",
"webpack-cli": "^4.9.2",
"webpack-node-externals": "^3.0.0",
"serve": "^13.0.2"
"webpack-node-externals": "^3.0.0"
}
}

View file

@ -2,10 +2,13 @@ import init, { create } from "automerge-wasm"
// hello world code that will run correctly on web or node
init().then(_ => {
const doc = create()
init().then((Automerge) => {
console.log("Automerge=", Automerge)
console.log("create=", create)
const doc = Automerge.create()
doc.put("/", "hello", "world")
const result = doc.materialize("/")
//const result = xxx
if (typeof document !== 'undefined') {
// browser

View file

@ -7,7 +7,8 @@ export type Prop = string | number;
export type Hash = string;
export type Heads = Hash[];
export type Value = string | number | boolean | null | Date | Uint8Array
export type ObjType = string | Array<Value> | Object
export type MaterializeValue = { [key:string]: MaterializeValue } | Array<MaterializeValue> | Value
export type ObjType = string | Array<ObjType | Value> | { [key: string]: ObjType | Value }
export type FullValue =
["str", string] |
["int", number] |
@ -17,12 +18,27 @@ export type FullValue =
["timestamp", Date] |
["counter", number] |
["bytes", Uint8Array] |
["null", Uint8Array] |
["null", null] |
["map", ObjID] |
["list", ObjID] |
["text", ObjID] |
["table", ObjID]
export type FullValueWithId =
["str", string, ObjID ] |
["int", number, ObjID ] |
["uint", number, ObjID ] |
["f64", number, ObjID ] |
["boolean", boolean, ObjID ] |
["timestamp", Date, ObjID ] |
["counter", number, ObjID ] |
["bytes", Uint8Array, ObjID ] |
["null", null, ObjID ] |
["map", ObjID ] |
["list", ObjID] |
["text", ObjID] |
["table", ObjID]
export enum ObjTypeName {
list = "list",
map = "map",
@ -44,10 +60,15 @@ export type Datatype =
"text" |
"list";
export type SyncHave = {
lastSync: Heads,
bloom: Uint8Array,
}
export type DecodedSyncMessage = {
heads: Heads,
need: Heads,
have: any[]
have: SyncHave[]
changes: Change[]
}
@ -89,27 +110,43 @@ export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage;
export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage;
export function encodeSyncState(state: SyncState): Uint8Array;
export function decodeSyncState(data: Uint8Array): SyncState;
export function exportSyncState(state: SyncState): JsSyncState;
export function importSyncState(state: JsSyncState): SyncState;
export class API {
create(actor?: Actor): Automerge;
load(data: Uint8Array, actor?: Actor): Automerge;
encodeChange(change: DecodedChange): Change;
decodeChange(change: Change): DecodedChange;
initSyncState(): SyncState;
encodeSyncMessage(message: DecodedSyncMessage): SyncMessage;
decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage;
encodeSyncState(state: SyncState): Uint8Array;
decodeSyncState(data: Uint8Array): SyncState;
exportSyncState(state: SyncState): JsSyncState;
importSyncState(state: JsSyncState): SyncState;
}
export class Automerge {
// change state
put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined;
put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): void;
putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID;
insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined;
insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): void;
insertObject(obj: ObjID, index: number, value: ObjType): ObjID;
push(obj: ObjID, value: Value, datatype?: Datatype): undefined;
push(obj: ObjID, value: Value, datatype?: Datatype): void;
pushObject(obj: ObjID, value: ObjType): ObjID;
splice(obj: ObjID, start: number, delete_count: number, text?: string | Array<Value>): ObjID[] | undefined;
increment(obj: ObjID, prop: Prop, value: number): void;
delete(obj: ObjID, prop: Prop): void;
// returns a single value - if there is a conflict return the winner
get(obj: ObjID, prop: any, heads?: Heads): FullValue | null;
get(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null;
// return all values in case of a conflict
getAll(obj: ObjID, arg: any, heads?: Heads): FullValue[];
getAll(obj: ObjID, arg: Prop, heads?: Heads): FullValueWithId[];
keys(obj: ObjID, heads?: Heads): string[];
text(obj: ObjID, heads?: Heads): string;
length(obj: ObjID, heads?: Heads): number;
materialize(obj?: ObjID, heads?: Heads): any;
materialize(obj?: ObjID, heads?: Heads): MaterializeValue;
// transactions
commit(message?: string, time?: number): Hash;
@ -148,17 +185,23 @@ export class Automerge {
// dump internal state to console.log
dump(): void;
}
// dump internal state to a JS object
toJS(): any;
export class JsSyncState {
sharedHeads: Heads;
lastSentHeads: Heads;
theirHeads: Heads | undefined;
theirHeed: Heads | undefined;
theirHave: SyncHave[] | undefined;
sentHashes: Heads;
}
export class SyncState {
free(): void;
clone(): SyncState;
lastSentHeads: any;
sentHashes: any;
readonly sharedHeads: any;
lastSentHeads: Heads;
sentHashes: Heads;
readonly sharedHeads: Heads;
}
export default function init (): Promise<any>;
export default function init (): Promise<API>;

View file

@ -3,4 +3,4 @@ module.exports = wasm
module.exports.load = module.exports.loadDoc
delete module.exports.loadDoc
Object.defineProperty(module.exports, "__esModule", { value: true });
module.exports.default = () => (new Promise((resolve,reject) => { resolve() }))
module.exports.default = () => (new Promise((resolve,reject) => { resolve(module.exports) }))

View file

@ -8,7 +8,7 @@
"description": "wasm-bindgen bindings to the automerge rust implementation",
"homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm",
"repository": "github:automerge/automerge-rs",
"version": "0.1.2",
"version": "0.1.3",
"license": "MIT",
"files": [
"README.md",
@ -26,24 +26,27 @@
"module": "./web/index.js",
"main": "./nodejs/index.js",
"scripts": {
"lint": "eslint test/*.ts",
"build": "cross-env PROFILE=dev TARGET=nodejs yarn target",
"release": "cross-env PROFILE=release yarn buildall",
"buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target",
"target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET && cp $TARGET-index.js $TARGET/index.js",
"test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts"
},
"dependencies": {},
"devDependencies": {
"@types/expect": "^24.3.0",
"@types/jest": "^27.4.0",
"@types/mocha": "^9.1.0",
"@types/node": "^17.0.13",
"@typescript-eslint/eslint-plugin": "^5.25.0",
"@typescript-eslint/parser": "^5.25.0",
"cross-env": "^7.0.3",
"eslint": "^8.16.0",
"fast-sha256": "^1.3.0",
"mocha": "^9.1.3",
"pako": "^2.0.4",
"rimraf": "^3.0.2",
"ts-mocha": "^9.0.2",
"typescript": "^4.5.5"
"typescript": "^4.6.4"
}
}

View file

@ -1,5 +1,6 @@
use automerge as am;
use automerge::transaction::Transactable;
use automerge::Patch;
use automerge::{Change, ChangeHash, Prop};
use js_sys::{Array, Object, Reflect, Uint8Array};
use std::collections::{BTreeSet, HashSet};
@ -7,7 +8,7 @@ use std::fmt::Display;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use crate::{ObjId, ScalarValue, Value};
use crate::{datatype, ObjId, ScalarValue, Value};
pub(crate) struct JS(pub(crate) JsValue);
pub(crate) struct AR(pub(crate) Array);
@ -346,6 +347,78 @@ pub(crate) fn to_objtype(
}
}
pub(crate) fn export_path(path: Vec<(ObjId, Prop)>, key: Prop) -> Array {
let path: Array = path.into_iter().map(|(_, p)| JsValue::from(p)).collect();
path.push(&key.into());
path
}
pub(crate) fn export_patches(patches: Vec<Patch>) -> Result<Array, JsValue> {
let result = Array::new();
for p in patches {
let patch = Array::new();
match p {
Patch::Put {
path, key, value, ..
} => {
js_set(&patch, "action", "put")?;
//js_set(&patch, "obj", obj.to_string())?;
js_set(&patch, "path", export_path(path, key))?;
//js_set(&patch, "key", key)?;
match value {
(Value::Object(obj_type), obj_id) => {
js_set(&patch, "datatype", obj_type.to_string())?;
js_set(&patch, "value", obj_id.to_string())?;
}
(Value::Scalar(value), _) => {
js_set(&patch, "datatype", datatype(&value))?;
js_set(&patch, "value", ScalarValue(value))?;
}
};
//js_set(&patch, "conflict", conflict)?;
}
Patch::Insert {
path, index, value, ..
} => {
js_set(&patch, "action", "insert")?;
//js_set(&patch, "obj", obj.to_string())?;
js_set(&patch, "path", export_path(path, index.into()))?;
//js_set(&patch, "key", index as f64)?;
match value {
(Value::Object(obj_type), obj_id) => {
js_set(&patch, "datatype", obj_type.to_string())?;
js_set(&patch, "value", obj_id.to_string())?;
}
(Value::Scalar(value), _) => {
js_set(&patch, "datatype", datatype(&value))?;
js_set(&patch, "value", ScalarValue(value))?;
}
};
}
Patch::Increment {
path, key, value, ..
} => {
js_set(&patch, "action", "increment")?;
//js_set(&patch, "obj", obj.to_string())?;
js_set(&patch, "path", export_path(path, key))?;
//js_set(&patch, "key", key)?;
js_set(&patch, "value", value.0 as f64)?;
}
Patch::Delete { path, key, .. } => {
js_set(&patch, "action", "delete")?;
//js_set(&patch, "obj", obj.to_string())?;
js_set(&patch, "path", export_path(path, key))?;
//js_set(&patch, "key", key)?;
}
}
result.push(&patch);
}
Ok(result)
}
pub(crate) fn get_heads(heads: Option<Array>) -> Option<Vec<ChangeHash>> {
let heads = heads?;
let heads: Result<Vec<ChangeHash>, _> = heads.iter().map(|j| j.into_serde()).collect();

View file

@ -28,10 +28,7 @@
#![allow(clippy::unused_unit)]
use am::transaction::CommitOptions;
use am::transaction::Transactable;
use am::ApplyOptions;
use automerge as am;
use automerge::Patch;
use automerge::VecOpObserver;
use automerge::{Change, ObjId, Prop, Value, ROOT};
use js_sys::{Array, Object, Uint8Array};
use std::convert::TryInto;
@ -43,8 +40,8 @@ mod sync;
mod value;
use interop::{
get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, to_js_err,
to_objtype, to_prop, AR, JS,
export_patches, get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at,
to_js_err, to_objtype, to_prop, AR, JS,
};
use sync::SyncState;
use value::{datatype, ScalarValue};
@ -64,7 +61,6 @@ static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
#[derive(Debug)]
pub struct Automerge {
doc: automerge::AutoCommit,
observer: Option<VecOpObserver>,
}
#[wasm_bindgen]
@ -75,28 +71,13 @@ impl Automerge {
let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec());
automerge.set_actor(a);
}
Ok(Automerge {
doc: automerge,
observer: None,
})
}
fn ensure_transaction_closed(&mut self) {
if self.doc.pending_ops() > 0 {
let mut opts = CommitOptions::default();
if let Some(observer) = self.observer.as_mut() {
opts.set_op_observer(observer);
}
self.doc.commit_with(opts);
}
Ok(Automerge { doc: automerge })
}
#[allow(clippy::should_implement_trait)]
pub fn clone(&mut self, actor: Option<String>) -> Result<Automerge, JsValue> {
self.ensure_transaction_closed();
let mut automerge = Automerge {
doc: self.doc.clone(),
observer: None,
};
if let Some(s) = actor {
let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
@ -106,10 +87,8 @@ impl Automerge {
}
pub fn fork(&mut self, actor: Option<String>) -> Result<Automerge, JsValue> {
self.ensure_transaction_closed();
let mut automerge = Automerge {
doc: self.doc.fork(),
observer: None,
};
if let Some(s) = actor {
let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
@ -123,7 +102,6 @@ impl Automerge {
let deps: Vec<_> = JS(heads).try_into()?;
let mut automerge = Automerge {
doc: self.doc.fork_at(&deps)?,
observer: None,
};
if let Some(s) = actor {
let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
@ -147,21 +125,12 @@ impl Automerge {
if let Some(time) = time {
commit_opts.set_time(time as i64);
}
if let Some(observer) = self.observer.as_mut() {
commit_opts.set_op_observer(observer);
}
let hash = self.doc.commit_with(commit_opts);
JsValue::from_str(&hex::encode(&hash.0))
}
pub fn merge(&mut self, other: &mut Automerge) -> Result<Array, JsValue> {
self.ensure_transaction_closed();
let options = if let Some(observer) = self.observer.as_mut() {
ApplyOptions::default().with_op_observer(observer)
} else {
ApplyOptions::default()
};
let heads = self.doc.merge_with(&mut other.doc, options)?;
let heads = self.doc.merge(&mut other.doc)?;
let heads: Array = heads
.iter()
.map(|h| JsValue::from_str(&hex::encode(&h.0)))
@ -424,18 +393,21 @@ impl Automerge {
}
#[wasm_bindgen(js_name = enablePatches)]
pub fn enable_patches(&mut self, enable: JsValue) -> Result<(), JsValue> {
pub fn enable_patches(&mut self, enable: JsValue) -> Result<Array, JsValue> {
let enable = enable
.as_bool()
.ok_or_else(|| to_js_err("expected boolean"))?;
if enable {
if self.observer.is_none() {
self.observer = Some(VecOpObserver::default());
}
self.doc
.enable_observer()
.map(|mut p| export_patches(p.take_patches()))
.unwrap_or_else(|| Ok(Array::new()))
} else {
self.observer = None;
self.doc
.disable_observer()
.map(|mut p| export_patches(p.take_patches()))
.unwrap_or_else(|| Ok(Array::new()))
}
Ok(())
}
#[wasm_bindgen(js_name = popPatches)]
@ -443,70 +415,8 @@ impl Automerge {
// transactions send out observer updates as they occur, not waiting for them to be
// committed.
// If we pop the patches then we won't be able to revert them.
self.ensure_transaction_closed();
let patches = self
.observer
.as_mut()
.map_or_else(Vec::new, |o| o.take_patches());
let result = Array::new();
for p in patches {
let patch = Object::new();
match p {
Patch::Put {
obj,
key,
value,
conflict,
} => {
js_set(&patch, "action", "put")?;
js_set(&patch, "obj", obj.to_string())?;
js_set(&patch, "key", key)?;
match value {
(Value::Object(obj_type), obj_id) => {
js_set(&patch, "datatype", obj_type.to_string())?;
js_set(&patch, "value", obj_id.to_string())?;
}
(Value::Scalar(value), _) => {
js_set(&patch, "datatype", datatype(&value))?;
js_set(&patch, "value", ScalarValue(value))?;
}
};
js_set(&patch, "conflict", conflict)?;
}
Patch::Insert { obj, index, value } => {
js_set(&patch, "action", "insert")?;
js_set(&patch, "obj", obj.to_string())?;
js_set(&patch, "key", index as f64)?;
match value {
(Value::Object(obj_type), obj_id) => {
js_set(&patch, "datatype", obj_type.to_string())?;
js_set(&patch, "value", obj_id.to_string())?;
}
(Value::Scalar(value), _) => {
js_set(&patch, "datatype", datatype(&value))?;
js_set(&patch, "value", ScalarValue(value))?;
}
};
}
Patch::Increment { obj, key, value } => {
js_set(&patch, "action", "increment")?;
js_set(&patch, "obj", obj.to_string())?;
js_set(&patch, "key", key)?;
js_set(&patch, "value", value.0)?;
}
Patch::Delete { obj, key } => {
js_set(&patch, "action", "delete")?;
js_set(&patch, "obj", obj.to_string())?;
js_set(&patch, "key", key)?;
}
}
result.push(&patch);
}
Ok(result)
export_patches(self.doc.take_patches())
}
pub fn length(&self, obj: JsValue, heads: Option<Array>) -> Result<f64, JsValue> {
@ -526,51 +436,31 @@ impl Automerge {
}
pub fn save(&mut self) -> Uint8Array {
self.ensure_transaction_closed();
Uint8Array::from(self.doc.save().as_slice())
}
#[wasm_bindgen(js_name = saveIncremental)]
pub fn save_incremental(&mut self) -> Uint8Array {
self.ensure_transaction_closed();
let bytes = self.doc.save_incremental();
Uint8Array::from(bytes.as_slice())
}
#[wasm_bindgen(js_name = loadIncremental)]
pub fn load_incremental(&mut self, data: Uint8Array) -> Result<f64, JsValue> {
self.ensure_transaction_closed();
let data = data.to_vec();
let options = if let Some(observer) = self.observer.as_mut() {
ApplyOptions::default().with_op_observer(observer)
} else {
ApplyOptions::default()
};
let len = self
.doc
.load_incremental_with(&data, options)
.map_err(to_js_err)?;
let len = self.doc.load_incremental(&data).map_err(to_js_err)?;
Ok(len as f64)
}
#[wasm_bindgen(js_name = applyChanges)]
pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> {
self.ensure_transaction_closed();
let changes: Vec<_> = JS(changes).try_into()?;
let options = if let Some(observer) = self.observer.as_mut() {
ApplyOptions::default().with_op_observer(observer)
} else {
ApplyOptions::default()
};
self.doc
.apply_changes_with(changes, options)
.map_err(to_js_err)?;
self.doc.apply_changes(changes).map_err(to_js_err)?;
Ok(())
}
#[wasm_bindgen(js_name = getChanges)]
pub fn get_changes(&mut self, have_deps: JsValue) -> Result<Array, JsValue> {
self.ensure_transaction_closed();
let deps: Vec<_> = JS(have_deps).try_into()?;
let changes = self.doc.get_changes(&deps)?;
let changes: Array = changes
@ -582,7 +472,6 @@ impl Automerge {
#[wasm_bindgen(js_name = getChangeByHash)]
pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result<JsValue, JsValue> {
self.ensure_transaction_closed();
let hash = hash.into_serde().map_err(to_js_err)?;
let change = self.doc.get_change_by_hash(&hash);
if let Some(c) = change {
@ -594,7 +483,6 @@ impl Automerge {
#[wasm_bindgen(js_name = getChangesAdded)]
pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result<Array, JsValue> {
self.ensure_transaction_closed();
let changes = self.doc.get_changes_added(&mut other.doc);
let changes: Array = changes
.iter()
@ -605,7 +493,6 @@ impl Automerge {
#[wasm_bindgen(js_name = getHeads)]
pub fn get_heads(&mut self) -> Array {
self.ensure_transaction_closed();
let heads = self.doc.get_heads();
let heads: Array = heads
.iter()
@ -622,7 +509,6 @@ impl Automerge {
#[wasm_bindgen(js_name = getLastLocalChange)]
pub fn get_last_local_change(&mut self) -> Result<Uint8Array, JsValue> {
self.ensure_transaction_closed();
if let Some(change) = self.doc.get_last_local_change() {
Ok(Uint8Array::from(change.raw_bytes()))
} else {
@ -631,13 +517,11 @@ impl Automerge {
}
pub fn dump(&mut self) {
self.ensure_transaction_closed();
self.doc.dump()
}
#[wasm_bindgen(js_name = getMissingDeps)]
pub fn get_missing_deps(&mut self, heads: Option<Array>) -> Result<Array, JsValue> {
self.ensure_transaction_closed();
let heads = get_heads(heads).unwrap_or_default();
let deps = self.doc.get_missing_deps(&heads);
let deps: Array = deps
@ -653,23 +537,16 @@ impl Automerge {
state: &mut SyncState,
message: Uint8Array,
) -> Result<(), JsValue> {
self.ensure_transaction_closed();
let message = message.to_vec();
let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?;
let options = if let Some(observer) = self.observer.as_mut() {
ApplyOptions::default().with_op_observer(observer)
} else {
ApplyOptions::default()
};
self.doc
.receive_sync_message_with(&mut state.0, message, options)
.receive_sync_message(&mut state.0, message)
.map_err(to_js_err)?;
Ok(())
}
#[wasm_bindgen(js_name = generateSyncMessage)]
pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result<JsValue, JsValue> {
self.ensure_transaction_closed();
if let Some(message) = self.doc.generate_sync_message(&mut state.0) {
Ok(Uint8Array::from(message.encode().as_slice()).into())
} else {
@ -829,17 +706,23 @@ pub fn init(actor: Option<String>) -> Result<Automerge, JsValue> {
#[wasm_bindgen(js_name = loadDoc)]
pub fn load(data: Uint8Array, actor: Option<String>) -> Result<Automerge, JsValue> {
let data = data.to_vec();
let observer = None;
let options = ApplyOptions::<()>::default();
let mut automerge = am::AutoCommit::load_with(&data, options).map_err(to_js_err)?;
let mut automerge = am::AutoCommit::load(&data).map_err(to_js_err)?;
if let Some(s) = actor {
let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
automerge.set_actor(actor);
}
Ok(Automerge {
doc: automerge,
observer,
})
Ok(Automerge { doc: automerge })
}
#[wasm_bindgen(js_name = loadWithPatches)]
pub fn load_with_patches(data: Uint8Array, actor: Option<String>) -> Result<Automerge, JsValue> {
let data = data.to_vec();
let mut automerge = am::AutoCommit::load_with_observer(&data).map_err(to_js_err)?;
if let Some(s) = actor {
let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
automerge.set_actor(actor);
}
Ok(Automerge { doc: automerge })
}
#[wasm_bindgen(js_name = encodeChange)]

View file

@ -0,0 +1,170 @@
import { describe, it } from 'mocha';
//@ts-ignore
import assert from 'assert'
//@ts-ignore
import init, { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..'
import { Prop } from '..';
function patchValue(patch: any) : any {
switch (patch.datatype) {
case "map":
return {}
case "list":
return []
case "text":
return ""
default:
return patch.value
}
}
function patchTextValue(patch: any) : any {
if (typeof patch.value === "string" && patch.value.length == 1) {
return patch.value
} else {
return "\uFFFC"
}
}
function applyPatch(obj: any, path: Prop[], patch: any) : any {
let prop = path.shift();
if (typeof prop === 'number' && Array.isArray(obj)) {
return applyPatchToArray(obj, prop, path, patch)
}
if (typeof prop === 'number' && typeof obj === 'string') {
return applyPatchToText(obj, prop, path, patch)
}
if (typeof prop === 'string' && typeof obj === 'object') {
return applyPatchToObject(obj, prop, path, patch)
}
return obj
}
type Obj = { [key:string]: any }
function applyPatchToObject(obj: Obj, prop: string, path: Prop[], patch: any) : any {
if (path.length === 0) {
switch (patch.action) {
case "increment":
return { ... obj, [prop]: obj[prop] + patchValue(patch) }
case "put":
return { ... obj, [prop]: patchValue(patch) }
case "delete":
let tmp = { ... obj }
delete tmp[prop]
return tmp
default:
throw new RangeError(`Invalid patch ${patch}`)
}
} else {
return { ... obj, [prop]: applyPatch(obj[prop], path, patch) }
}
}
function applyPatchToArray(obj: Array<any>, prop: number, path: Prop[], patch: any) : any {
if (path.length === 0) {
switch (patch.action) {
case "increment":
return [ ... obj.slice(0,prop), obj[prop] + patchValue(patch), ... obj.slice(prop + 1) ]
case "put":
return [ ... obj.slice(0,prop), patchValue(patch), ... obj.slice(prop + 1) ]
case "insert":
return [ ... obj.slice(0,prop), patchValue(patch), ... obj.slice(prop) ]
case "delete":
return [... obj.slice(0,prop), ... obj.slice(prop + 1) ]
default:
throw new RangeError(`Invalid patch ${patch}`)
}
} else {
return [ ... obj.slice(0,prop), applyPatch(obj[prop], path, patch), ... obj.slice(prop + 1) ]
}
}
function applyPatchToText(obj: string, prop: number, path: Prop[], patch: any) : any {
if (path.length === 0) {
switch (patch.action) {
case "increment":
return obj
case "put":
return obj.slice(0,prop) + patchTextValue(patch) + obj.slice(prop + 1)
case "insert":
return obj.slice(0,prop) + patchTextValue(patch) + obj.slice(prop)
case "delete":
return obj.slice(0,prop) + obj.slice(prop + 1)
default:
throw new RangeError(`Invalid patch ${patch}`)
}
} else {
return obj
}
}
function applyPatches(obj: any, patches: any) {
for (let patch of patches) {
obj = applyPatch(obj, patch.path, patch)
}
return obj
}
describe('Automerge', () => {
describe('patches', () => {
it.only('can apply nested patches', () => {
const doc1 = create()
doc1.enablePatches(true)
doc1.put("/", "str", "value")
doc1.put("/", "num", 0)
doc1.delete("/", "num")
doc1.put("/", "counter", 0, "counter")
doc1.increment("/", "counter", 100)
doc1.increment("/", "counter", 1)
doc1.put("/", "bin", new Uint8Array([1,2,3]))
doc1.put("/", "bool", true)
let sub = doc1.putObject("/", "sub", {})
let list = doc1.putObject("/", "list", [1,2,3,4,5,6])
doc1.push("/list", 100, "counter");
doc1.increment("/list", 6, 10);
let sublist = doc1.putObject("/sub", "list", [1,2,3,4,[ 1,2,3,[4,{ five: "six" } ] ] ])
doc1.put(sub, "str", "value")
doc1.put("/sub", "num", 0)
doc1.put("/sub", "bin", new Uint8Array([1,2,3]))
doc1.put("/sub", "bool", true)
let subsub = doc1.putObject("/sub", "sub", {})
doc1.put("/sub/sub", "num", 0)
doc1.put("/sub/sub", "bin", new Uint8Array([1,2,3]))
doc1.put("/sub/sub", "bool", true)
let patches = doc1.popPatches()
let js = applyPatches({}, patches)
assert.deepEqual(js,doc1.materialize("/"))
})
it.only('can handle deletes with nested patches', () => {
const doc1 = create()
doc1.enablePatches(true)
let list = doc1.putObject("/", "list", [1,2,3,['a','b','c']])
doc1.delete("/list", 1);
doc1.push("/list", 'hello');
let patches = doc1.popPatches()
let js = applyPatches({}, patches)
assert.deepEqual(js,doc1.materialize("/"))
})
it.only('can handle patches with deletes withlists holding objects', () => {
const doc1 = create()
doc1.enablePatches(true)
let list = doc1.putObject("/", "list", [1,2,3,[{n:1},{n:2},{n:3}]])
doc1.delete("/list", 1);
doc1.put("/list/2/0", "n", 100);
doc1.delete("/list", 1);
doc1.put("/list/1/1", "n", 200);
doc1.insertObject("/list/1", 3, {n:400})
let text = doc1.putObject("/", "text", "hello world");
doc1.insertObject("/text", 3, {n:1})
let patches = doc1.popPatches()
console.log(doc1.materialize("/"))
let js = applyPatches({}, patches)
assert.deepEqual(js,doc1.materialize("/"))
})
})
})

View file

@ -7,18 +7,18 @@ import init, { create, load } from '..'
describe('Automerge', () => {
describe('Readme Examples', () => {
it('Using the Library and Creating a Document (1)', () => {
let doc = create()
const doc = create()
doc.free()
})
it('Using the Library and Creating a Document (2)', (done) => {
init().then((_:any) => {
let doc = create()
const doc = create()
doc.free()
done()
})
})
it('Automerge Scalar Types (1)', () => {
let doc = create()
const doc = create()
doc.put("/", "prop1", 100) // int
doc.put("/", "prop2", 3.14) // f64
doc.put("/", "prop3", "hello world")
@ -40,7 +40,7 @@ describe('Automerge', () => {
doc.free()
})
it('Automerge Scalar Types (2)', () => {
let doc = create()
const doc = create()
doc.put("/", "prop1", 100, "int")
doc.put("/", "prop2", 100, "uint")
doc.put("/", "prop3", 100.5, "f64")
@ -54,37 +54,37 @@ describe('Automerge', () => {
doc.free()
})
it('Automerge Object Types (1)', () => {
let doc = create()
const doc = create()
// you can create an object by passing in the inital state - if blank pass in `{}`
// the return value is the Object Id
// these functions all return an object id
let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] })
let token = doc.putObject("/", "tokens", {})
const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] })
const token = doc.putObject("/", "tokens", {})
// lists can be made with javascript arrays
let birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"])
let bots = doc.putObject("/", "bots", [])
const birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"])
const bots = doc.putObject("/", "bots", [])
// text is initialized with a string
let notes = doc.putObject("/", "notes", "Hello world!")
const notes = doc.putObject("/", "notes", "Hello world!")
doc.free()
})
it('Automerge Object Types (2)', () => {
let doc = create()
const doc = create()
let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] })
const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] })
doc.put(config, "align", "right")
// Anywhere Object Ids are being used a path can also be used.
// The following two statements are equivalent:
let id = doc.get("/", "config")
const id = doc.get("/", "config")
if (id && id[0] === 'map') {
doc.put(id[1], "align", "right")
}
@ -98,14 +98,14 @@ describe('Automerge', () => {
doc.free()
})
it('Maps (1)', () => {
let doc = create()
let mymap = doc.putObject("_root", "mymap", { foo: "bar"})
const doc = create()
const mymap = doc.putObject("_root", "mymap", { foo: "bar"})
// make a new map with the foo key
doc.put(mymap, "bytes", new Uint8Array([1,2,3]))
// assign a byte array to key `bytes` of the mymap object
let submap = doc.putObject(mymap, "sub", {})
const submap = doc.putObject(mymap, "sub", {})
// make a new empty object and assign it to the key `sub` of mymap
assert.deepEqual(doc.keys(mymap),["bytes","foo","sub"])
@ -114,8 +114,8 @@ describe('Automerge', () => {
doc.free()
})
it('Lists (1)', () => {
let doc = create()
let items = doc.putObject("_root", "items", [10,"box"])
const doc = create()
const items = doc.putObject("_root", "items", [10,"box"])
// init a new list with two elements
doc.push(items, true) // push `true` to the end of the list
doc.putObject(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value
@ -130,13 +130,13 @@ describe('Automerge', () => {
doc.free()
})
it('Text (1)', () => {
let doc = create("aaaaaa")
let notes = doc.putObject("_root", "notes", "Hello world")
const doc = create("aaaaaa")
const notes = doc.putObject("_root", "notes", "Hello world")
doc.splice(notes, 6, 5, "everyone")
assert.deepEqual(doc.text(notes), "Hello everyone")
let obj = doc.insertObject(notes, 6, { hi: "there" })
const obj = doc.insertObject(notes, 6, { hi: "there" })
assert.deepEqual(doc.text(notes), "Hello \ufffceveryone")
assert.deepEqual(doc.get(notes, 6), ["map", obj])
@ -145,15 +145,15 @@ describe('Automerge', () => {
doc.free()
})
it('Querying Data (1)', () => {
let doc1 = create("aabbcc")
const doc1 = create("aabbcc")
doc1.put("_root", "key1", "val1")
let key2 = doc1.putObject("_root", "key2", [])
const key2 = doc1.putObject("_root", "key2", [])
assert.deepEqual(doc1.get("_root", "key1"), ["str", "val1"])
assert.deepEqual(doc1.get("_root", "key2"), ["list", "2@aabbcc"])
assert.deepEqual(doc1.keys("_root"), ["key1", "key2"])
let doc2 = doc1.fork("ffaaff")
const doc2 = doc1.fork("ffaaff")
// set a value concurrently
doc1.put("_root","key3","doc1val")
@ -167,11 +167,11 @@ describe('Automerge', () => {
doc1.free(); doc2.free()
})
it('Counters (1)', () => {
let doc1 = create("aaaaaa")
const doc1 = create("aaaaaa")
doc1.put("_root", "number", 0)
doc1.put("_root", "total", 0, "counter")
let doc2 = doc1.fork("bbbbbb")
const doc2 = doc1.fork("bbbbbb")
doc2.put("_root", "number", 10)
doc2.increment("_root", "total", 11)
@ -185,7 +185,7 @@ describe('Automerge', () => {
doc1.free(); doc2.free()
})
it('Transactions (1)', () => {
let doc = create()
const doc = create()
doc.put("_root", "key", "val1")
@ -209,13 +209,13 @@ describe('Automerge', () => {
doc.free()
})
it('Viewing Old Versions of the Document (1)', () => {
let doc = create()
const doc = create()
doc.put("_root", "key", "val1")
let heads1 = doc.getHeads()
const heads1 = doc.getHeads()
doc.put("_root", "key", "val2")
let heads2 = doc.getHeads()
const heads2 = doc.getHeads()
doc.put("_root", "key", "val3")
@ -227,10 +227,10 @@ describe('Automerge', () => {
doc.free()
})
it('Forking And Merging (1)', () => {
let doc1 = create()
const doc1 = create()
doc1.put("_root", "key1", "val1")
let doc2 = doc1.fork()
const doc2 = doc1.fork()
doc1.put("_root", "key2", "val2")
doc2.put("_root", "key3", "val3")
@ -243,31 +243,31 @@ describe('Automerge', () => {
doc1.free(); doc2.free()
})
it('Saving And Loading (1)', () => {
let doc1 = create()
const doc1 = create()
doc1.put("_root", "key1", "value1")
let save1 = doc1.save()
const save1 = doc1.save()
let doc2 = load(save1)
const doc2 = load(save1)
doc2.materialize("_root") // returns { key1: "value1" }
doc1.put("_root", "key2", "value2")
let saveIncremental = doc1.saveIncremental()
const saveIncremental = doc1.saveIncremental()
let save2 = doc1.save()
const save2 = doc1.save()
let save3 = new Uint8Array([... save1, ... saveIncremental])
const save3 = new Uint8Array([... save1, ... saveIncremental])
// save2 has fewer bytes than save3 but contains the same ops
doc2.loadIncremental(saveIncremental)
let doc3 = load(save2)
const doc3 = load(save2)
let doc4 = load(save3)
const doc4 = load(save3)
assert.deepEqual(doc1.materialize("_root"), { key1: "value1", key2: "value2" })
assert.deepEqual(doc2.materialize("_root"), { key1: "value1", key2: "value2" })

View file

@ -33,29 +33,29 @@ describe('Automerge', () => {
})
it('should create, clone and free', () => {
let doc1 = create()
let doc2 = doc1.clone()
const doc1 = create()
const doc2 = doc1.clone()
doc1.free()
doc2.free()
})
it('should be able to start and commit', () => {
let doc = create()
const doc = create()
doc.commit()
doc.free()
})
it('getting a nonexistant prop does not throw an error', () => {
let doc = create()
let root = "_root"
let result = doc.get(root,"hello")
const doc = create()
const root = "_root"
const result = doc.get(root,"hello")
assert.deepEqual(result,undefined)
doc.free()
})
it('should be able to set and get a simple value', () => {
let doc : Automerge = create("aabbcc")
let root = "_root"
const doc : Automerge = create("aabbcc")
const root = "_root"
let result
doc.put(root, "hello", "world")
@ -112,22 +112,22 @@ describe('Automerge', () => {
})
it('should be able to use bytes', () => {
let doc = create()
const doc = create()
doc.put("_root","data1", new Uint8Array([10,11,12]));
doc.put("_root","data2", new Uint8Array([13,14,15]), "bytes");
let value1 = doc.get("_root", "data1")
const value1 = doc.get("_root", "data1")
assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]);
let value2 = doc.get("_root", "data2")
const value2 = doc.get("_root", "data2")
assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]);
doc.free()
})
it('should be able to make sub objects', () => {
let doc = create()
let root = "_root"
const doc = create()
const root = "_root"
let result
let submap = doc.putObject(root, "submap", {})
const submap = doc.putObject(root, "submap", {})
doc.put(submap, "number", 6, "uint")
assert.strictEqual(doc.pendingOps(),2)
@ -140,10 +140,10 @@ describe('Automerge', () => {
})
it('should be able to make lists', () => {
let doc = create()
let root = "_root"
const doc = create()
const root = "_root"
let submap = doc.putObject(root, "numbers", [])
const submap = doc.putObject(root, "numbers", [])
doc.insert(submap, 0, "a");
doc.insert(submap, 1, "b");
doc.insert(submap, 2, "c");
@ -163,15 +163,15 @@ describe('Automerge', () => {
})
it('lists have insert, set, splice, and push ops', () => {
let doc = create()
let root = "_root"
const doc = create()
const root = "_root"
let submap = doc.putObject(root, "letters", [])
const submap = doc.putObject(root, "letters", [])
doc.insert(submap, 0, "a");
doc.insert(submap, 0, "b");
assert.deepEqual(doc.materialize(), { letters: ["b", "a" ] })
doc.push(submap, "c");
let heads = doc.getHeads()
const heads = doc.getHeads()
assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c" ] })
doc.push(submap, 3, "timestamp");
assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3) ] })
@ -187,17 +187,17 @@ describe('Automerge', () => {
})
it('should be able delete non-existant props', () => {
let doc = create()
const doc = create()
doc.put("_root", "foo","bar")
doc.put("_root", "bip","bap")
let hash1 = doc.commit()
const hash1 = doc.commit()
assert.deepEqual(doc.keys("_root"),["bip","foo"])
doc.delete("_root", "foo")
doc.delete("_root", "baz")
let hash2 = doc.commit()
const hash2 = doc.commit()
assert.deepEqual(doc.keys("_root"),["bip"])
assert.deepEqual(doc.keys("_root", [hash1]),["bip", "foo"])
@ -206,8 +206,8 @@ describe('Automerge', () => {
})
it('should be able to del', () => {
let doc = create()
let root = "_root"
const doc = create()
const root = "_root"
doc.put(root, "xxx", "xxx");
assert.deepEqual(doc.get(root, "xxx"),["str","xxx"])
@ -217,8 +217,8 @@ describe('Automerge', () => {
})
it('should be able to use counters', () => {
let doc = create()
let root = "_root"
const doc = create()
const root = "_root"
doc.put(root, "counter", 10, "counter");
assert.deepEqual(doc.get(root, "counter"),["counter",10])
@ -230,10 +230,10 @@ describe('Automerge', () => {
})
it('should be able to splice text', () => {
let doc = create()
let root = "_root";
const doc = create()
const root = "_root";
let text = doc.putObject(root, "text", "");
const text = doc.putObject(root, "text", "");
doc.splice(text, 0, 0, "hello ")
doc.splice(text, 6, 0, ["w","o","r","l","d"])
doc.splice(text, 11, 0, ["!","?"])
@ -247,39 +247,39 @@ describe('Automerge', () => {
})
it('should be able to insert objects into text', () => {
let doc = create()
let text = doc.putObject("/", "text", "Hello world");
let obj = doc.insertObject(text, 6, { hello: "world" });
const doc = create()
const text = doc.putObject("/", "text", "Hello world");
const obj = doc.insertObject(text, 6, { hello: "world" });
assert.deepEqual(doc.text(text), "Hello \ufffcworld");
assert.deepEqual(doc.get(text, 6), ["map", obj]);
assert.deepEqual(doc.get(obj, "hello"), ["str", "world"]);
})
it('should be able save all or incrementally', () => {
let doc = create()
const doc = create()
doc.put("_root", "foo", 1)
let save1 = doc.save()
const save1 = doc.save()
doc.put("_root", "bar", 2)
let saveMidway = doc.clone().save();
const saveMidway = doc.clone().save();
let save2 = doc.saveIncremental();
const save2 = doc.saveIncremental();
doc.put("_root", "baz", 3);
let save3 = doc.saveIncremental();
const save3 = doc.saveIncremental();
let saveA = doc.save();
let saveB = new Uint8Array([... save1, ...save2, ...save3]);
const saveA = doc.save();
const saveB = new Uint8Array([... save1, ...save2, ...save3]);
assert.notDeepEqual(saveA, saveB);
let docA = load(saveA);
let docB = load(saveB);
let docC = load(saveMidway)
const docA = load(saveA);
const docB = load(saveB);
const docC = load(saveMidway)
docC.loadIncremental(save3)
assert.deepEqual(docA.keys("_root"), docB.keys("_root"));
@ -292,12 +292,12 @@ describe('Automerge', () => {
})
it('should be able to splice text', () => {
let doc = create()
let text = doc.putObject("_root", "text", "");
const doc = create()
const text = doc.putObject("_root", "text", "");
doc.splice(text, 0, 0, "hello world");
let hash1 = doc.commit();
const hash1 = doc.commit();
doc.splice(text, 6, 0, "big bad ");
let hash2 = doc.commit();
const hash2 = doc.commit();
assert.strictEqual(doc.text(text), "hello big bad world")
assert.strictEqual(doc.length(text), 19)
assert.strictEqual(doc.text(text, [ hash1 ]), "hello world")
@ -308,10 +308,10 @@ describe('Automerge', () => {
})
it('local inc increments all visible counters in a map', () => {
let doc1 = create("aaaa")
const doc1 = create("aaaa")
doc1.put("_root", "hello", "world")
let doc2 = load(doc1.save(), "bbbb");
let doc3 = load(doc1.save(), "cccc");
const doc2 = load(doc1.save(), "bbbb");
const doc3 = load(doc1.save(), "cccc");
let heads = doc1.getHeads()
doc1.put("_root", "cnt", 20)
doc2.put("_root", "cnt", 0, "counter")
@ -331,8 +331,8 @@ describe('Automerge', () => {
[ 'counter', 15, '2@cccc' ],
])
let save1 = doc1.save()
let doc4 = load(save1)
const save1 = doc1.save()
const doc4 = load(save1)
assert.deepEqual(doc4.save(), save1);
doc1.free()
doc2.free()
@ -341,11 +341,11 @@ describe('Automerge', () => {
})
it('local inc increments all visible counters in a sequence', () => {
let doc1 = create("aaaa")
let seq = doc1.putObject("_root", "seq", [])
const doc1 = create("aaaa")
const seq = doc1.putObject("_root", "seq", [])
doc1.insert(seq, 0, "hello")
let doc2 = load(doc1.save(), "bbbb");
let doc3 = load(doc1.save(), "cccc");
const doc2 = load(doc1.save(), "bbbb");
const doc3 = load(doc1.save(), "cccc");
let heads = doc1.getHeads()
doc1.put(seq, 0, 20)
doc2.put(seq, 0, 0, "counter")
@ -365,8 +365,8 @@ describe('Automerge', () => {
[ 'counter', 15, '3@cccc' ],
])
let save = doc1.save()
let doc4 = load(save)
const save = doc1.save()
const doc4 = load(save)
assert.deepEqual(doc4.save(), save);
doc1.free()
doc2.free()
@ -375,7 +375,7 @@ describe('Automerge', () => {
})
it('paths can be used instead of objids', () => {
let doc = create("aaaa")
const doc = create("aaaa")
doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]])
assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar"}, [1,2,3]] })
assert.deepEqual(doc.materialize("/list"), [{ foo: "bar"}, [1,2,3]])
@ -383,26 +383,26 @@ describe('Automerge', () => {
})
it('should be able to fetch changes by hash', () => {
let doc1 = create("aaaa")
let doc2 = create("bbbb")
const doc1 = create("aaaa")
const doc2 = create("bbbb")
doc1.put("/","a","b")
doc2.put("/","b","c")
let head1 = doc1.getHeads()
let head2 = doc2.getHeads()
let change1 = doc1.getChangeByHash(head1[0])
let change2 = doc1.getChangeByHash(head2[0])
const head1 = doc1.getHeads()
const head2 = doc2.getHeads()
const change1 = doc1.getChangeByHash(head1[0])
const change2 = doc1.getChangeByHash(head2[0])
assert.deepEqual(change2, null)
if (change1 === null) { throw new RangeError("change1 should not be null") }
assert.deepEqual(decodeChange(change1).hash, head1[0])
})
it('recursive sets are possible', () => {
let doc = create("aaaa")
let l1 = doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]])
let l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] })
let l3 = doc.putObject("_root","info1","hello world") // 'text' object
const doc = create("aaaa")
const l1 = doc.putObject("_root","list",[{ foo: "bar"}, [1,2,3]])
const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] })
const l3 = doc.putObject("_root","info1","hello world") // 'text' object
doc.put("_root","info2","hello world") // 'str'
let l4 = doc.putObject("_root","info3","hello world")
const l4 = doc.putObject("_root","info3","hello world")
assert.deepEqual(doc.materialize(), {
"list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]],
"info1": "hello world",
@ -416,15 +416,15 @@ describe('Automerge', () => {
})
it('only returns an object id when objects are created', () => {
let doc = create("aaaa")
let r1 = doc.put("_root","foo","bar")
let r2 = doc.putObject("_root","list",[])
let r3 = doc.put("_root","counter",10, "counter")
let r4 = doc.increment("_root","counter",1)
let r5 = doc.delete("_root","counter")
let r6 = doc.insert(r2,0,10);
let r7 = doc.insertObject(r2,0,{});
let r8 = doc.splice(r2,1,0,["a","b","c"]);
const doc = create("aaaa")
const r1 = doc.put("_root","foo","bar")
const r2 = doc.putObject("_root","list",[])
const r3 = doc.put("_root","counter",10, "counter")
const r4 = doc.increment("_root","counter",1)
const r5 = doc.delete("_root","counter")
const r6 = doc.insert(r2,0,10);
const r7 = doc.insertObject(r2,0,{});
const r8 = doc.splice(r2,1,0,["a","b","c"]);
//let r9 = doc.splice(r2,1,0,["a",[],{},"d"]);
assert.deepEqual(r1,null);
assert.deepEqual(r2,"2@aaaa");
@ -439,13 +439,13 @@ describe('Automerge', () => {
})
it('objects without properties are preserved', () => {
let doc1 = create("aaaa")
let a = doc1.putObject("_root","a",{});
let b = doc1.putObject("_root","b",{});
let c = doc1.putObject("_root","c",{});
let d = doc1.put(c,"d","dd");
let saved = doc1.save();
let doc2 = load(saved);
const doc1 = create("aaaa")
const a = doc1.putObject("_root","a",{});
const b = doc1.putObject("_root","b",{});
const c = doc1.putObject("_root","c",{});
const d = doc1.put(c,"d","dd");
const saved = doc1.save();
const doc2 = load(saved);
assert.deepEqual(doc2.get("_root","a"),["map",a])
assert.deepEqual(doc2.keys(a),[])
assert.deepEqual(doc2.get("_root","b"),["map",b])
@ -458,26 +458,26 @@ describe('Automerge', () => {
})
it('should allow you to forkAt a heads', () => {
let A = create("aaaaaa")
const A = create("aaaaaa")
A.put("/", "key1","val1");
A.put("/", "key2","val2");
let heads1 = A.getHeads();
let B = A.fork("bbbbbb")
const heads1 = A.getHeads();
const B = A.fork("bbbbbb")
A.put("/", "key3","val3");
B.put("/", "key4","val4");
A.merge(B)
let heads2 = A.getHeads();
const heads2 = A.getHeads();
A.put("/", "key5","val5");
assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/",heads1))
assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/",heads2))
})
it('should handle merging text conflicts then saving & loading', () => {
let A = create("aabbcc")
let At = A.putObject('_root', 'text', "")
const A = create("aabbcc")
const At = A.putObject('_root', 'text', "")
A.splice(At, 0, 0, 'hello')
let B = A.fork()
const B = A.fork()
assert.deepEqual(B.get("_root","text"), [ "text", At])
@ -488,9 +488,9 @@ describe('Automerge', () => {
A.merge(B)
let binary = A.save()
const binary = A.save()
let C = load(binary)
const C = load(binary)
assert.deepEqual(C.get('_root', 'text'), ['text', '1@aabbcc'])
assert.deepEqual(C.text(At), 'hell! world')
@ -499,7 +499,7 @@ describe('Automerge', () => {
describe('patch generation', () => {
it('should include root object key updates', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc1.put('_root', 'hello', 'world')
doc2.enablePatches(true)
doc2.loadIncremental(doc1.saveIncremental())
@ -511,7 +511,7 @@ describe('Automerge', () => {
})
it('should include nested object creation', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc1.putObject('_root', 'birds', {friday: {robins: 3}})
doc2.enablePatches(true)
doc2.loadIncremental(doc1.saveIncremental())
@ -525,7 +525,7 @@ describe('Automerge', () => {
})
it('should delete map keys', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc1.put('_root', 'favouriteBird', 'Robin')
doc2.enablePatches(true)
doc2.loadIncremental(doc1.saveIncremental())
@ -540,7 +540,7 @@ describe('Automerge', () => {
})
it('should include list element insertion', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch'])
doc2.enablePatches(true)
doc2.loadIncremental(doc1.saveIncremental())
@ -554,7 +554,7 @@ describe('Automerge', () => {
})
it('should insert nested maps into a list', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc1.putObject('_root', 'birds', [])
doc2.loadIncremental(doc1.saveIncremental())
doc1.insertObject('1@aaaa', 0, {species: 'Goldfinch', count: 3})
@ -570,7 +570,7 @@ describe('Automerge', () => {
})
it('should calculate list indexes based on visible elements', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch'])
doc2.loadIncremental(doc1.saveIncremental())
doc1.delete('1@aaaa', 0)
@ -588,9 +588,9 @@ describe('Automerge', () => {
})
it('should handle concurrent insertions at the head of a list', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
doc1.putObject('_root', 'values', [])
let change1 = doc1.saveIncremental()
const change1 = doc1.saveIncremental()
doc2.loadIncremental(change1)
doc3.loadIncremental(change1)
doc4.loadIncremental(change1)
@ -598,7 +598,7 @@ describe('Automerge', () => {
doc1.insert('1@aaaa', 1, 'd')
doc2.insert('1@aaaa', 0, 'a')
doc2.insert('1@aaaa', 1, 'b')
let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental()
const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental()
doc3.enablePatches(true)
doc4.enablePatches(true)
doc3.loadIncremental(change2); doc3.loadIncremental(change3)
@ -621,9 +621,9 @@ describe('Automerge', () => {
})
it('should handle concurrent insertions beyond the head', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
doc1.putObject('_root', 'values', ['a', 'b'])
let change1 = doc1.saveIncremental()
const change1 = doc1.saveIncremental()
doc2.loadIncremental(change1)
doc3.loadIncremental(change1)
doc4.loadIncremental(change1)
@ -631,7 +631,7 @@ describe('Automerge', () => {
doc1.insert('1@aaaa', 3, 'f')
doc2.insert('1@aaaa', 2, 'c')
doc2.insert('1@aaaa', 3, 'd')
let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental()
const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental()
doc3.enablePatches(true)
doc4.enablePatches(true)
doc3.loadIncremental(change2); doc3.loadIncremental(change3)
@ -654,10 +654,10 @@ describe('Automerge', () => {
})
it('should handle conflicts on root object keys', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
doc1.put('_root', 'bird', 'Greenfinch')
doc2.put('_root', 'bird', 'Goldfinch')
let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
doc3.enablePatches(true)
doc4.enablePatches(true)
doc3.loadIncremental(change1); doc3.loadIncremental(change2)
@ -678,11 +678,11 @@ describe('Automerge', () => {
})
it('should handle three-way conflicts', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc')
const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc')
doc1.put('_root', 'bird', 'Greenfinch')
doc2.put('_root', 'bird', 'Chaffinch')
doc3.put('_root', 'bird', 'Goldfinch')
let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental(), change3 = doc3.saveIncremental()
const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental(), change3 = doc3.saveIncremental()
doc1.enablePatches(true)
doc2.enablePatches(true)
doc3.enablePatches(true)
@ -717,11 +717,11 @@ describe('Automerge', () => {
})
it('should allow a conflict to be resolved', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc')
const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc')
doc1.put('_root', 'bird', 'Greenfinch')
doc2.put('_root', 'bird', 'Chaffinch')
doc3.enablePatches(true)
let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
doc1.loadIncremental(change2); doc3.loadIncremental(change1)
doc2.loadIncremental(change1); doc3.loadIncremental(change2)
doc1.put('_root', 'bird', 'Goldfinch')
@ -736,12 +736,12 @@ describe('Automerge', () => {
})
it('should handle a concurrent map key overwrite and delete', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc1.put('_root', 'bird', 'Greenfinch')
doc2.loadIncremental(doc1.saveIncremental())
doc1.put('_root', 'bird', 'Goldfinch')
doc2.delete('_root', 'bird')
let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
doc1.enablePatches(true)
doc2.enablePatches(true)
doc1.loadIncremental(change2)
@ -760,15 +760,15 @@ describe('Automerge', () => {
})
it('should handle a conflict on a list element', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
doc1.putObject('_root', 'birds', ['Thrush', 'Magpie'])
let change1 = doc1.saveIncremental()
const change1 = doc1.saveIncremental()
doc2.loadIncremental(change1)
doc3.loadIncremental(change1)
doc4.loadIncremental(change1)
doc1.put('1@aaaa', 0, 'Song Thrush')
doc2.put('1@aaaa', 0, 'Redwing')
let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental()
const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental()
doc3.enablePatches(true)
doc4.enablePatches(true)
doc3.loadIncremental(change2); doc3.loadIncremental(change3)
@ -789,9 +789,9 @@ describe('Automerge', () => {
})
it('should handle a concurrent list element overwrite and delete', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd')
doc1.putObject('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush'])
let change1 = doc1.saveIncremental()
const change1 = doc1.saveIncremental()
doc2.loadIncremental(change1)
doc3.loadIncremental(change1)
doc4.loadIncremental(change1)
@ -799,7 +799,7 @@ describe('Automerge', () => {
doc1.put('1@aaaa', 1, 'Song Thrush')
doc2.put('1@aaaa', 0, 'Ring-necked parakeet')
doc2.put('1@aaaa', 2, 'Redwing')
let change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental()
const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental()
doc3.enablePatches(true)
doc4.enablePatches(true)
doc3.loadIncremental(change2); doc3.loadIncremental(change3)
@ -824,12 +824,12 @@ describe('Automerge', () => {
})
it('should handle deletion of a conflict value', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc')
const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc')
doc1.put('_root', 'bird', 'Robin')
doc2.put('_root', 'bird', 'Wren')
let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
doc2.delete('_root', 'bird')
let change3 = doc2.saveIncremental()
const change3 = doc2.saveIncremental()
doc3.enablePatches(true)
doc3.loadIncremental(change1)
doc3.loadIncremental(change2)
@ -848,10 +848,10 @@ describe('Automerge', () => {
})
it('should handle conflicting nested objects', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc1.putObject('_root', 'birds', ['Parakeet'])
doc2.putObject('_root', 'birds', {'Sparrowhawk': 1})
let change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental()
doc1.enablePatches(true)
doc2.enablePatches(true)
doc1.loadIncremental(change2)
@ -871,7 +871,7 @@ describe('Automerge', () => {
it('should support date objects', () => {
// FIXME: either use Date objects or use numbers consistently
let doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date()
const doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date()
doc1.put('_root', 'createdAt', now.getTime(), 'timestamp')
doc2.enablePatches(true)
doc2.loadIncremental(doc1.saveIncremental())
@ -883,7 +883,7 @@ describe('Automerge', () => {
})
it('should capture local put ops', () => {
let doc1 = create('aaaa')
const doc1 = create('aaaa')
doc1.enablePatches(true)
doc1.put('_root', 'key1', 1)
doc1.put('_root', 'key1', 2)
@ -902,7 +902,7 @@ describe('Automerge', () => {
})
it('should capture local insert ops', () => {
let doc1 = create('aaaa')
const doc1 = create('aaaa')
doc1.enablePatches(true)
const list = doc1.putObject('_root', 'list', [])
doc1.insert(list, 0, 1)
@ -923,7 +923,7 @@ describe('Automerge', () => {
})
it('should capture local push ops', () => {
let doc1 = create('aaaa')
const doc1 = create('aaaa')
doc1.enablePatches(true)
const list = doc1.putObject('_root', 'list', [])
doc1.push(list, 1)
@ -940,7 +940,7 @@ describe('Automerge', () => {
})
it('should capture local splice ops', () => {
let doc1 = create('aaaa')
const doc1 = create('aaaa')
doc1.enablePatches(true)
const list = doc1.putObject('_root', 'list', [])
doc1.splice(list, 0, 0, [1,2,3,4])
@ -959,7 +959,7 @@ describe('Automerge', () => {
})
it('should capture local increment ops', () => {
let doc1 = create('aaaa')
const doc1 = create('aaaa')
doc1.enablePatches(true)
doc1.put('_root', 'counter', 2, 'counter')
doc1.increment('_root', 'counter', 4)
@ -973,7 +973,7 @@ describe('Automerge', () => {
it('should capture local delete ops', () => {
let doc1 = create('aaaa')
const doc1 = create('aaaa')
doc1.enablePatches(true)
doc1.put('_root', 'key1', 1)
doc1.put('_root', 'key2', 2)
@ -989,7 +989,7 @@ describe('Automerge', () => {
})
it('should support counters in a map', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc2.enablePatches(true)
doc1.put('_root', 'starlings', 2, 'counter')
doc2.loadIncremental(doc1.saveIncremental())
@ -1004,7 +1004,7 @@ describe('Automerge', () => {
})
it('should support counters in a list', () => {
let doc1 = create('aaaa'), doc2 = create('bbbb')
const doc1 = create('aaaa'), doc2 = create('bbbb')
doc2.enablePatches(true)
const list = doc1.putObject('_root', 'list', [])
doc2.loadIncremental(doc1.saveIncremental())
@ -1029,9 +1029,9 @@ describe('Automerge', () => {
describe('sync', () => {
it('should send a sync message implying no local data', () => {
let doc = create()
let s1 = initSyncState()
let m1 = doc.generateSyncMessage(s1)
const doc = create()
const s1 = initSyncState()
const m1 = doc.generateSyncMessage(s1)
if (m1 === null) { throw new RangeError("message should not be null") }
const message: DecodedSyncMessage = decodeSyncMessage(m1)
assert.deepStrictEqual(message.heads, [])
@ -1043,21 +1043,21 @@ describe('Automerge', () => {
})
it('should not reply if we have no data as well', () => {
let n1 = create(), n2 = create()
let s1 = initSyncState(), s2 = initSyncState()
let m1 = n1.generateSyncMessage(s1)
const n1 = create(), n2 = create()
const s1 = initSyncState(), s2 = initSyncState()
const m1 = n1.generateSyncMessage(s1)
if (m1 === null) { throw new RangeError("message should not be null") }
n2.receiveSyncMessage(s2, m1)
let m2 = n2.generateSyncMessage(s2)
const m2 = n2.generateSyncMessage(s2)
assert.deepStrictEqual(m2, null)
})
it('repos with equal heads do not need a reply message', () => {
let n1 = create(), n2 = create()
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create(), n2 = create()
const s1 = initSyncState(), s2 = initSyncState()
// make two nodes with the same changes
let list = n1.putObject("_root","n", [])
const list = n1.putObject("_root","n", [])
n1.commit("",0)
for (let i = 0; i < 10; i++) {
n1.insert(list,i,i)
@ -1067,21 +1067,21 @@ describe('Automerge', () => {
assert.deepStrictEqual(n1.materialize(), n2.materialize())
// generate a naive sync message
let m1 = n1.generateSyncMessage(s1)
const m1 = n1.generateSyncMessage(s1)
if (m1 === null) { throw new RangeError("message should not be null") }
assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads())
// heads are equal so this message should be null
n2.receiveSyncMessage(s2, m1)
let m2 = n2.generateSyncMessage(s2)
const m2 = n2.generateSyncMessage(s2)
assert.strictEqual(m2, null)
})
it('n1 should offer all changes to n2 when starting from nothing', () => {
let n1 = create(), n2 = create()
const n1 = create(), n2 = create()
// make changes for n1 that n2 should request
let list = n1.putObject("_root","n",[])
const list = n1.putObject("_root","n",[])
n1.commit("",0)
for (let i = 0; i < 10; i++) {
n1.insert(list, i, i)
@ -1094,10 +1094,10 @@ describe('Automerge', () => {
})
it('should sync peers where one has commits the other does not', () => {
let n1 = create(), n2 = create()
const n1 = create(), n2 = create()
// make changes for n1 that n2 should request
let list = n1.putObject("_root","n",[])
const list = n1.putObject("_root","n",[])
n1.commit("",0)
for (let i = 0; i < 10; i++) {
n1.insert(list,i,i)
@ -1111,8 +1111,8 @@ describe('Automerge', () => {
it('should work with prior sync state', () => {
// create & synchronize two nodes
let n1 = create(), n2 = create()
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create(), n2 = create()
const s1 = initSyncState(), s2 = initSyncState()
for (let i = 0; i < 5; i++) {
n1.put("_root","x",i)
@ -1134,8 +1134,8 @@ describe('Automerge', () => {
it('should not generate messages once synced', () => {
// create & synchronize two nodes
let n1 = create('abc123'), n2 = create('def456')
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create('abc123'), n2 = create('def456')
const s1 = initSyncState(), s2 = initSyncState()
let message, patch
for (let i = 0; i < 5; i++) {
@ -1182,8 +1182,8 @@ describe('Automerge', () => {
it('should allow simultaneous messages during synchronization', () => {
// create & synchronize two nodes
let n1 = create('abc123'), n2 = create('def456')
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create('abc123'), n2 = create('def456')
const s1 = initSyncState(), s2 = initSyncState()
for (let i = 0; i < 5; i++) {
n1.put("_root", "x", i)
@ -1261,10 +1261,11 @@ describe('Automerge', () => {
})
it('should assume sent changes were recieved until we hear otherwise', () => {
let n1 = create('01234567'), n2 = create('89abcdef')
let s1 = initSyncState(), s2 = initSyncState(), message = null
const n1 = create('01234567'), n2 = create('89abcdef')
const s1 = initSyncState(), s2 = initSyncState()
let message = null
let items = n1.putObject("_root", "items", [])
const items = n1.putObject("_root", "items", [])
n1.commit("",0)
sync(n1, n2, s1, s2)
@ -1291,8 +1292,8 @@ describe('Automerge', () => {
it('should work regardless of who initiates the exchange', () => {
// create & synchronize two nodes
let n1 = create(), n2 = create()
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create(), n2 = create()
const s1 = initSyncState(), s2 = initSyncState()
for (let i = 0; i < 5; i++) {
n1.put("_root", "x", i)
@ -1319,8 +1320,8 @@ describe('Automerge', () => {
// lastSync is undefined.
// create two peers both with divergent commits
let n1 = create('01234567'), n2 = create('89abcdef')
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create('01234567'), n2 = create('89abcdef')
const s1 = initSyncState(), s2 = initSyncState()
for (let i = 0; i < 10; i++) {
n1.put("_root","x",i)
@ -1352,7 +1353,7 @@ describe('Automerge', () => {
// lastSync is c9.
// create two peers both with divergent commits
let n1 = create('01234567'), n2 = create('89abcdef')
const n1 = create('01234567'), n2 = create('89abcdef')
let s1 = initSyncState(), s2 = initSyncState()
for (let i = 0; i < 10; i++) {
@ -1381,8 +1382,8 @@ describe('Automerge', () => {
})
it('should ensure non-empty state after sync', () => {
let n1 = create('01234567'), n2 = create('89abcdef')
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create('01234567'), n2 = create('89abcdef')
const s1 = initSyncState(), s2 = initSyncState()
for (let i = 0; i < 3; i++) {
n1.put("_root","x",i)
@ -1400,8 +1401,9 @@ describe('Automerge', () => {
// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8
// n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2.
// we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2)
let n1 = create('01234567'), n2 = create('89abcdef')
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create('01234567'), n2 = create('89abcdef')
let s1 = initSyncState()
const s2 = initSyncState()
// n1 makes three changes, which we sync to n2
for (let i = 0; i < 3; i++) {
@ -1412,7 +1414,8 @@ describe('Automerge', () => {
sync(n1, n2, s1, s2)
// save a copy of n2 as "r" to simulate recovering from crash
let r, rSyncState
let r
let rSyncState
;[r, rSyncState] = [n2.clone(), s2.clone()]
// sync another few commits
@ -1446,8 +1449,8 @@ describe('Automerge', () => {
})
it('should resync after one node experiences data loss without disconnecting', () => {
let n1 = create('01234567'), n2 = create('89abcdef')
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create('01234567'), n2 = create('89abcdef')
const s1 = initSyncState(), s2 = initSyncState()
// n1 makes three changes, which we sync to n2
for (let i = 0; i < 3; i++) {
@ -1460,7 +1463,7 @@ describe('Automerge', () => {
assert.deepStrictEqual(n1.getHeads(), n2.getHeads())
assert.deepStrictEqual(n1.materialize(), n2.materialize())
let n2AfterDataLoss = create('89abcdef')
const n2AfterDataLoss = create('89abcdef')
// "n2" now has no data, but n1 still thinks it does. Note we don't do
// decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting
@ -1470,8 +1473,8 @@ describe('Automerge', () => {
})
it('should handle changes concurrent to the last sync heads', () => {
let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98')
let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState()
const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98')
const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState()
// Change 1 is known to all three nodes
//n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1)
@ -1505,7 +1508,7 @@ describe('Automerge', () => {
})
it('should handle histories with lots of branching and merging', () => {
let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98')
const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98')
n1.put("_root","x",0); n1.commit("",0)
n2.applyChanges([n1.getLastLocalChange()])
n3.applyChanges([n1.getLastLocalChange()])
@ -1526,7 +1529,7 @@ describe('Automerge', () => {
n2.applyChanges([change1])
}
let s1 = initSyncState(), s2 = initSyncState()
const s1 = initSyncState(), s2 = initSyncState()
sync(n1, n2, s1, s2)
// Having n3's last change concurrent to the last sync heads forces us into the slower code path
@ -1652,7 +1655,7 @@ describe('Automerge', () => {
assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent
// n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it
let n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState()
const n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState()
sync(n1, n3, s13, s31)
assert.deepStrictEqual(n1.getHeads(), [n1hash2])
assert.deepStrictEqual(n3.getHeads(), [n1hash2])
@ -1819,7 +1822,7 @@ describe('Automerge', () => {
// n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2};
// n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3};
// n3 has {c0, c1, c2, n3c1, n3c2, n3c3}.
let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210')
const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210')
let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState()
let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState()
let message1, message2, message3
@ -1889,8 +1892,8 @@ describe('Automerge', () => {
})
it('should allow any change to be requested', () => {
let n1 = create('01234567'), n2 = create('89abcdef')
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create('01234567'), n2 = create('89abcdef')
const s1 = initSyncState(), s2 = initSyncState()
let message = null
for (let i = 0; i < 3; i++) {
@ -1917,8 +1920,8 @@ describe('Automerge', () => {
})
it('should ignore requests for a nonexistent change', () => {
let n1 = create('01234567'), n2 = create('89abcdef')
let s1 = initSyncState(), s2 = initSyncState()
const n1 = create('01234567'), n2 = create('89abcdef')
const s1 = initSyncState(), s2 = initSyncState()
let message = null
for (let i = 0; i < 3; i++) {
@ -1940,7 +1943,7 @@ describe('Automerge', () => {
// ,-- c1 <-- c2
// c0 <-+
// `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8
let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210')
const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210')
let s1 = initSyncState(), s2 = initSyncState()
let msg, decodedMsg

View file

@ -13,5 +13,6 @@
"target": "es2016",
"typeRoots": ["./index.d.ts"]
},
"exclude": ["dist/**/*"]
"include": ["test/**/*.ts"],
"exclude": ["dist/**/*", "examples/**/*"]
}

View file

@ -8,6 +8,40 @@ export {
decodeSyncMessage,
encodeSyncState,
decodeSyncState,
exportSyncState,
importSyncState,
} from "./bindgen.js"
import {
loadDoc as load,
create,
encodeChange,
decodeChange,
initSyncState,
encodeSyncMessage,
decodeSyncMessage,
encodeSyncState,
decodeSyncState,
exportSyncState,
importSyncState,
} from "./bindgen.js"
let api = {
load,
create,
encodeChange,
decodeChange,
initSyncState,
encodeSyncMessage,
decodeSyncMessage,
encodeSyncState,
decodeSyncState,
exportSyncState,
importSyncState
}
import init from "./bindgen.js"
export default init;
export default function() {
return new Promise((resolve,reject) => init().then(() => {
resolve({ ... api, load, create, foo: "bar" })
}))
}

View file

@ -8,7 +8,7 @@ use automerge::{Automerge, ROOT};
fn main() {
let mut doc1 = Automerge::new();
let (cards, card1) = doc1
.transact_with::<_, _, AutomergeError, _, ()>(
.transact_with::<_, _, AutomergeError, _>(
|_| CommitOptions::default().with_message("Add card".to_owned()),
|tx| {
let cards = tx.put_object(ROOT, "cards", ObjType::List).unwrap();
@ -30,7 +30,7 @@ fn main() {
let binary = doc1.save();
let mut doc2 = Automerge::load(&binary).unwrap();
doc1.transact_with::<_, _, AutomergeError, _, ()>(
doc1.transact_with::<_, _, AutomergeError, _>(
|_| CommitOptions::default().with_message("Mark card as done".to_owned()),
|tx| {
tx.put(&card1, "done", true)?;
@ -39,7 +39,7 @@ fn main() {
)
.unwrap();
doc2.transact_with::<_, _, AutomergeError, _, ()>(
doc2.transact_with::<_, _, AutomergeError, _>(
|_| CommitOptions::default().with_message("Delete card".to_owned()),
|tx| {
tx.delete(&cards, 0)?;

View file

@ -2,16 +2,16 @@ use automerge::transaction::CommitOptions;
use automerge::transaction::Transactable;
use automerge::Automerge;
use automerge::AutomergeError;
use automerge::OpObserver;
use automerge::Patch;
use automerge::VecOpObserver;
use automerge::ROOT;
fn main() {
let mut doc = Automerge::new();
let mut observer = VecOpObserver::default();
let mut observer = OpObserver::default();
// a simple scalar change in the root object
doc.transact_with::<_, _, AutomergeError, _, _>(
doc.transact_with::<_, _, AutomergeError, _>(
|_result| CommitOptions::default().with_op_observer(&mut observer),
|tx| {
tx.put(ROOT, "hello", "world").unwrap();
@ -19,7 +19,7 @@ fn main() {
},
)
.unwrap();
get_changes(&doc, observer.take_patches());
get_changes(observer.take_patches());
let mut tx = doc.transaction();
let map = tx
@ -37,50 +37,49 @@ fn main() {
let m = tx.insert_object(&list, 2, automerge::ObjType::Map).unwrap();
tx.put(&m, "hi", 2).unwrap();
let _heads3 = tx.commit_with(CommitOptions::default().with_op_observer(&mut observer));
get_changes(&doc, observer.take_patches());
get_changes(observer.take_patches());
}
fn get_changes(doc: &Automerge, patches: Vec<Patch>) {
fn get_changes(patches: Vec<Patch>) {
for patch in patches {
match patch {
Patch::Put {
obj,
path,
key,
value,
conflict: _,
} => {
println!(
"put {:?} at {:?} in obj {:?}, object path {:?}",
value,
key,
obj,
doc.path_to_object(&obj)
value, key, obj, path,
)
}
Patch::Insert { obj, index, value } => {
Patch::Insert {
obj,
index,
value,
path,
} => {
println!(
"insert {:?} at {:?} in obj {:?}, object path {:?}",
value,
index,
obj,
doc.path_to_object(&obj)
value, index, obj, path,
)
}
Patch::Increment { obj, key, value } => {
Patch::Increment {
obj,
key,
value,
path,
} => {
println!(
"increment {:?} in obj {:?} by {:?}, object path {:?}",
key,
obj,
value,
doc.path_to_object(&obj)
key, obj, value, path,
)
}
Patch::Delete { obj, key } => println!(
"delete {:?} in obj {:?}, object path {:?}",
key,
obj,
doc.path_to_object(&obj)
),
Patch::Delete { obj, key, path } => {
println!("delete {:?} in obj {:?}, object path {:?}", key, obj, path)
}
}
}
}

View file

@ -5,7 +5,7 @@ use crate::op_observer::OpObserver;
use crate::transaction::{CommitOptions, Transactable};
use crate::{
sync, ApplyOptions, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType,
Parents, ScalarValue,
Parents, Patch, ScalarValue,
};
use crate::{
transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop,
@ -17,6 +17,7 @@ use crate::{
pub struct AutoCommit {
doc: Automerge,
transaction: Option<TransactionInner>,
observer: Option<OpObserver>,
}
impl Default for AutoCommit {
@ -30,6 +31,7 @@ impl AutoCommit {
Self {
doc: Automerge::new(),
transaction: None,
observer: None,
}
}
@ -56,9 +58,32 @@ impl AutoCommit {
self.doc.get_actor()
}
pub fn enable_observer(&mut self) -> Option<OpObserver> {
self.ensure_transaction_closed();
self.observer.replace(OpObserver::default())
}
pub fn disable_observer(&mut self) -> Option<OpObserver> {
self.ensure_transaction_closed();
self.observer.take()
}
pub fn with_observer(mut self) -> Self {
self.ensure_transaction_closed();
self.enable_observer();
self
}
pub fn without_observer(mut self) -> Self {
self.ensure_transaction_closed();
self.disable_observer();
self
}
fn ensure_transaction_open(&mut self) {
if self.transaction.is_none() {
self.transaction = Some(self.doc.transaction_inner());
let observer = self.observer.as_ref().map(|_| OpObserver::default());
self.transaction = Some(self.doc.transaction_inner(observer))
}
}
@ -66,7 +91,8 @@ impl AutoCommit {
self.ensure_transaction_closed();
Self {
doc: self.doc.fork(),
transaction: self.transaction.clone(),
transaction: None,
observer: None,
}
}
@ -74,13 +100,14 @@ impl AutoCommit {
self.ensure_transaction_closed();
Ok(Self {
doc: self.doc.fork_at(heads)?,
transaction: self.transaction.clone(),
transaction: None,
observer: None,
})
}
fn ensure_transaction_closed(&mut self) {
if let Some(tx) = self.transaction.take() {
tx.commit::<()>(&mut self.doc, None, None, None);
tx.commit(&mut self.doc, None, None, self.observer.as_mut());
}
}
@ -89,32 +116,25 @@ impl AutoCommit {
Ok(Self {
doc,
transaction: None,
observer: None,
})
}
pub fn load_with<Obs: OpObserver>(
data: &[u8],
options: ApplyOptions<'_, Obs>,
) -> Result<Self, AutomergeError> {
pub fn load_with_observer(data: &[u8]) -> Result<Self, AutomergeError> {
let mut observer = OpObserver::default();
let options = ApplyOptions::default().with_op_observer(&mut observer);
let doc = Automerge::load_with(data, options)?;
Ok(Self {
doc,
transaction: None,
observer: Some(observer),
})
}
pub fn load_incremental(&mut self, data: &[u8]) -> Result<usize, AutomergeError> {
self.ensure_transaction_closed();
self.doc.load_incremental(data)
}
pub fn load_incremental_with<'a, Obs: OpObserver>(
&mut self,
data: &[u8],
options: ApplyOptions<'a, Obs>,
) -> Result<usize, AutomergeError> {
self.ensure_transaction_closed();
self.doc.load_incremental_with(data, options)
self.doc
.load_incremental_with(data, self.observer.as_mut().into())
}
pub fn apply_changes(
@ -122,34 +142,16 @@ impl AutoCommit {
changes: impl IntoIterator<Item = Change>,
) -> Result<(), AutomergeError> {
self.ensure_transaction_closed();
self.doc.apply_changes(changes)
}
pub fn apply_changes_with<I: IntoIterator<Item = Change>, Obs: OpObserver>(
&mut self,
changes: I,
options: ApplyOptions<'_, Obs>,
) -> Result<(), AutomergeError> {
self.ensure_transaction_closed();
self.doc.apply_changes_with(changes, options)
self.doc
.apply_changes_with(changes, self.observer.as_mut().into())
}
/// Takes all the changes in `other` which are not in `self` and applies them
pub fn merge(&mut self, other: &mut Self) -> Result<Vec<ChangeHash>, AutomergeError> {
self.ensure_transaction_closed();
other.ensure_transaction_closed();
self.doc.merge(&mut other.doc)
}
/// Takes all the changes in `other` which are not in `self` and applies them
pub fn merge_with<'a, Obs: OpObserver>(
&mut self,
other: &mut Self,
options: ApplyOptions<'a, Obs>,
) -> Result<Vec<ChangeHash>, AutomergeError> {
self.ensure_transaction_closed();
other.ensure_transaction_closed();
self.doc.merge_with(&mut other.doc, options)
self.doc
.merge_with(&mut other.doc, self.observer.as_mut().into())
}
pub fn save(&mut self) -> Vec<u8> {
@ -186,7 +188,7 @@ impl AutoCommit {
self.doc.get_change_by_hash(hash)
}
pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> {
pub fn get_changes_added<'b>(&mut self, other: &'b mut Self) -> Vec<&'b Change> {
self.ensure_transaction_closed();
other.ensure_transaction_closed();
self.doc.get_changes_added(&other.doc)
@ -210,20 +212,10 @@ impl AutoCommit {
&mut self,
sync_state: &mut sync::State,
message: sync::Message,
) -> Result<(), AutomergeError> {
self.ensure_transaction_closed();
self.doc.receive_sync_message(sync_state, message)
}
pub fn receive_sync_message_with<'a, Obs: OpObserver>(
&mut self,
sync_state: &mut sync::State,
message: sync::Message,
options: ApplyOptions<'a, Obs>,
) -> Result<(), AutomergeError> {
self.ensure_transaction_closed();
self.doc
.receive_sync_message_with(sync_state, message, options)
.receive_sync_message_with(sync_state, message, self.observer.as_mut().into())
}
#[cfg(feature = "optree-visualisation")]
@ -240,7 +232,7 @@ impl AutoCommit {
}
pub fn commit(&mut self) -> ChangeHash {
self.commit_with::<()>(CommitOptions::default())
self.commit_with(CommitOptions::default())
}
/// Commit the current operations with some options.
@ -256,11 +248,14 @@ impl AutoCommit {
/// doc.put_object(&ROOT, "todos", ObjType::List).unwrap();
/// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as
/// i64;
/// doc.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now));
/// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now));
/// ```
pub fn commit_with<Obs: OpObserver>(&mut self, options: CommitOptions<'_, Obs>) -> ChangeHash {
pub fn commit_with<'a>(&'a mut self, mut options: CommitOptions<'a>) -> ChangeHash {
// ensure that even no changes triggers a change
self.ensure_transaction_open();
if let Some(observer) = &mut self.observer {
options = options.with_op_observer(observer);
}
let tx = self.transaction.take().unwrap();
tx.commit(
&mut self.doc,
@ -276,6 +271,15 @@ impl AutoCommit {
.map(|tx| tx.rollback(&mut self.doc))
.unwrap_or(0)
}
pub fn take_patches(&mut self) -> Vec<Patch> {
self.ensure_transaction_closed();
if let Some(observer) = &mut self.observer {
observer.take_patches()
} else {
Vec::new()
}
}
}
impl Transactable for AutoCommit {

View file

@ -113,12 +113,23 @@ impl Automerge {
/// Start a transaction.
pub fn transaction(&mut self) -> Transaction<'_> {
Transaction {
inner: Some(self.transaction_inner()),
inner: Some(self.transaction_inner(None)),
doc: self,
}
}
pub(crate) fn transaction_inner(&mut self) -> TransactionInner {
/// Start a transaction.
pub fn transaction_with_observer(&mut self) -> Transaction<'_> {
Transaction {
inner: Some(self.transaction_inner(Some(OpObserver::default()))),
doc: self,
}
}
pub(crate) fn transaction_inner(
&mut self,
op_observer: Option<OpObserver>,
) -> TransactionInner {
let actor = self.get_actor_index();
let seq = self.states.get(&actor).map_or(0, |v| v.len()) as u64 + 1;
let mut deps = self.get_heads();
@ -138,6 +149,7 @@ impl Automerge {
message: None,
extra_bytes: Default::default(),
hash: None,
op_observer,
operations: vec![],
deps,
}
@ -145,7 +157,7 @@ impl Automerge {
/// Run a transaction on this document in a closure, automatically handling commit or rollback
/// afterwards.
pub fn transact<F, O, E>(&mut self, f: F) -> transaction::Result<O, E>
pub fn transact<F, O, E, B>(&mut self, f: F) -> transaction::Result<O, E>
where
F: FnOnce(&mut Transaction<'_>) -> Result<O, E>,
{
@ -164,11 +176,10 @@ impl Automerge {
}
/// Like [`Self::transact`] but with a function for generating the commit options.
pub fn transact_with<'a, F, O, E, C, Obs>(&mut self, c: C, f: F) -> transaction::Result<O, E>
pub fn transact_with<'a, F, O, E, C>(&mut self, c: C, f: F) -> transaction::Result<O, E>
where
F: FnOnce(&mut Transaction<'_>) -> Result<O, E>,
C: FnOnce(&O) -> CommitOptions<'a, Obs>,
Obs: 'a + OpObserver,
C: FnOnce(&O) -> CommitOptions<'a>,
{
let mut tx = self.transaction();
let result = f(&mut tx);
@ -231,8 +242,8 @@ impl Automerge {
None
} else {
self.ops
.parent_object(&obj)
.map(|(id, key)| (self.id_to_exid(id.0), self.export_key(id, key)))
.parent_prop(&obj)
.map(|(id, prop)| (self.id_to_exid(id.0), prop))
}
} else {
None
@ -241,7 +252,7 @@ impl Automerge {
/// Get an iterator over the parents of an object.
pub fn parents(&self, obj: ExId) -> Parents<'_> {
Parents { obj, doc: self }
self.ops.parents(&obj)
}
pub fn path_to_object<O: AsRef<ExId>>(&self, obj: O) -> Vec<(ExId, Prop)> {
@ -250,20 +261,22 @@ impl Automerge {
path
}
/// Export a key to a prop.
fn export_key(&self, obj: ObjId, key: Key) -> Prop {
match key {
Key::Map(m) => Prop::Map(self.ops.m.props.get(m).into()),
Key::Seq(opid) => {
let i = self
.ops
.search(&obj, query::ElemIdPos::new(opid))
.index()
.unwrap();
Prop::Seq(i)
/*
/// Export a key to a prop.
fn export_key(&self, obj: ObjId, key: Key) -> Prop {
match key {
Key::Map(m) => Prop::Map(self.ops.m.props.get(m).into()),
Key::Seq(opid) => {
let i = self
.ops
.search(&obj, query::ElemIdPos::new(opid))
.index()
.unwrap();
Prop::Seq(i)
}
}
}
}
*/
/// Get the keys of the object `obj`.
///
@ -415,25 +428,7 @@ impl Automerge {
}
pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result<ObjId, AutomergeError> {
match id {
ExId::Root => Ok(ObjId::root()),
ExId::Id(ctr, actor, idx) => {
// do a direct get here b/c this could be foriegn and not be within the array
// bounds
if self.ops.m.actors.cache.get(*idx) == Some(actor) {
Ok(ObjId(OpId(*ctr, *idx)))
} else {
// FIXME - make a real error
let idx = self
.ops
.m
.actors
.lookup(actor)
.ok_or(AutomergeError::Fail)?;
Ok(ObjId(OpId(*ctr, idx)))
}
}
}
self.ops.exid_to_obj(id)
}
pub(crate) fn id_to_exid(&self, id: OpId) -> ExId {
@ -572,14 +567,11 @@ impl Automerge {
/// Load a document.
pub fn load(data: &[u8]) -> Result<Self, AutomergeError> {
Self::load_with::<()>(data, ApplyOptions::default())
Self::load_with(data, ApplyOptions::default())
}
/// Load a document.
pub fn load_with<Obs: OpObserver>(
data: &[u8],
options: ApplyOptions<'_, Obs>,
) -> Result<Self, AutomergeError> {
pub fn load_with(data: &[u8], options: ApplyOptions<'_>) -> Result<Self, AutomergeError> {
let changes = Change::load_document(data)?;
let mut doc = Self::new();
doc.apply_changes_with(changes, options)?;
@ -588,14 +580,14 @@ impl Automerge {
/// Load an incremental save of a document.
pub fn load_incremental(&mut self, data: &[u8]) -> Result<usize, AutomergeError> {
self.load_incremental_with::<()>(data, ApplyOptions::default())
self.load_incremental_with(data, ApplyOptions::default())
}
/// Load an incremental save of a document.
pub fn load_incremental_with<Obs: OpObserver>(
pub fn load_incremental_with(
&mut self,
data: &[u8],
options: ApplyOptions<'_, Obs>,
options: ApplyOptions<'_>,
) -> Result<usize, AutomergeError> {
let changes = Change::load_document(data)?;
let start = self.ops.len();
@ -619,14 +611,14 @@ impl Automerge {
&mut self,
changes: impl IntoIterator<Item = Change>,
) -> Result<(), AutomergeError> {
self.apply_changes_with::<_, ()>(changes, ApplyOptions::default())
self.apply_changes_with::<_>(changes, ApplyOptions::default())
}
/// Apply changes to this document.
pub fn apply_changes_with<I: IntoIterator<Item = Change>, Obs: OpObserver>(
pub fn apply_changes_with<I: IntoIterator<Item = Change>>(
&mut self,
changes: I,
mut options: ApplyOptions<'_, Obs>,
mut options: ApplyOptions<'_>,
) -> Result<(), AutomergeError> {
for c in changes {
if !self.history_index.contains_key(&c.hash) {
@ -651,7 +643,7 @@ impl Automerge {
Ok(())
}
fn apply_change<Obs: OpObserver>(&mut self, change: Change, observer: &mut Option<&mut Obs>) {
fn apply_change(&mut self, change: Change, observer: &mut Option<&mut OpObserver>) {
let ops = self.import_ops(&change);
self.update_history(change, ops.len());
if let Some(observer) = observer {
@ -723,14 +715,14 @@ impl Automerge {
/// Takes all the changes in `other` which are not in `self` and applies them
pub fn merge(&mut self, other: &mut Self) -> Result<Vec<ChangeHash>, AutomergeError> {
self.merge_with::<()>(other, ApplyOptions::default())
self.merge_with(other, ApplyOptions::default())
}
/// Takes all the changes in `other` which are not in `self` and applies them
pub fn merge_with<'a, Obs: OpObserver>(
pub fn merge_with<'a>(
&mut self,
other: &mut Self,
options: ApplyOptions<'a, Obs>,
options: ApplyOptions<'a>,
) -> Result<Vec<ChangeHash>, AutomergeError> {
// TODO: Make this fallible and figure out how to do this transactionally
let changes = self

View file

@ -1434,18 +1434,15 @@ fn observe_counter_change_application_overwrite() {
doc1.increment(ROOT, "counter", 5).unwrap();
doc1.commit();
let mut observer = VecOpObserver::default();
let mut doc3 = doc1.clone();
doc3.merge_with(
&mut doc2,
ApplyOptions::default().with_op_observer(&mut observer),
)
.unwrap();
let mut doc3 = doc1.fork();
doc3.enable_observer();
doc3.merge(&mut doc2).unwrap();
assert_eq!(
observer.take_patches(),
doc3.take_patches(),
vec![Patch::Put {
obj: ExId::Root,
path: vec![],
key: Prop::Map("counter".into()),
value: (
ScalarValue::Str("mystring".into()).into(),
@ -1455,16 +1452,12 @@ fn observe_counter_change_application_overwrite() {
}]
);
let mut observer = VecOpObserver::default();
let mut doc4 = doc2.clone();
doc4.merge_with(
&mut doc1,
ApplyOptions::default().with_op_observer(&mut observer),
)
.unwrap();
let mut doc4 = doc2.fork();
doc4.enable_observer();
doc4.merge(&mut doc1).unwrap();
// no patches as the increments operate on an invisible counter
assert_eq!(observer.take_patches(), vec![]);
assert_eq!(doc4.take_patches(), vec![]);
}
#[test]
@ -1476,18 +1469,14 @@ fn observe_counter_change_application() {
let changes = doc.get_changes(&[]).unwrap().into_iter().cloned();
let mut new_doc = AutoCommit::new();
let mut observer = VecOpObserver::default();
new_doc
.apply_changes_with(
changes,
ApplyOptions::default().with_op_observer(&mut observer),
)
.unwrap();
new_doc.enable_observer();
new_doc.apply_changes(changes).unwrap();
assert_eq!(
observer.take_patches(),
new_doc.take_patches(),
vec![
Patch::Put {
obj: ExId::Root,
path: vec![],
key: Prop::Map("counter".into()),
value: (
ScalarValue::counter(1).into(),
@ -1497,11 +1486,13 @@ fn observe_counter_change_application() {
},
Patch::Increment {
obj: ExId::Root,
path: vec![],
key: Prop::Map("counter".into()),
value: (2, ExId::Id(2, doc.get_actor().clone(), 0)),
},
Patch::Increment {
obj: ExId::Root,
path: vec![],
key: Prop::Map("counter".into()),
value: (5, ExId::Id(3, doc.get_actor().clone(), 0)),
}

View file

@ -508,7 +508,7 @@ pub(crate) fn export_change(
operations: change
.operations
.iter()
.map(|(obj, _, op)| export_op(op, obj, actors, props))
.map(|(obj, op)| export_op(op, obj, actors, props))
.collect(),
extra_bytes: change.extra_bytes,
}

View file

@ -102,7 +102,6 @@ pub use map_range::MapRange;
pub use map_range_at::MapRangeAt;
pub use op_observer::OpObserver;
pub use op_observer::Patch;
pub use op_observer::VecOpObserver;
pub use options::ApplyOptions;
pub use parents::Parents;
pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop};

View file

@ -1,94 +1,84 @@
use crate::exid::ExId;
use crate::parents::Parents;
use crate::Prop;
use crate::Value;
/// An observer of operations applied to the document.
pub trait OpObserver {
/// A new value has been inserted into the given object.
///
/// - `objid`: the object that has been inserted into.
/// - `index`: the index the new value has been inserted at.
/// - `tagged_value`: the value that has been inserted and the id of the operation that did the
/// insert.
fn insert(&mut self, objid: ExId, index: usize, tagged_value: (Value<'_>, ExId));
/// A new value has been put into the given object.
///
/// - `objid`: the object that has been put into.
/// - `key`: the key that the value as been put at.
/// - `tagged_value`: the value that has been put into the object and the id of the operation
/// that did the put.
/// - `conflict`: whether this put conflicts with other operations.
fn put(&mut self, objid: ExId, key: Prop, tagged_value: (Value<'_>, ExId), conflict: bool);
/// A counter has been incremented.
///
/// - `objid`: the object that contains the counter.
/// - `key`: they key that the chounter is at.
/// - `tagged_value`: the amount the counter has been incremented by, and the the id of the
/// increment operation.
fn increment(&mut self, objid: ExId, key: Prop, tagged_value: (i64, ExId));
/// A value has beeen deleted.
///
/// - `objid`: the object that has been deleted in.
/// - `key`: the key of the value that has been deleted.
fn delete(&mut self, objid: ExId, key: Prop);
}
impl OpObserver for () {
fn insert(&mut self, _objid: ExId, _index: usize, _tagged_value: (Value<'_>, ExId)) {}
fn put(&mut self, _objid: ExId, _key: Prop, _tagged_value: (Value<'_>, ExId), _conflict: bool) {
}
fn increment(&mut self, _objid: ExId, _key: Prop, _tagged_value: (i64, ExId)) {}
fn delete(&mut self, _objid: ExId, _key: Prop) {}
}
use std::fmt::Debug;
/// Capture operations into a [`Vec`] and store them as patches.
#[derive(Default, Debug, Clone)]
pub struct VecOpObserver {
patches: Vec<Patch>,
pub struct OpObserver {
pub(crate) patches: Vec<Patch>,
}
impl VecOpObserver {
impl OpObserver {
/// Take the current list of patches, leaving the internal list empty and ready for new
/// patches.
pub fn take_patches(&mut self) -> Vec<Patch> {
std::mem::take(&mut self.patches)
}
}
impl OpObserver for VecOpObserver {
fn insert(&mut self, obj_id: ExId, index: usize, (value, id): (Value<'_>, ExId)) {
pub(crate) fn merge(&mut self, other: Self) {
self.patches.extend(other.patches)
}
pub fn insert(
&mut self,
obj_id: ExId,
parents: Parents<'_>,
index: usize,
(value, id): (Value<'_>, ExId),
) {
let mut path = parents.collect::<Vec<_>>();
path.reverse();
self.patches.push(Patch::Insert {
obj: obj_id,
path,
index,
value: (value.into_owned(), id),
});
}
fn put(&mut self, objid: ExId, key: Prop, (value, id): (Value<'_>, ExId), conflict: bool) {
pub fn put(
&mut self,
obj: ExId,
parents: Parents<'_>,
key: Prop,
(value, id): (Value<'_>, ExId),
conflict: bool,
) {
let mut path = parents.collect::<Vec<_>>();
path.reverse();
self.patches.push(Patch::Put {
obj: objid,
obj,
path,
key,
value: (value.into_owned(), id),
conflict,
});
}
fn increment(&mut self, objid: ExId, key: Prop, tagged_value: (i64, ExId)) {
pub fn increment(
&mut self,
obj: ExId,
parents: Parents<'_>,
key: Prop,
tagged_value: (i64, ExId),
) {
let mut path = parents.collect::<Vec<_>>();
path.reverse();
self.patches.push(Patch::Increment {
obj: objid,
obj,
path,
key,
value: tagged_value,
});
}
fn delete(&mut self, objid: ExId, key: Prop) {
self.patches.push(Patch::Delete { obj: objid, key })
pub fn delete(&mut self, obj: ExId, parents: Parents<'_>, key: Prop) {
let mut path = parents.collect::<Vec<_>>();
path.reverse();
self.patches.push(Patch::Delete { obj, path, key })
}
}
@ -99,6 +89,7 @@ pub enum Patch {
Put {
/// The object that was put into.
obj: ExId,
path: Vec<(ExId, Prop)>,
/// The key that the new value was put at.
key: Prop,
/// The value that was put, and the id of the operation that put it there.
@ -110,6 +101,7 @@ pub enum Patch {
Insert {
/// The object that was inserted into.
obj: ExId,
path: Vec<(ExId, Prop)>,
/// The index that the new value was inserted at.
index: usize,
/// The value that was inserted, and the id of the operation that inserted it there.
@ -119,6 +111,7 @@ pub enum Patch {
Increment {
/// The object that was incremented in.
obj: ExId,
path: Vec<(ExId, Prop)>,
/// The key that was incremented.
key: Prop,
/// The amount that the counter was incremented by, and the id of the operation that
@ -129,6 +122,7 @@ pub enum Patch {
Delete {
/// The object that was deleted from.
obj: ExId,
path: Vec<(ExId, Prop)>,
/// The key that was deleted.
key: Prop,
},

View file

@ -3,7 +3,9 @@ use crate::exid::ExId;
use crate::indexed_cache::IndexedCache;
use crate::op_tree::{self, OpTree};
use crate::query::{self, OpIdSearch, TreeQuery};
use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpType};
use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpType, Prop};
use crate::AutomergeError;
use crate::Parents;
use crate::{ObjType, OpObserver};
use fxhash::FxBuildHasher;
use std::cmp::Ordering;
@ -53,7 +55,7 @@ impl OpSetInternal {
}
}
pub(crate) fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key)> {
pub(crate) fn parent(&self, obj: &ObjId) -> Option<(ObjId, Key)> {
let parent = self.trees.get(obj)?.parent?;
let key = self.search(&parent, OpIdSearch::new(obj.0)).key().unwrap();
Some((parent, key))
@ -195,11 +197,11 @@ impl OpSetInternal {
op
}
pub(crate) fn insert_op_with_observer<Obs: OpObserver>(
pub(crate) fn insert_op_with_observer(
&mut self,
obj: &ObjId,
op: Op,
observer: &mut Obs,
observer: &mut OpObserver,
) -> Op {
let q = self.search(obj, query::SeekOpWithPatch::new(&op));
@ -220,21 +222,25 @@ impl OpSetInternal {
if op.insert {
let value = (op.value(), self.id_to_exid(op.id));
observer.insert(ex_obj, seen, value);
let parents = self.parents(&ex_obj);
observer.insert(ex_obj, parents, seen, value);
} else if op.is_delete() {
if let Some(winner) = &values.last() {
let value = (winner.value(), self.id_to_exid(winner.id));
let conflict = values.len() > 1;
observer.put(ex_obj, key, value, conflict);
let parents = self.parents(&ex_obj);
observer.put(ex_obj, parents, key, value, conflict);
} else {
observer.delete(ex_obj, key);
let parents = self.parents(&ex_obj);
observer.delete(ex_obj, parents, key);
}
} else if let Some(value) = op.get_increment_value() {
// only observe this increment if the counter is visible, i.e. the counter's
// create op is in the values
if values.iter().any(|value| op.pred.contains(&value.id)) {
// we have observed the value
observer.increment(ex_obj, key, (value, self.id_to_exid(op.id)));
let parents = self.parents(&ex_obj);
observer.increment(ex_obj, parents, key, (value, self.id_to_exid(op.id)));
}
} else {
let winner = if let Some(last_value) = values.last() {
@ -248,10 +254,12 @@ impl OpSetInternal {
};
let value = (winner.value(), self.id_to_exid(winner.id));
if op.is_list_op() && !had_value_before {
observer.insert(ex_obj, seen, value);
let parents = self.parents(&ex_obj);
observer.insert(ex_obj, parents, seen, value);
} else {
let conflict = !values.is_empty();
observer.put(ex_obj, key, value, conflict);
let parents = self.parents(&ex_obj);
observer.put(ex_obj, parents, key, value, conflict);
}
}
@ -277,6 +285,62 @@ impl OpSetInternal {
dot::render(&graph, &mut out).unwrap();
String::from_utf8_lossy(&out[..]).to_string()
}
pub(crate) fn parent_prop(&self, obj: &ObjId) -> Option<(ObjId, Prop)> {
self.parent(obj)
.map(|(id, key)| (id, self.export_key(&id, key)))
}
pub(crate) fn parents(&self, obj: &ExId) -> Parents<'_> {
Parents {
obj: obj.clone(),
doc: self,
}
}
pub(crate) fn parent_object<O: AsRef<ExId>>(&self, obj: O) -> Option<(ExId, Prop)> {
if let Ok(obj) = self.exid_to_obj(obj.as_ref()) {
if obj == ObjId::root() {
// root has no parent
None
} else {
self.parent_prop(&obj)
.map(|(id, prop)| (self.id_to_exid(id.0), prop))
}
} else {
None
}
}
pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result<ObjId, AutomergeError> {
match id {
ExId::Root => Ok(ObjId::root()),
ExId::Id(ctr, actor, idx) => {
// do a direct get here b/c this could be foriegn and not be within the array
// bounds
if self.m.actors.cache.get(*idx) == Some(actor) {
Ok(ObjId(OpId(*ctr, *idx)))
} else {
// FIXME - make a real error
let idx = self.m.actors.lookup(actor).ok_or(AutomergeError::Fail)?;
Ok(ObjId(OpId(*ctr, idx)))
}
}
}
}
pub(crate) fn export_key(&self, obj: &ObjId, key: Key) -> Prop {
match key {
Key::Map(m) => Prop::Map(self.m.props.get(m).into()),
Key::Seq(opid) => {
let i = self
.search(obj, query::ElemIdPos::new(opid))
.index()
.unwrap();
Prop::Seq(i)
}
}
}
}
impl Default for OpSetInternal {

View file

@ -1,16 +1,24 @@
use crate::op_observer::OpObserver;
#[derive(Debug, Default)]
pub struct ApplyOptions<'a, Obs> {
pub op_observer: Option<&'a mut Obs>,
pub struct ApplyOptions<'a> {
pub op_observer: Option<&'a mut OpObserver>,
}
impl<'a, Obs> ApplyOptions<'a, Obs> {
pub fn with_op_observer(mut self, op_observer: &'a mut Obs) -> Self {
impl<'a> ApplyOptions<'a> {
pub fn with_op_observer(mut self, op_observer: &'a mut OpObserver) -> Self {
self.op_observer = Some(op_observer);
self
}
pub fn set_op_observer(&mut self, op_observer: &'a mut Obs) -> &mut Self {
pub fn set_op_observer(&mut self, op_observer: &'a mut OpObserver) -> &mut Self {
self.op_observer = Some(op_observer);
self
}
}
impl<'a> From<Option<&'a mut OpObserver>> for ApplyOptions<'a> {
fn from(o: Option<&'a mut OpObserver>) -> Self {
ApplyOptions { op_observer: o }
}
}

View file

@ -1,9 +1,11 @@
use crate::{exid::ExId, Automerge, Prop};
use crate::exid::ExId;
use crate::op_set::OpSet;
use crate::Prop;
#[derive(Debug)]
pub struct Parents<'a> {
pub(crate) obj: ExId,
pub(crate) doc: &'a Automerge,
pub(crate) doc: &'a OpSet,
}
impl<'a> Iterator for Parents<'a> {

View file

@ -8,7 +8,7 @@ use std::{
use crate::{
decoding, decoding::Decoder, encoding::Encodable, types::HASH_SIZE, ApplyOptions, Automerge,
AutomergeError, Change, ChangeHash, OpObserver,
AutomergeError, Change, ChangeHash,
};
mod bloom;
@ -108,14 +108,14 @@ impl Automerge {
sync_state: &mut State,
message: Message,
) -> Result<(), AutomergeError> {
self.receive_sync_message_with::<()>(sync_state, message, ApplyOptions::default())
self.receive_sync_message_with(sync_state, message, ApplyOptions::default())
}
pub fn receive_sync_message_with<'a, Obs: OpObserver>(
pub fn receive_sync_message_with<'a>(
&mut self,
sync_state: &mut State,
message: Message,
options: ApplyOptions<'a, Obs>,
options: ApplyOptions<'a>,
) -> Result<(), AutomergeError> {
let before_heads = self.get_heads();

View file

@ -1,12 +1,14 @@
use crate::op_observer::OpObserver;
/// Optional metadata for a commit.
#[derive(Debug, Default)]
pub struct CommitOptions<'a, Obs> {
pub struct CommitOptions<'a> {
pub message: Option<String>,
pub time: Option<i64>,
pub op_observer: Option<&'a mut Obs>,
pub op_observer: Option<&'a mut OpObserver>,
}
impl<'a, Obs> CommitOptions<'a, Obs> {
impl<'a> CommitOptions<'a> {
/// Add a message to the commit.
pub fn with_message<S: Into<String>>(mut self, message: S) -> Self {
self.message = Some(message.into());
@ -31,12 +33,12 @@ impl<'a, Obs> CommitOptions<'a, Obs> {
self
}
pub fn with_op_observer(mut self, op_observer: &'a mut Obs) -> Self {
pub fn with_op_observer(mut self, op_observer: &'a mut OpObserver) -> Self {
self.op_observer = Some(op_observer);
self
}
pub fn set_op_observer(&mut self, op_observer: &'a mut Obs) -> &mut Self {
pub fn set_op_observer(&mut self, op_observer: &'a mut OpObserver) -> &mut Self {
self.op_observer = Some(op_observer);
self
}

View file

@ -17,7 +17,8 @@ pub(crate) struct TransactionInner {
pub(crate) extra_bytes: Vec<u8>,
pub(crate) hash: Option<ChangeHash>,
pub(crate) deps: Vec<ChangeHash>,
pub(crate) operations: Vec<(ObjId, Prop, Op)>,
pub(crate) op_observer: Option<OpObserver>,
pub(crate) operations: Vec<(ObjId, Op)>,
}
impl TransactionInner {
@ -25,15 +26,42 @@ impl TransactionInner {
self.operations.len()
}
fn observe_op(&mut self, doc: &mut Automerge, obj: ObjId, prop: Prop, op: &Op) {
if let Some(observer) = &mut self.op_observer {
let ex_obj = doc.ops.id_to_exid(obj.0);
let parents = doc.ops.parents(&ex_obj);
if op.insert {
let value = (op.value(), doc.id_to_exid(op.id));
match prop {
Prop::Map(_) => panic!("insert into a map"),
Prop::Seq(index) => observer.insert(ex_obj, parents, index, value),
}
} else if op.is_delete() {
observer.delete(ex_obj, parents, prop);
} else if let Some(value) = op.get_increment_value() {
observer.increment(ex_obj, parents, prop, (value, doc.id_to_exid(op.id)));
} else {
let value = (op.value(), doc.ops.id_to_exid(op.id));
observer.put(ex_obj, parents, prop, value, false);
}
}
}
/// Commit the operations performed in this transaction, returning the hashes corresponding to
/// the new heads.
pub(crate) fn commit<Obs: OpObserver>(
pub(crate) fn commit(
mut self,
doc: &mut Automerge,
message: Option<String>,
time: Option<i64>,
op_observer: Option<&mut Obs>,
observer: Option<&mut OpObserver>,
) -> ChangeHash {
if let Some(tx_observer) = self.op_observer.take() {
if let Some(observer) = observer {
observer.merge(tx_observer)
}
}
if message.is_some() {
self.message = message;
}
@ -42,26 +70,6 @@ impl TransactionInner {
self.time = t;
}
if let Some(observer) = op_observer {
for (obj, prop, op) in &self.operations {
let ex_obj = doc.ops.id_to_exid(obj.0);
if op.insert {
let value = (op.value(), doc.id_to_exid(op.id));
match prop {
Prop::Map(_) => panic!("insert into a map"),
Prop::Seq(index) => observer.insert(ex_obj, *index, value),
}
} else if op.is_delete() {
observer.delete(ex_obj, prop.clone());
} else if let Some(value) = op.get_increment_value() {
observer.increment(ex_obj, prop.clone(), (value, doc.id_to_exid(op.id)));
} else {
let value = (op.value(), doc.ops.id_to_exid(op.id));
observer.put(ex_obj, prop.clone(), value, false);
}
}
}
let num_ops = self.pending_ops();
let change = export_change(self, &doc.ops.m.actors, &doc.ops.m.props);
let hash = change.hash;
@ -75,7 +83,7 @@ impl TransactionInner {
pub(crate) fn rollback(self, doc: &mut Automerge) -> usize {
let num = self.pending_ops();
// remove in reverse order so sets are removed before makes etc...
for (obj, _prop, op) in self.operations.into_iter().rev() {
for (obj, op) in self.operations.into_iter().rev() {
for pred_id in &op.pred {
if let Some(p) = doc.ops.search(&obj, OpIdSearch::new(*pred_id)).index() {
doc.ops.replace(&obj, p, |o| o.remove_succ(&op));
@ -172,7 +180,9 @@ impl TransactionInner {
doc.ops.insert(pos, &obj, op.clone());
}
self.operations.push((obj, prop, op));
self.observe_op(doc, obj, prop, &op);
self.operations.push((obj, op));
}
pub(crate) fn insert<V: Into<ScalarValue>>(
@ -224,7 +234,10 @@ impl TransactionInner {
};
doc.ops.insert(query.pos(), &obj, op.clone());
self.operations.push((obj, Prop::Seq(index), op));
self.observe_op(doc, obj, Prop::Seq(index), &op);
self.operations.push((obj, op));
Ok(id)
}

View file

@ -1,7 +1,7 @@
use std::ops::RangeBounds;
use crate::exid::ExId;
use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValue, Value, Values};
use crate::{Automerge, ChangeHash, KeysAt, ObjType, Prop, ScalarValue, Value, Values};
use crate::{AutomergeError, Keys};
use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt};
@ -39,7 +39,7 @@ impl<'a> Transaction<'a> {
self.inner
.take()
.unwrap()
.commit::<()>(self.doc, None, None, None)
.commit(self.doc, None, None, None)
}
/// Commit the operations in this transaction with some options.
@ -56,9 +56,9 @@ impl<'a> Transaction<'a> {
/// tx.put_object(ROOT, "todos", ObjType::List).unwrap();
/// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as
/// i64;
/// tx.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now));
/// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now));
/// ```
pub fn commit_with<Obs: OpObserver>(mut self, options: CommitOptions<'_, Obs>) -> ChangeHash {
pub fn commit_with(mut self, options: CommitOptions<'_>) -> ChangeHash {
self.inner.take().unwrap().commit(
self.doc,
options.message,

View file

@ -1,7 +1,6 @@
use automerge::transaction::Transactable;
use automerge::{
ActorId, ApplyOptions, AutoCommit, Automerge, AutomergeError, ObjType, ScalarValue, Value,
VecOpObserver, ROOT,
ActorId, AutoCommit, Automerge, AutomergeError, ObjType, ScalarValue, Value, ROOT,
};
mod helpers;
@ -941,12 +940,8 @@ fn observe_counter_change_application() {
let changes = doc.get_changes(&[]).unwrap().into_iter().cloned();
let mut doc = AutoCommit::new();
let mut observer = VecOpObserver::default();
doc.apply_changes_with(
changes,
ApplyOptions::default().with_op_observer(&mut observer),
)
.unwrap();
doc.enable_observer();
doc.apply_changes(changes).unwrap();
}
#[test]

View file

@ -10,6 +10,7 @@ const Automerge = require('../automerge-wasm')
const start = new Date()
let doc = Automerge.create();
doc.enablePatches(true)
let text = doc.putObject("_root", "text", "", "text")
for (let i = 0; i < edits.length; i++) {
@ -28,6 +29,10 @@ let t_time = new Date()
let t = doc.text(text);
console.log(`doc.text in ${new Date() - t_time} ms`)
let p_time = new Date()
let p = doc.popPatches();
console.log(`doc.popPatches in ${new Date() - p_time} ms`)
if (doc.text(text) !== finalText) {
throw new RangeError('ERROR: final text did not match expectation')
}