Compare commits

...

70 commits
main ... marks

Author SHA1 Message Date
Orion Henry
98328e2ecb automerge-js adding trace to out of date errors 2022-08-26 14:15:01 -05:00
Orion Henry
ee50b4a3ca dropped change set accidently 2022-08-23 11:43:21 -05:00
Orion Henry
64e7efd843 dont crash - show markbeing and markend 2022-08-22 14:40:21 -05:00
Orion Henry
50cea87c82 wasm-pack v0.1.6 2022-08-22 14:30:01 -05:00
Orion Henry
8859806da5 Merge branch 'main' into marks 2022-06-16 16:59:43 -04:00
Orion Henry
72c09c3cb9 clean up warnings/clippy/fmt 2022-06-14 15:25:03 -04:00
Peter van Hardenberg
cf97432df3 all tests passing for wasm & rust, note that we removed the Vec<ExId> / touched values from various functions 2022-06-14 11:54:07 -07:00
Peter van Hardenberg
8982b96c41 first build since merge 2022-06-14 11:44:52 -07:00
Orion Henry
d0b34a7dde Merge branch 'main' into marks 2022-04-20 22:03:29 -04:00
Orion Henry
c43dc18493 typescript bugfix 2022-04-20 15:52:25 -04:00
Orion Henry
71977451b6 typescript fix 2022-04-20 15:50:38 -04:00
Orion Henry
5be67d10cb v0.0.26 2022-04-19 11:05:37 -04:00
Orion Henry
bce30fa9b2 Merge branch 'experiment' into marks 2022-04-17 09:00:02 -04:00
Orion Henry
38f3bcb401 0.0.25 2022-03-30 13:02:43 -04:00
Orion Henry
21a7bd91dc fix attr del bug 2022-03-30 12:39:55 -04:00
Rae Mac
4e304d11c6 attribute deletion test 2022-03-29 16:06:08 -07:00
Orion Henry
08e6a86f28 fmt 2022-03-29 12:07:59 -04:00
Orion Henry
979b9fd362 Merge branch 'experiment' into marks 2022-03-29 12:04:12 -04:00
Orion Henry
c149da3a6d attr bug 2022-03-26 13:31:39 -04:00
Orion Henry
af02ba6b86 0.0.23 - getChangeByHash 2022-03-23 09:49:06 -04:00
Orion Henry
657bd22d61 Merge branch 'experiment' into marks 2022-03-23 09:35:20 -04:00
Orion Henry
2663e0315c fix test 2022-03-22 13:38:46 -04:00
Orion Henry
bebd310ab6
Merge pull request #314 from automerge/list-changed
Example test for confusing behavior
2022-03-22 13:17:50 -04:00
Rae Mac
bc98b1ecc9 Example test for confusing behavior 2022-03-22 10:06:13 -07:00
Orion Henry
84619d8331 Merge branch 'changed_objs' into marks 2022-03-21 17:39:34 -04:00
Orion Henry
5d4e1f0c42 return touched objects from apply_changes 2022-03-21 17:36:11 -04:00
Orion Henry
25afa0b12b unmark() - 0.0.21 2022-03-21 13:36:01 -04:00
Orion Henry
0cf54c36a8 0.0.21 2022-03-17 19:15:21 -04:00
Orion Henry
99b1127f5c Merge branch 'experiment' into marks 2022-03-17 14:42:35 -04:00
Orion Henry
ae87d7bc00 v20 - object replacement char 2022-03-14 14:47:12 -04:00
Orion Henry
ce9771b29c
Merge pull request #306 from blaine/marks
Additional Attribution Tests
2022-03-10 19:41:16 -05:00
Blaine Cook
e00797c512 test for attribution correctly not surfacing temporary text (inserted and deleted after baseline) 2022-03-10 15:42:57 -08:00
Orion Henry
57a0f62b75 v0.0.19 wasm 2022-03-10 09:23:23 -05:00
Orion Henry
a0f78561c4 Merge branch 'paths' into marks 2022-03-09 19:28:10 -05:00
Andrew Jeffery
ff1a20c626 Document some sync api 2022-03-09 15:15:37 -05:00
Andrew Jeffery
b14d874dfc Move sync structs to module 2022-03-09 15:15:36 -05:00
Andrew Jeffery
aad4852e30 Misc API updates
- Commit now returns just a single hash rather than a vec. Since the
  change we create from committing has all of the heads as deps there
  can only be one hash/head after committing.
- Apply changes now takes a Vec rather than a slice. This avoids having
  to clone them inside.
- transact_with now passes the result of the closure to the commit
  options function
- Remove patch struct
- Change receive_sync_message to return a () instead of the
  `Option<Patch>`
- Change `Transaction*` structs to just `*` and use the transaction
  module
- Make CommitOptions fields public
2022-03-09 15:14:22 -05:00
Andrew Jeffery
63b4c96e71 Update save call 2022-03-09 15:14:22 -05:00
Andrew Jeffery
1b1d50dfaf Update delete nothing tests 2022-03-09 15:14:22 -05:00
Andrew Jeffery
d02737ad12 Fix del missing key in map 2022-03-09 15:14:22 -05:00
Andrew Jeffery
8f4c1fc209 Add failing tests for deleting nothing 2022-03-09 15:14:22 -05:00
Andrew Jeffery
304195d720 Fix typo on QueryResult 2022-03-09 15:14:21 -05:00
Orion Henry
b81e0fd619 update wasm test for set_object 2022-03-09 15:14:20 -05:00
Orion Henry
22b62b14b5 forgot to add the new file 2022-03-08 12:03:31 -05:00
Orion Henry
cbf1ac03b2 added attribute2() - janky version 2022-03-08 12:00:02 -05:00
Orion Henry
4094e82f04 rename tests to attribute 2022-03-07 13:50:05 -05:00
Orion Henry
42446fa5c2 blame -> attribute 2022-03-07 13:45:56 -05:00
Orion Henry
6d5f16c9cd Merge branch 'experiment' into marks 2022-03-04 17:26:14 -05:00
Orion Henry
dbbdd616fd clippy/fmt 2022-03-04 14:16:06 -05:00
Orion Henry
523af57a26 Merge branch 'experiment' into marks 2022-03-04 14:09:00 -05:00
Orion Henry
d195a81d49 v17 --release 2022-03-02 18:35:58 -05:00
Orion Henry
4c11c86532 v0.0.16 - properly blame items deleted by both 2022-03-02 10:27:54 -05:00
Orion Henry
42b6ffe9d8 v0.0.15 2022-03-02 09:33:04 -05:00
Orion Henry
b21b59e6a1 blame v0.1 2022-03-01 22:09:21 -05:00
Orion Henry
c1be06a6c7 blame wip 1 2022-02-28 19:02:36 -05:00
Orion Henry
e07211278f v0.0.14 2022-02-24 18:46:20 -05:00
Orion Henry
3c3f411329 update to new autotransaction api 2022-02-24 18:43:44 -05:00
Orion Henry
5aad691e31 Merge branch 'experiment' into marks 2022-02-24 18:10:19 -05:00
Orion Henry
872efc5756 v10 2022-02-24 17:41:55 -05:00
Orion Henry
e37395f975 make() defaults to text 2022-02-24 17:41:35 -05:00
Orion Henry
a84fa64554 change MAP,LIST,TEXT to be {},[],'' - allow recursion 2022-02-24 17:41:33 -05:00
Orion Henry
a37d4a6870 spans will now respect non-graphmem values 2022-02-24 16:41:01 -05:00
Blaine Cook
5eb5714c13 add failing test for marks handling in 3-way merge scenario 2022-02-24 16:24:17 -05:00
Blaine Cook
4f9b95b5b8 add test for merge behaviour of marks 2022-02-24 16:24:17 -05:00
Orion Henry
36b4f08d20 wasm to 0.0.7 2022-02-22 12:13:01 -05:00
Orion Henry
015e8ce465 choking on bad value function 2022-02-22 12:12:59 -05:00
Orion Henry
ea2f29d681 wasm to 0.0.6 2022-02-22 12:11:49 -05:00
Orion Henry
c8cd069e51 tweak files 2022-02-22 12:11:49 -05:00
Orion Henry
2ba2da95a8 attempt at new packaging 2022-02-22 12:11:49 -05:00
Orion Henry
561cad44e3 Revert "remove marks"
This reverts commit c8c695618b.
2022-02-22 12:11:49 -05:00
37 changed files with 1748 additions and 48 deletions

View file

@ -3,6 +3,7 @@
//const CACHE = Symbol('_cache') // map from objectId to immutable object //const CACHE = Symbol('_cache') // map from objectId to immutable object
export const STATE = Symbol.for('_am_state') // object containing metadata about current state (e.g. sequence numbers) export const STATE = Symbol.for('_am_state') // object containing metadata about current state (e.g. sequence numbers)
export const HEADS = Symbol.for('_am_heads') // object containing metadata about current state (e.g. sequence numbers) export const HEADS = Symbol.for('_am_heads') // object containing metadata about current state (e.g. sequence numbers)
export const TRACE = Symbol.for('_am_trace') // object containing metadata about current state (e.g. sequence numbers)
export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current state (e.g. sequence numbers) export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current state (e.g. sequence numbers)
export const READ_ONLY = Symbol.for('_am_readOnly') // object containing metadata about current state (e.g. sequence numbers) export const READ_ONLY = Symbol.for('_am_readOnly') // object containing metadata about current state (e.g. sequence numbers)
export const FROZEN = Symbol.for('_am_frozen') // object containing metadata about current state (e.g. sequence numbers) export const FROZEN = Symbol.for('_am_frozen') // object containing metadata about current state (e.g. sequence numbers)

View file

@ -2,7 +2,7 @@
export { uuid } from './uuid' export { uuid } from './uuid'
import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies"
import { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" import { STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN } from "./constants"
import { AutomergeValue, Counter } from "./types" import { AutomergeValue, Counter } from "./types"
export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types" export { AutomergeValue, Text, Counter, Int, Uint, Float64 } from "./types"
@ -48,6 +48,20 @@ function _heads<T>(doc: Doc<T>) : Heads | undefined {
return Reflect.get(doc,HEADS) return Reflect.get(doc,HEADS)
} }
function _trace<T>(doc: Doc<T>) : string | undefined {
return Reflect.get(doc,TRACE)
}
function _set_heads<T>(doc: Doc<T>, heads: Heads) {
Reflect.set(doc,HEADS,heads)
Reflect.set(doc,TRACE,(new Error()).stack)
}
function _clear_heads<T>(doc: Doc<T>) {
Reflect.set(doc,HEADS,undefined)
Reflect.set(doc,TRACE,undefined)
}
function _obj<T>(doc: Doc<T>) : ObjID { function _obj<T>(doc: Doc<T>) : ObjID {
return Reflect.get(doc,OBJECT_ID) return Reflect.get(doc,OBJECT_ID)
} }
@ -104,7 +118,7 @@ function _change<T>(doc: Doc<T>, options: ChangeOptions, callback: ChangeFn<T>):
throw new RangeError("Attempting to use an outdated Automerge document") throw new RangeError("Attempting to use an outdated Automerge document")
} }
if (!!_heads(doc) === true) { if (!!_heads(doc) === true) {
throw new RangeError("Attempting to change an out of date document"); throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc));
} }
if (_readonly(doc) === false) { if (_readonly(doc) === false) {
throw new RangeError("Calls to Automerge.change cannot be nested") throw new RangeError("Calls to Automerge.change cannot be nested")
@ -112,13 +126,13 @@ function _change<T>(doc: Doc<T>, options: ChangeOptions, callback: ChangeFn<T>):
const state = _state(doc) const state = _state(doc)
const heads = state.getHeads() const heads = state.getHeads()
try { try {
Reflect.set(doc,HEADS,heads) _set_heads(doc,heads)
Reflect.set(doc,FROZEN,true) Reflect.set(doc,FROZEN,true)
const root : T = rootProxy(state); const root : T = rootProxy(state);
callback(root) callback(root)
if (state.pendingOps() === 0) { if (state.pendingOps() === 0) {
Reflect.set(doc,FROZEN,false) Reflect.set(doc,FROZEN,false)
Reflect.set(doc,HEADS,undefined) _clear_heads(doc)
return doc return doc
} else { } else {
state.commit(options.message, options.time) state.commit(options.message, options.time)
@ -127,7 +141,7 @@ function _change<T>(doc: Doc<T>, options: ChangeOptions, callback: ChangeFn<T>):
} catch (e) { } catch (e) {
//console.log("ERROR: ",e) //console.log("ERROR: ",e)
Reflect.set(doc,FROZEN,false) Reflect.set(doc,FROZEN,false)
Reflect.set(doc,HEADS,undefined) _clear_heads(doc)
state.rollback() state.rollback()
throw e throw e
} }
@ -168,14 +182,14 @@ export function save<T>(doc: Doc<T>) : Uint8Array {
export function merge<T>(local: Doc<T>, remote: Doc<T>) : Doc<T> { export function merge<T>(local: Doc<T>, remote: Doc<T>) : Doc<T> {
if (!!_heads(local) === true) { if (!!_heads(local) === true) {
throw new RangeError("Attempting to change an out of date document"); throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc));
} }
const localState = _state(local) const localState = _state(local)
const heads = localState.getHeads() const heads = localState.getHeads()
const remoteState = _state(remote) const remoteState = _state(remote)
const changes = localState.getChangesAdded(remoteState) const changes = localState.getChangesAdded(remoteState)
localState.applyChanges(changes) localState.applyChanges(changes)
Reflect.set(local,HEADS,heads) _set_heads(local,heads)
return rootProxy(localState, true) return rootProxy(localState, true)
} }
@ -267,7 +281,7 @@ export function applyChanges<T>(doc: Doc<T>, changes: Change[]) : [Doc<T>] {
const state = _state(doc) const state = _state(doc)
const heads = state.getHeads() const heads = state.getHeads()
state.applyChanges(changes) state.applyChanges(changes)
Reflect.set(doc,HEADS,heads) _set_heads(doc,heads)
return [rootProxy(state, true)]; return [rootProxy(state, true)];
} }
@ -322,7 +336,7 @@ export function receiveSyncMessage<T>(doc: Doc<T>, inState: SyncState, message:
throw new RangeError("Attempting to use an outdated Automerge document") throw new RangeError("Attempting to use an outdated Automerge document")
} }
if (!!_heads(doc) === true) { if (!!_heads(doc) === true) {
throw new RangeError("Attempting to change an out of date document"); throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc));
} }
if (_readonly(doc) === false) { if (_readonly(doc) === false) {
throw new RangeError("Calls to Automerge.change cannot be nested") throw new RangeError("Calls to Automerge.change cannot be nested")
@ -330,7 +344,7 @@ export function receiveSyncMessage<T>(doc: Doc<T>, inState: SyncState, message:
const state = _state(doc) const state = _state(doc)
const heads = state.getHeads() const heads = state.getHeads()
state.receiveSyncMessage(syncState, message) state.receiveSyncMessage(syncState, message)
Reflect.set(doc,HEADS,heads) _set_heads(doc,heads)
const outState = ApiHandler.exportSyncState(syncState) const outState = ApiHandler.exportSyncState(syncState)
return [rootProxy(state, true), outState, null]; return [rootProxy(state, true), outState, null];
} }

View file

@ -5,7 +5,7 @@ import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./t
import { Int, Uint, Float64 } from "./numbers" import { Int, Uint, Float64 } from "./numbers"
import { Counter, getWriteableCounter } from "./counter" import { Counter, getWriteableCounter } from "./counter"
import { Text } from "./text" import { Text } from "./text"
import { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants" import { STATE, HEADS, TRACE, FROZEN, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants"
function parseListIndex(key) { function parseListIndex(key) {
if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10)
@ -108,6 +108,7 @@ const MapHandler = {
if (key === READ_ONLY) return readonly if (key === READ_ONLY) return readonly
if (key === FROZEN) return frozen if (key === FROZEN) return frozen
if (key === HEADS) return heads if (key === HEADS) return heads
if (key === TRACE) return target.trace
if (key === STATE) return context; if (key === STATE) return context;
if (!cache[key]) { if (!cache[key]) {
cache[key] = valueAt(target, key) cache[key] = valueAt(target, key)
@ -129,6 +130,10 @@ const MapHandler = {
target.heads = val target.heads = val
return true return true
} }
if (key === TRACE) {
target.trace = val
return true
}
const [ value, datatype ] = import_value(val) const [ value, datatype ] = import_value(val)
if (frozen) { if (frozen) {
throw new RangeError("Attempting to use an outdated Automerge document") throw new RangeError("Attempting to use an outdated Automerge document")
@ -211,6 +216,7 @@ const ListHandler = {
if (index === READ_ONLY) return readonly if (index === READ_ONLY) return readonly
if (index === FROZEN) return frozen if (index === FROZEN) return frozen
if (index === HEADS) return heads if (index === HEADS) return heads
if (index === TRACE) return target.trace
if (index === STATE) return context; if (index === STATE) return context;
if (index === 'length') return context.length(objectId, heads); if (index === 'length') return context.length(objectId, heads);
if (index === Symbol.iterator) { if (index === Symbol.iterator) {
@ -246,6 +252,10 @@ const ListHandler = {
target.heads = val target.heads = val
return true return true
} }
if (index === TRACE) {
target.trace = val
return true
}
if (typeof index == "string") { if (typeof index == "string") {
throw new RangeError('list index must be a number') throw new RangeError('list index must be a number')
} }
@ -356,6 +366,7 @@ const TextHandler = Object.assign({}, ListHandler, {
if (index === READ_ONLY) return readonly if (index === READ_ONLY) return readonly
if (index === FROZEN) return frozen if (index === FROZEN) return frozen
if (index === HEADS) return heads if (index === HEADS) return heads
if (index === TRACE) return target.trace
if (index === STATE) return context; if (index === STATE) return context;
if (index === 'length') return context.length(objectId, heads); if (index === 'length') return context.length(objectId, heads);
if (index === Symbol.iterator) { if (index === Symbol.iterator) {

View file

@ -1,5 +1,7 @@
/node_modules /node_modules
/dev /dev
/node
/web
/target /target
Cargo.lock Cargo.lock
yarn.lock yarn.lock

View file

@ -40,10 +40,10 @@ version = "^0.2"
features = ["serde-serialize", "std"] features = ["serde-serialize", "std"]
[package.metadata.wasm-pack.profile.release] [package.metadata.wasm-pack.profile.release]
# wasm-opt = false wasm-opt = true
[package.metadata.wasm-pack.profile.profiling] [package.metadata.wasm-pack.profile.profiling]
wasm-opt = false wasm-opt = true
# The `web-sys` crate allows you to interact with the various browser APIs, # The `web-sys` crate allows you to interact with the various browser APIs,
# like the DOM. # like the DOM.

File diff suppressed because one or more lines are too long

View file

@ -1,2 +1,38 @@
import { Automerge as VanillaAutomerge } from "automerge-types"
export * from "automerge-types" export * from "automerge-types"
export { default } from "automerge-types" export { default } from "automerge-types"
export class Automerge extends VanillaAutomerge {
// experimental spans api - unstable!
mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void;
unmark(obj: ObjID, mark: ObjID): void;
spans(obj: ObjID): any;
raw_spans(obj: ObjID): any;
blame(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
attribute(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
attribute2(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
// override old methods that return automerge
clone(actor?: string): Automerge;
fork(actor?: string): Automerge;
forkAt(heads: Heads, actor?: string): Automerge;
}
export type ChangeSetDeletion = {
pos: number;
val: string;
}
export type ChangeSetAddition = {
start: number;
end: number;
};
export type ChangeSet = {
add: ChangeSetAddition[];
del: ChangeSetDeletion[];
};
export function create(actor?: Actor): Automerge;
export function load(data: Uint8Array, actor?: Actor): Automerge;

View file

@ -2,6 +2,4 @@ let wasm = require("./bindgen")
module.exports = wasm module.exports = wasm
module.exports.load = module.exports.loadDoc module.exports.load = module.exports.loadDoc
delete module.exports.loadDoc delete module.exports.loadDoc
Object.defineProperty(module.exports, "__esModule", { value: true })
module.exports.init = () => (new Promise((resolve,reject) => { resolve(module.exports) })) module.exports.init = () => (new Promise((resolve,reject) => { resolve(module.exports) }))
module.exports.default = module.exports.init

View file

@ -4,11 +4,11 @@
"Alex Good <alex@memoryandthought.me>", "Alex Good <alex@memoryandthought.me>",
"Martin Kleppmann" "Martin Kleppmann"
], ],
"name": "automerge-wasm", "name": "automerge-wasm-pack",
"description": "wasm-bindgen bindings to the automerge rust implementation", "description": "wasm-bindgen bindings to the automerge rust implementation",
"homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm",
"repository": "github:automerge/automerge-rs", "repository": "github:automerge/automerge-rs",
"version": "0.1.6", "version": "0.1.8",
"license": "MIT", "license": "MIT",
"files": [ "files": [
"README.md", "README.md",

View file

@ -352,6 +352,15 @@ pub(crate) fn get_heads(heads: Option<Array>) -> Option<Vec<ChangeHash>> {
heads.ok() heads.ok()
} }
pub(crate) fn get_js_heads(heads: JsValue) -> Result<Vec<ChangeHash>, JsValue> {
let heads = heads.dyn_into::<Array>()?;
heads
.iter()
.map(|j| j.into_serde())
.collect::<Result<Vec<_>, _>>()
.map_err(to_js_err)
}
pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue {
let keys = doc.keys(obj); let keys = doc.keys(obj);
let map = Object::new(); let map = Object::new();

View file

@ -34,6 +34,7 @@ use automerge::Patch;
use automerge::VecOpObserver; use automerge::VecOpObserver;
use automerge::{Change, ObjId, Prop, Value, ROOT}; use automerge::{Change, ObjId, Prop, Value, ROOT};
use js_sys::{Array, Object, Uint8Array}; use js_sys::{Array, Object, Uint8Array};
use regex::Regex;
use std::convert::TryInto; use std::convert::TryInto;
use wasm_bindgen::prelude::*; use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast; use wasm_bindgen::JsCast;
@ -43,8 +44,8 @@ mod sync;
mod value; mod value;
use interop::{ use interop::{
get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, to_js_err, get_heads, get_js_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at,
to_objtype, to_prop, AR, JS, to_js_err, to_objtype, to_prop, AR, JS,
}; };
use sync::SyncState; use sync::SyncState;
use value::{datatype, ScalarValue}; use value::{datatype, ScalarValue};
@ -161,12 +162,9 @@ impl Automerge {
} else { } else {
ApplyOptions::default() ApplyOptions::default()
}; };
let heads = self.doc.merge_with(&mut other.doc, options)?; let objs = self.doc.merge_with(&mut other.doc, options)?;
let heads: Array = heads let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect();
.iter() Ok(objs)
.map(|h| JsValue::from_str(&hex::encode(&h.0)))
.collect();
Ok(heads)
} }
pub fn rollback(&mut self) -> f64 { pub fn rollback(&mut self) -> f64 {
@ -292,6 +290,18 @@ impl Automerge {
Ok(()) Ok(())
} }
pub fn make(
&mut self,
obj: JsValue,
prop: JsValue,
value: JsValue,
_datatype: JsValue,
) -> Result<JsValue, JsValue> {
// remove this
am::log!("doc.make() is depricated - please use doc.set_object() or doc.insert_object()");
self.put_object(obj, prop, value)
}
#[wasm_bindgen(js_name = putObject)] #[wasm_bindgen(js_name = putObject)]
pub fn put_object( pub fn put_object(
&mut self, &mut self,
@ -311,7 +321,7 @@ impl Automerge {
fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), JsValue> { fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), JsValue> {
for (p, v) in vals { for (p, v) in vals {
let (value, subvals) = self.import_value(&v, None)?; let (value, subvals) = self.import_value(&v, None)?;
//let opid = self.0.set(id, p, value)?; //let opid = self.doc.set(id, p, value)?;
let opid = match (p, value) { let opid = match (p, value) {
(Prop::Map(s), Value::Object(objtype)) => { (Prop::Map(s), Value::Object(objtype)) => {
Some(self.doc.put_object(obj, s, objtype)?) Some(self.doc.put_object(obj, s, objtype)?)
@ -551,6 +561,209 @@ impl Automerge {
Ok(()) Ok(())
} }
pub fn mark(
&mut self,
obj: JsValue,
range: JsValue,
name: JsValue,
value: JsValue,
datatype: JsValue,
) -> Result<(), JsValue> {
let obj = self.import(obj)?;
let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap();
let range = range.as_string().ok_or("range must be a string")?;
let cap = re.captures_iter(&range).next().ok_or("range must be in the form of (start..end] or [start..end) etc... () for sticky, [] for normal")?;
let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?;
let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?;
let start_sticky = &cap[1] == "(";
let end_sticky = &cap[4] == ")";
let name = name
.as_string()
.ok_or("invalid mark name")
.map_err(to_js_err)?;
let value = self
.import_scalar(&value, &datatype.as_string())
.ok_or_else(|| to_js_err("invalid value"))?;
self.doc
.mark(&obj, start, start_sticky, end, end_sticky, &name, value)
.map_err(to_js_err)?;
Ok(())
}
pub fn unmark(&mut self, obj: JsValue, mark: JsValue) -> Result<(), JsValue> {
let obj = self.import(obj)?;
let mark = self.import(mark)?;
self.doc.unmark(&obj, &mark).map_err(to_js_err)?;
Ok(())
}
pub fn spans(&mut self, obj: JsValue) -> Result<JsValue, JsValue> {
let obj = self.import(obj)?;
let text: Vec<_> = self.doc.list_range(&obj, ..).collect();
let spans = self.doc.spans(&obj).map_err(to_js_err)?;
let mut last_pos = 0;
let result = Array::new();
for s in spans {
let marks = Array::new();
for m in s.marks {
let mark = Array::new();
mark.push(&m.0.into());
mark.push(&datatype(&m.1).into());
mark.push(&ScalarValue(m.1).into());
marks.push(&mark.into());
}
let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos);
if !text_span.is_empty() {
let t: String = text_span
.iter()
.filter_map(|(_, v, _)| v.as_string())
.collect();
result.push(&t.into());
}
result.push(&marks);
last_pos = s.pos;
//let obj = Object::new().into();
//js_set(&obj, "pos", s.pos as i32)?;
//js_set(&obj, "marks", marks)?;
//result.push(&obj.into());
}
let text_span = &text[last_pos..];
if !text_span.is_empty() {
let t: String = text_span
.iter()
.filter_map(|(_, v, _)| v.as_string())
.collect();
result.push(&t.into());
}
Ok(result.into())
}
pub fn raw_spans(&mut self, obj: JsValue) -> Result<Array, JsValue> {
let obj = self.import(obj)?;
let spans = self.doc.raw_spans(&obj).map_err(to_js_err)?;
let result = Array::new();
for s in spans {
result.push(&JsValue::from_serde(&s).map_err(to_js_err)?);
}
Ok(result)
}
pub fn blame(
&mut self,
obj: JsValue,
baseline: JsValue,
change_sets: JsValue,
) -> Result<Array, JsValue> {
am::log!("doc.blame() is depricated - please use doc.attribute()");
self.attribute(obj, baseline, change_sets)
}
pub fn attribute(
&mut self,
obj: JsValue,
baseline: JsValue,
change_sets: JsValue,
) -> Result<Array, JsValue> {
let obj = self.import(obj)?;
let baseline = get_js_heads(baseline)?;
let change_sets = change_sets.dyn_into::<Array>()?;
let change_sets = change_sets
.iter()
.map(get_js_heads)
.collect::<Result<Vec<_>, _>>()?;
let result = self.doc.attribute(&obj, &baseline, &change_sets)?;
let result = result
.into_iter()
.map(|cs| {
let add = cs
.add
.iter()
.map::<Result<JsValue, JsValue>, _>(|range| {
let r = Object::new();
js_set(&r, "start", range.start as f64)?;
js_set(&r, "end", range.end as f64)?;
Ok(JsValue::from(&r))
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
let del = cs
.del
.iter()
.map::<Result<JsValue, JsValue>, _>(|d| {
let r = Object::new();
js_set(&r, "pos", d.0 as f64)?;
js_set(&r, "val", &d.1)?;
Ok(JsValue::from(&r))
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
let obj = Object::new();
js_set(&obj, "add", add)?;
js_set(&obj, "del", del)?;
Ok(obj.into())
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
Ok(result)
}
pub fn attribute2(
&mut self,
obj: JsValue,
baseline: JsValue,
change_sets: JsValue,
) -> Result<Array, JsValue> {
let obj = self.import(obj)?;
let baseline = get_js_heads(baseline)?;
let change_sets = change_sets.dyn_into::<Array>()?;
let change_sets = change_sets
.iter()
.map(get_js_heads)
.collect::<Result<Vec<_>, _>>()?;
let result = self.doc.attribute2(&obj, &baseline, &change_sets)?;
let result = result
.into_iter()
.map(|cs| {
let add = cs
.add
.iter()
.map::<Result<JsValue, JsValue>, _>(|a| {
let r = Object::new();
js_set(&r, "actor", &self.doc.actor_to_str(a.actor))?;
js_set(&r, "start", a.range.start as f64)?;
js_set(&r, "end", a.range.end as f64)?;
Ok(JsValue::from(&r))
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
let del = cs
.del
.iter()
.map::<Result<JsValue, JsValue>, _>(|d| {
let r = Object::new();
js_set(&r, "actor", &self.doc.actor_to_str(d.actor))?;
js_set(&r, "pos", d.pos as f64)?;
js_set(&r, "val", &d.span)?;
Ok(JsValue::from(&r))
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
let obj = Object::new();
js_set(&obj, "add", add)?;
js_set(&obj, "del", del)?;
Ok(obj.into())
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
Ok(result)
}
pub fn save(&mut self) -> Uint8Array { pub fn save(&mut self) -> Uint8Array {
self.ensure_transaction_closed(); self.ensure_transaction_closed();
Uint8Array::from(self.doc.save().as_slice()) Uint8Array::from(self.doc.save().as_slice())

View file

@ -0,0 +1,188 @@
import { describe, it } from 'mocha';
//@ts-ignore
import assert from 'assert'
//@ts-ignore
import { BloomFilter } from './helpers/sync'
import { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..'
import { DecodedSyncMessage, Hash } from '..'
describe('Automerge', () => {
describe('attribute', () => {
it('should be able to attribute text segments on change sets', () => {
let doc1 = create()
let text = doc1.putObject("_root", "notes","hello little world")
let h1 = doc1.getHeads();
let doc2 = doc1.fork();
doc2.splice(text, 5, 7, " big");
doc2.text(text)
let h2 = doc2.getHeads();
assert.deepEqual(doc2.text(text), "hello big world")
let doc3 = doc1.fork();
doc3.splice(text, 0, 0, "Well, ");
let h3 = doc3.getHeads();
assert.deepEqual(doc3.text(text), "Well, hello little world")
doc1.merge(doc2)
doc1.merge(doc3)
assert.deepEqual(doc1.text(text), "Well, hello big world")
let attribute = doc1.attribute(text, h1, [h2, h3])
assert.deepEqual(attribute, [
{ add: [ { start: 11, end: 15 } ], del: [ { pos: 15, val: ' little' } ] },
{ add: [ { start: 0, end: 6 } ], del: [] }
])
})
it('should be able to hand complex attribute change sets', () => {
let doc1 = create("aaaa")
let text = doc1.putObject("_root", "notes","AAAAAA")
let h1 = doc1.getHeads();
let doc2 = doc1.fork("bbbb");
doc2.splice(text, 0, 2, "BB");
doc2.commit()
doc2.splice(text, 2, 2, "BB");
doc2.commit()
doc2.splice(text, 6, 0, "BB");
doc2.commit()
let h2 = doc2.getHeads();
assert.deepEqual(doc2.text(text), "BBBBAABB")
let doc3 = doc1.fork("cccc");
doc3.splice(text, 1, 1, "C");
doc3.commit()
doc3.splice(text, 3, 1, "C");
doc3.commit()
doc3.splice(text, 5, 1, "C");
doc3.commit()
let h3 = doc3.getHeads();
// with tombstones its
// AC.AC.AC.
assert.deepEqual(doc3.text(text), "ACACAC")
doc1.merge(doc2)
assert.deepEqual(doc1.attribute(text, h1, [h2]), [
{ add: [ {start:0, end: 4}, { start: 6, end: 8 } ], del: [ { pos: 4, val: 'AAAA' } ] },
])
doc1.merge(doc3)
assert.deepEqual(doc1.text(text), "BBBBCCACBB")
// with tombstones its
// BBBB.C..C.AC.BB
assert.deepEqual(doc1.attribute(text, h1, [h2,h3]), [
{ add: [ {start:0, end: 4}, { start: 8, end: 10 } ], del: [ { pos: 4, val: 'A' }, { pos: 5, val: 'AA' }, { pos: 6, val: 'A' } ] },
{ add: [ {start:4, end: 6}, { start: 7, end: 8 } ], del: [ { pos: 5, val: 'A' }, { pos: 6, val: 'A' }, { pos: 8, val: 'A' } ] }
])
})
it('should not include attribution of text that is inserted and deleted only within change sets', () => {
let doc1 = create()
let text = doc1.putObject("_root", "notes","hello little world")
let h1 = doc1.getHeads();
let doc2 = doc1.fork();
doc2.splice(text, 5, 7, " big");
doc2.splice(text, 9, 0, " bad");
doc2.splice(text, 9, 4)
doc2.text(text)
let h2 = doc2.getHeads();
assert.deepEqual(doc2.text(text), "hello big world")
let doc3 = doc1.fork();
doc3.splice(text, 0, 0, "Well, HI THERE");
doc3.splice(text, 6, 8, "")
let h3 = doc3.getHeads();
assert.deepEqual(doc3.text(text), "Well, hello little world")
doc1.merge(doc2)
doc1.merge(doc3)
assert.deepEqual(doc1.text(text), "Well, hello big world")
let attribute = doc1.attribute(text, h1, [h2, h3])
assert.deepEqual(attribute, [
{ add: [ { start: 11, end: 15 } ], del: [ { pos: 15, val: ' little' } ] },
{ add: [ { start: 0, end: 6 } ], del: [] }
])
})
})
describe('attribute2', () => {
  // attribute2 behaves like attribute but additionally reports, for every
  // added range and deleted span, the actor that produced it.
  it('should be able to attribute text segments on change sets', () => {
    let doc1 = create("aaaa")
    let text = doc1.putObject("_root", "notes","hello little world")
    let h1 = doc1.getHeads();
    // change set 1: actor bbbb replaces " little" with " big"
    let doc2 = doc1.fork("bbbb");
    doc2.splice(text, 5, 7, " big");
    doc2.text(text)
    let h2 = doc2.getHeads();
    assert.deepEqual(doc2.text(text), "hello big world")
    // change set 2: actor cccc prepends "Well, ", then actor dddd (forked
    // from cccc) prepends "Gee, "
    let doc3 = doc1.fork("cccc");
    doc3.splice(text, 0, 0, "Well, ");
    let doc4 = doc3.fork("dddd")
    doc4.splice(text, 0, 0, "Gee, ");
    let h3 = doc4.getHeads();
    assert.deepEqual(doc4.text(text), "Gee, Well, hello little world")
    doc1.merge(doc2)
    doc1.merge(doc4)
    assert.deepEqual(doc1.text(text), "Gee, Well, hello big world")
    let attribute = doc1.attribute2(text, h1, [h2, h3])
    // the second change set's additions are split into per-actor runs
    assert.deepEqual(attribute, [
      { add: [ { actor: "bbbb", start: 16, end: 20 } ], del: [ { actor: "bbbb", pos: 20, val: ' little' } ] },
      { add: [ { actor: "dddd", start:0, end: 5 }, { actor: "cccc", start: 5, end: 11 } ], del: [] }
    ])
  })
  it('should not include attribution of text that is inserted and deleted only within change sets', () => {
    let doc1 = create("aaaa")
    let text = doc1.putObject("_root", "notes","hello little world")
    let h1 = doc1.getHeads();
    let doc2 = doc1.fork("bbbb");
    doc2.splice(text, 5, 7, " big");
    // " bad" is inserted and removed inside the same change set, so it must
    // not appear in the attribution below
    doc2.splice(text, 9, 0, " bad");
    doc2.splice(text, 9, 4)
    doc2.text(text)
    let h2 = doc2.getHeads();
    assert.deepEqual(doc2.text(text), "hello big world")
    let doc3 = doc1.fork("cccc");
    // "HI THERE" is likewise inserted then deleted within the change set
    doc3.splice(text, 0, 0, "Well, HI THERE");
    doc3.splice(text, 6, 8, "")
    let h3 = doc3.getHeads();
    assert.deepEqual(doc3.text(text), "Well, hello little world")
    doc1.merge(doc2)
    doc1.merge(doc3)
    assert.deepEqual(doc1.text(text), "Well, hello big world")
    let attribute = doc1.attribute2(text, h1, [h2, h3])
    assert.deepEqual(attribute, [
      { add: [ { start: 11, end: 15, actor: "bbbb" } ], del: [ { pos: 15, val: ' little', actor: "bbbb" } ] },
      { add: [ { start: 0, end: 6, actor: "cccc" } ], del: [] }
    ])
    // a second attribution pass from the new baseline h4 reports only the
    // changes made after it
    let h4 = doc1.getHeads()
    doc3.splice(text, 24, 0, "!!!")
    doc1.merge(doc3)
    let h5 = doc1.getHeads()
    assert.deepEqual(doc1.text(text), "Well, hello big world!!!")
    attribute = doc1.attribute2(text, h4, [h5])
    assert.deepEqual(attribute, [
      { add: [ { start: 21, end: 24, actor: "cccc" } ], del: [] },
    ])
  })
})
})

View file

@ -0,0 +1,201 @@
import { describe, it } from 'mocha';
//@ts-ignore
import assert from 'assert'
//@ts-ignore
import { create, load, Automerge, encodeChange, decodeChange } from '..'
describe('Automerge', () => {
  describe('marks', () => {
    // "[a..b]" marks: neither end expands, so inserts exactly at either
    // boundary fall outside the marked range.
    it('should handle marks [..]', () => {
      let doc = create()
      let list = doc.putObject("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[3..6]", "bold" , true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
      // inserts at the mark boundaries land outside the marked text
      doc.insert(list, 6, "A")
      doc.insert(list, 3, "A")
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]);
    })
    it('should handle marks [..] at the beginning of a string', () => {
      let doc = create()
      let list = doc.putObject("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[0..3]", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]);
      // concurrent boundary inserts made in a fork also stay outside the
      // mark after merging
      let doc2 = doc.fork()
      doc2.insert(list, 0, "A")
      doc2.insert(list, 4, "B")
      doc.merge(doc2)
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'A', [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'Bbbbccc' ]);
    })
    it('should handle marks [..] with splice', () => {
      let doc = create()
      let list = doc.putObject("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[0..3]", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]);
      // splicing over the mark start shrinks the marked text; an insert at
      // the non-expanding end stays outside it
      let doc2 = doc.fork()
      doc2.splice(list, 0, 2, "AAA")
      doc2.splice(list, 4, 0, "BBB")
      doc.merge(doc2)
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'AAA', [ [ 'bold', 'boolean', true ] ], 'a', [], 'BBBbbbccc' ]);
    })
    it('should handle marks across multiple forks', () => {
      let doc = create()
      let list = doc.putObject("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[0..3]", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]);
      let doc2 = doc.fork()
      doc2.splice(list, 1, 1, "Z") // replace 'aaa' with 'aZa' inside mark.
      let doc3 = doc.fork()
      doc3.insert(list, 0, "AAA") // should not be included in mark.
      doc.merge(doc2)
      doc.merge(doc3)
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'AAA', [ [ 'bold', 'boolean', true ] ], 'aZa', [], 'bbbccc' ]);
    })
    it('should handle marks with deleted ends [..]', () => {
      let doc = create()
      let list = doc.putObject("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[3..6]", "bold" , true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
      // delete the characters the mark boundaries were anchored to
      doc.delete(list,5);
      doc.delete(list,5);
      doc.delete(list,2);
      doc.delete(list,2);
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ])
      // inserts at the (now deleted) boundaries still land outside the mark
      doc.insert(list, 3, "A")
      doc.insert(list, 2, "A")
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ])
    })
    // "(a..b)" marks: both ends are sticky/expanding, so inserts at the
    // boundaries are absorbed into the mark.
    it('should handle sticky marks (..)', () => {
      let doc = create()
      let list = doc.putObject("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "(3..6)", "bold" , true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
      doc.insert(list, 6, "A")
      doc.insert(list, 3, "A")
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]);
    })
    it('should handle sticky marks with deleted ends (..)', () => {
      let doc = create()
      let list = doc.putObject("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "(3..6)", "bold" , true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
      doc.delete(list,5);
      doc.delete(list,5);
      doc.delete(list,2);
      doc.delete(list,2);
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ])
      // sticky ends absorb boundary inserts even after the characters the
      // mark was anchored to were deleted
      doc.insert(list, 3, "A")
      doc.insert(list, 2, "A")
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ])
      // make sure save/load can handle marks
      let doc2 = load(doc.save())
      spans = doc2.spans(list);
      assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ])
      assert.deepStrictEqual(doc.getHeads(), doc2.getHeads())
      assert.deepStrictEqual(doc.save(), doc2.save())
    })
    it('should handle overlapping marks', () => {
      let doc : Automerge = create("aabbcc")
      let list = doc.putObject("_root", "list", "")
      doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
      doc.mark(list, "[0..37]", "bold" , true)
      doc.mark(list, "[4..19]", "itallic" , true)
      doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!")
      doc.commit("marks");
      let spans = doc.spans(list);
      // spans() splits the text wherever the set of active marks changes
      assert.deepStrictEqual(spans,
        [
          [ [ 'bold', 'boolean', true ] ],
          'the ',
          [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ],
          'quick ',
          [
            [ 'bold', 'boolean', true ],
            [ 'comment', 'str', 'foxes are my favorite animal!' ],
            [ 'itallic', 'boolean', true ]
          ],
          'fox',
          [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ],
          ' jumps',
          [ [ 'bold', 'boolean', true ] ],
          ' over the lazy dog',
          [],
        ]
      )
      let text = doc.text(list);
      assert.deepStrictEqual(text, "the quick fox jumps over the lazy dog");
      // raw_spans reports each mark once, keyed by the mark op's id
      let raw_spans = doc.raw_spans(list);
      assert.deepStrictEqual(raw_spans,
        [
          { id: "39@aabbcc", start: 0, end: 37, type: 'bold', value: true },
          { id: "41@aabbcc", start: 4, end: 19, type: 'itallic', value: true },
          { id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' }
        ]);
      // unmark removes a span by its op id
      doc.unmark(list, "41@aabbcc")
      raw_spans = doc.raw_spans(list);
      assert.deepStrictEqual(raw_spans,
        [
          { id: "39@aabbcc", start: 0, end: 37, type: 'bold', value: true },
          { id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' }
        ]);
      // make sure encode/decode can handle marks
      doc.unmark(list, "39@aabbcc")
      raw_spans = doc.raw_spans(list);
      assert.deepStrictEqual(raw_spans,
        [
          { id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' }
        ]);
      let all = doc.getChanges([])
      let decoded = all.map((c) => decodeChange(c))
      let encoded = decoded.map((c) => encodeChange(c))
      let doc2 = create();
      doc2.applyChanges(encoded)
      assert.deepStrictEqual(doc.spans(list) , doc2.spans(list))
      assert.deepStrictEqual(doc.save(), doc2.save())
    })
  })
})

View file

@ -396,6 +396,8 @@ describe('Automerge', () => {
assert.deepEqual(change2, null) assert.deepEqual(change2, null)
if (change1 === null) { throw new RangeError("change1 should not be null") } if (change1 === null) { throw new RangeError("change1 should not be null") }
assert.deepEqual(decodeChange(change1).hash, head1[0]) assert.deepEqual(decodeChange(change1).hash, head1[0])
assert.deepEqual(head1.some((hash) => doc1.getChangeByHash(hash) === null), false)
assert.deepEqual(head2.some((hash) => doc1.getChangeByHash(hash) === null), true)
}) })
it('recursive sets are possible', () => { it('recursive sets are possible', () => {
@ -1654,7 +1656,7 @@ describe('Automerge', () => {
if (m2 === null) { throw new RangeError("message should not be null") } if (m2 === null) { throw new RangeError("message should not be null") }
n1.receiveSyncMessage(s1, m2) n1.receiveSyncMessage(s1, m2)
n2.receiveSyncMessage(s2, m1) n2.receiveSyncMessage(s2, m1)
// Then n1 and n2 send each other their changes, except for the false positive // Then n1 and n2 send each other their changes, except for the false positive
m1 = n1.generateSyncMessage(s1) m1 = n1.generateSyncMessage(s1)
m2 = n2.generateSyncMessage(s2) m2 = n2.generateSyncMessage(s2)

View file

@ -205,5 +205,5 @@ export class SyncState {
readonly sharedHeads: Heads; readonly sharedHeads: Heads;
} }
export default function init (): Promise<API>;
export function init (): Promise<API>; export function init (): Promise<API>;

View file

@ -47,7 +47,3 @@ export function init() {
})) }))
} }
// depricating default export
export default function() {
return init()
}

View file

@ -3,13 +3,14 @@ use std::ops::RangeBounds;
use crate::exid::ExId; use crate::exid::ExId;
use crate::op_observer::OpObserver; use crate::op_observer::OpObserver;
use crate::transaction::{CommitOptions, Transactable}; use crate::transaction::{CommitOptions, Transactable};
use crate::Parents;
use crate::{ use crate::{
sync, ApplyOptions, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, query, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash,
Parents, ScalarValue, Prop, Value, Values,
}; };
use crate::{ use crate::{
transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, sync, ApplyOptions, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType,
Value, Values, ScalarValue,
}; };
/// An automerge document that automatically manages transactions. /// An automerge document that automatically manages transactions.
@ -33,6 +34,11 @@ impl AutoCommit {
} }
} }
// FIXME : temp
pub fn actor_to_str(&self, actor: usize) -> String {
self.doc.ops.m.actors.cache[actor].to_hex_string()
}
/// Get the inner document. /// Get the inner document.
#[doc(hidden)] #[doc(hidden)]
pub fn document(&mut self) -> &Automerge { pub fn document(&mut self) -> &Automerge {
@ -404,6 +410,37 @@ impl Transactable for AutoCommit {
tx.insert(&mut self.doc, obj.as_ref(), index, value) tx.insert(&mut self.doc, obj.as_ref(), index, value)
} }
#[allow(clippy::too_many_arguments)]
fn mark<O: AsRef<ExId>>(
&mut self,
obj: O,
start: usize,
expand_start: bool,
end: usize,
expand_end: bool,
mark: &str,
value: ScalarValue,
) -> Result<(), AutomergeError> {
self.ensure_transaction_open();
let tx = self.transaction.as_mut().unwrap();
tx.mark(
&mut self.doc,
obj,
start,
expand_start,
end,
expand_end,
mark,
value,
)
}
fn unmark<O: AsRef<ExId>>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError> {
self.ensure_transaction_open();
let tx = self.transaction.as_mut().unwrap();
tx.unmark(&mut self.doc, obj, mark)
}
fn insert_object<O: AsRef<ExId>>( fn insert_object<O: AsRef<ExId>>(
&mut self, &mut self,
obj: O, obj: O,
@ -462,6 +499,32 @@ impl Transactable for AutoCommit {
self.doc.text_at(obj, heads) self.doc.text_at(obj, heads)
} }
fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::Span<'_>>, AutomergeError> {
self.doc.spans(obj)
}
fn raw_spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::SpanInfo>, AutomergeError> {
self.doc.raw_spans(obj)
}
fn attribute<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet>, AutomergeError> {
self.doc.attribute(obj, baseline, change_sets)
}
fn attribute2<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet2>, AutomergeError> {
self.doc.attribute2(obj, baseline, change_sets)
}
// TODO - I need to return these OpId's here **only** to get // TODO - I need to return these OpId's here **only** to get
// the legacy conflicts format of { [opid]: value } // the legacy conflicts format of { [opid]: value }
// Something better? // Something better?

View file

@ -452,6 +452,28 @@ impl Automerge {
} }
} }
pub(crate) fn exid_to_obj_tmp_unchecked(&self, id: &ExId) -> Result<ObjId, AutomergeError> {
match id {
ExId::Root => Ok(ObjId::root()),
ExId::Id(ctr, actor, idx) => {
// do a direct get here b/c this could be foriegn and not be within the array
// bounds
if self.ops.m.actors.cache.get(*idx) == Some(actor) {
Ok(ObjId(OpId(*ctr, *idx)))
} else {
// FIXME - make a real error
let idx = self
.ops
.m
.actors
.lookup(actor)
.ok_or(AutomergeError::Fail)?;
Ok(ObjId(OpId(*ctr, idx)))
}
}
}
}
pub(crate) fn id_to_exid(&self, id: OpId) -> ExId { pub(crate) fn id_to_exid(&self, id: OpId) -> ExId {
self.ops.id_to_exid(id) self.ops.id_to_exid(id)
} }
@ -491,6 +513,71 @@ impl Automerge {
Ok(buffer) Ok(buffer)
} }
pub fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::Span<'_>>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let mut query = self.ops.search(&obj, query::Spans::new());
query.check_marks();
Ok(query.spans)
}
pub fn attribute<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let baseline = self.clock_at(baseline);
let change_sets: Vec<Clock> = change_sets
.iter()
.map(|p| self.clock_at(p).unwrap())
.collect();
let mut query = self
.ops
.search(&obj, query::Attribute::new(baseline.unwrap(), change_sets));
query.finish();
Ok(query.change_sets)
}
pub fn attribute2<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet2>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let baseline = self.clock_at(baseline);
let change_sets: Vec<Clock> = change_sets
.iter()
.map(|p| self.clock_at(p).unwrap())
.collect();
let mut query = self
.ops
.search(&obj, query::Attribute2::new(baseline.unwrap(), change_sets));
query.finish();
Ok(query.change_sets)
}
pub fn raw_spans<O: AsRef<ExId>>(
&self,
obj: O,
) -> Result<Vec<query::SpanInfo>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let query = self.ops.search(&obj, query::RawSpans::new());
let result = query
.spans
.into_iter()
.map(|s| query::SpanInfo {
id: self.id_to_exid(s.id),
start: s.start,
end: s.end,
span_type: s.name,
value: s.value,
})
.collect();
Ok(result)
}
// TODO - I need to return these OpId's here **only** to get // TODO - I need to return these OpId's here **only** to get
// the legacy conflicts format of { [opid]: value } // the legacy conflicts format of { [opid]: value }
// Something better? // Something better?
@ -1041,6 +1128,8 @@ impl Automerge {
OpType::Put(value) => format!("{}", value), OpType::Put(value) => format!("{}", value),
OpType::Make(obj) => format!("make({})", obj), OpType::Make(obj) => format!("make({})", obj),
OpType::Increment(obj) => format!("inc({})", obj), OpType::Increment(obj) => format!("inc({})", obj),
OpType::MarkBegin(m) => format!("mark({}={})", m.name, m.value),
OpType::MarkEnd(_) => "/mark".into(),
OpType::Delete => format!("del{}", 0), OpType::Delete => format!("del{}", 0),
}; };
let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect(); let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect();

View file

@ -137,6 +137,15 @@ impl<'a> Iterator for OperationIterator<'a> {
Action::MakeTable => OpType::Make(ObjType::Table), Action::MakeTable => OpType::Make(ObjType::Table),
Action::Del => OpType::Delete, Action::Del => OpType::Delete,
Action::Inc => OpType::Increment(value.to_i64()?), Action::Inc => OpType::Increment(value.to_i64()?),
Action::MarkBegin => {
// mark has 3 things in the val column
let name = value.as_string()?;
let expand = self.value.next()?.to_bool()?;
let value = self.value.next()?;
OpType::mark(name, expand, value)
}
Action::MarkEnd => OpType::MarkEnd(value.to_bool()?),
Action::Unused => panic!("invalid action"),
}; };
Some(amp::Op { Some(amp::Op {
action, action,
@ -178,6 +187,15 @@ impl<'a> Iterator for DocOpIterator<'a> {
Action::MakeTable => OpType::Make(ObjType::Table), Action::MakeTable => OpType::Make(ObjType::Table),
Action::Del => OpType::Delete, Action::Del => OpType::Delete,
Action::Inc => OpType::Increment(value.to_i64()?), Action::Inc => OpType::Increment(value.to_i64()?),
Action::MarkBegin => {
// mark has 3 things in the val column
let name = value.as_string()?;
let expand = self.value.next()?.to_bool()?;
let value = self.value.next()?;
OpType::mark(name, expand, value)
}
Action::MarkEnd => OpType::MarkEnd(value.to_bool()?),
Action::Unused => panic!("invalid action"),
}; };
Some(DocOp { Some(DocOp {
actor, actor,
@ -1082,6 +1100,16 @@ impl DocOpEncoder {
self.val.append_null(); self.val.append_null();
Action::Del Action::Del
} }
amp::OpType::MarkBegin(m) => {
self.val.append_value(&m.name.clone().into(), actors);
self.val.append_value(&m.expand.into(), actors);
self.val.append_value(&m.value.clone(), actors);
Action::MarkBegin
}
amp::OpType::MarkEnd(s) => {
self.val.append_value(&(*s).into(), actors);
Action::MarkEnd
}
amp::OpType::Make(kind) => { amp::OpType::Make(kind) => {
self.val.append_null(); self.val.append_null();
match kind { match kind {
@ -1191,6 +1219,16 @@ impl ColumnEncoder {
self.val.append_null(); self.val.append_null();
Action::Del Action::Del
} }
OpType::MarkBegin(m) => {
self.val.append_value2(&m.name.clone().into(), actors);
self.val.append_value2(&m.expand.into(), actors);
self.val.append_value2(&m.value.clone(), actors);
Action::MarkBegin
}
OpType::MarkEnd(s) => {
self.val.append_value2(&(*s).into(), actors);
Action::MarkEnd
}
OpType::Make(kind) => { OpType::Make(kind) => {
self.val.append_null(); self.val.append_null();
match kind { match kind {
@ -1296,8 +1334,11 @@ pub(crate) enum Action {
MakeText, MakeText,
Inc, Inc,
MakeTable, MakeTable,
MarkBegin,
Unused, // final bit is used to mask `Make` actions
MarkEnd,
} }
const ACTIONS: [Action; 7] = [ const ACTIONS: [Action; 10] = [
Action::MakeMap, Action::MakeMap,
Action::Set, Action::Set,
Action::MakeList, Action::MakeList,
@ -1305,6 +1346,9 @@ const ACTIONS: [Action; 7] = [
Action::MakeText, Action::MakeText,
Action::Inc, Action::Inc,
Action::MakeTable, Action::MakeTable,
Action::MarkBegin,
Action::Unused,
Action::MarkEnd,
]; ];
impl Decodable for Action { impl Decodable for Action {

View file

@ -50,6 +50,12 @@ impl Serialize for Op {
OpType::Increment(n) => op.serialize_field("value", &n)?, OpType::Increment(n) => op.serialize_field("value", &n)?,
OpType::Put(ScalarValue::Counter(c)) => op.serialize_field("value", &c.start)?, OpType::Put(ScalarValue::Counter(c)) => op.serialize_field("value", &c.start)?,
OpType::Put(value) => op.serialize_field("value", &value)?, OpType::Put(value) => op.serialize_field("value", &value)?,
OpType::MarkBegin(m) => {
op.serialize_field("name", &m.name)?;
op.serialize_field("expand", &m.expand)?;
op.serialize_field("value", &m.value)?;
}
OpType::MarkEnd(s) => op.serialize_field("expand", &s)?,
_ => {} _ => {}
} }
op.serialize_field("pred", &self.pred)?; op.serialize_field("pred", &self.pred)?;
@ -71,6 +77,8 @@ pub(crate) enum RawOpType {
Del, Del,
Inc, Inc,
Set, Set,
MarkBegin,
MarkEnd,
} }
impl Serialize for RawOpType { impl Serialize for RawOpType {
@ -86,6 +94,8 @@ impl Serialize for RawOpType {
RawOpType::Del => "del", RawOpType::Del => "del",
RawOpType::Inc => "inc", RawOpType::Inc => "inc",
RawOpType::Set => "set", RawOpType::Set => "set",
RawOpType::MarkBegin => "mark_begin",
RawOpType::MarkEnd => "mark_end",
}; };
serializer.serialize_str(s) serializer.serialize_str(s)
} }
@ -117,6 +127,8 @@ impl<'de> Deserialize<'de> for RawOpType {
"del" => Ok(RawOpType::Del), "del" => Ok(RawOpType::Del),
"inc" => Ok(RawOpType::Inc), "inc" => Ok(RawOpType::Inc),
"set" => Ok(RawOpType::Set), "set" => Ok(RawOpType::Set),
"mark_begin" => Ok(RawOpType::MarkBegin),
"mark_end" => Ok(RawOpType::MarkEnd),
other => Err(Error::unknown_variant(other, VARIANTS)), other => Err(Error::unknown_variant(other, VARIANTS)),
} }
} }
@ -189,6 +201,30 @@ impl<'de> Deserialize<'de> for Op {
RawOpType::MakeList => OpType::Make(ObjType::List), RawOpType::MakeList => OpType::Make(ObjType::List),
RawOpType::MakeText => OpType::Make(ObjType::Text), RawOpType::MakeText => OpType::Make(ObjType::Text),
RawOpType::Del => OpType::Delete, RawOpType::Del => OpType::Delete,
RawOpType::MarkBegin => {
let name = name.ok_or_else(|| Error::missing_field("mark(name)"))?;
let expand = expand.unwrap_or(false);
let value = if let Some(datatype) = datatype {
let raw_value = value
.ok_or_else(|| Error::missing_field("value"))?
.unwrap_or(ScalarValue::Null);
raw_value.as_datatype(datatype).map_err(|e| {
Error::invalid_value(
Unexpected::Other(e.unexpected.as_str()),
&e.expected.as_str(),
)
})?
} else {
value
.ok_or_else(|| Error::missing_field("value"))?
.unwrap_or(ScalarValue::Null)
};
OpType::mark(name, expand, value)
}
RawOpType::MarkEnd => {
let expand = expand.unwrap_or(true);
OpType::MarkEnd(expand)
}
RawOpType::Set => { RawOpType::Set => {
let value = if let Some(datatype) = datatype { let value = if let Some(datatype) = datatype {
let raw_value = value let raw_value = value

View file

@ -15,6 +15,8 @@ impl Serialize for OpType {
OpType::Make(ObjType::Table) => RawOpType::MakeTable, OpType::Make(ObjType::Table) => RawOpType::MakeTable,
OpType::Make(ObjType::List) => RawOpType::MakeList, OpType::Make(ObjType::List) => RawOpType::MakeList,
OpType::Make(ObjType::Text) => RawOpType::MakeText, OpType::Make(ObjType::Text) => RawOpType::MakeText,
OpType::MarkBegin(_) => RawOpType::MarkBegin,
OpType::MarkEnd(_) => RawOpType::MarkEnd,
OpType::Delete => RawOpType::Del, OpType::Delete => RawOpType::Del,
OpType::Increment(_) => RawOpType::Inc, OpType::Increment(_) => RawOpType::Inc,
OpType::Put(_) => RawOpType::Set, OpType::Put(_) => RawOpType::Set,

View file

@ -1,10 +1,14 @@
use crate::exid::ExId;
use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::op_tree::{OpSetMetadata, OpTreeNode};
use crate::types::{Clock, Counter, Key, Op, OpId, OpType, ScalarValue}; use crate::types::{Clock, Counter, Key, Op, OpId, OpType, ScalarValue};
use fxhash::FxBuildHasher; use fxhash::FxBuildHasher;
use serde::Serialize;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::fmt::Debug; use std::fmt::Debug;
mod attribute;
mod attribute2;
mod elem_id_pos; mod elem_id_pos;
mod insert; mod insert;
mod keys; mod keys;
@ -22,9 +26,13 @@ mod nth_at;
mod opid; mod opid;
mod prop; mod prop;
mod prop_at; mod prop_at;
mod raw_spans;
mod seek_op; mod seek_op;
mod seek_op_with_patch; mod seek_op_with_patch;
mod spans;
pub(crate) use attribute::{Attribute, ChangeSet};
pub(crate) use attribute2::{Attribute2, ChangeSet2};
pub(crate) use elem_id_pos::ElemIdPos; pub(crate) use elem_id_pos::ElemIdPos;
pub(crate) use insert::InsertNth; pub(crate) use insert::InsertNth;
pub(crate) use keys::Keys; pub(crate) use keys::Keys;
@ -42,8 +50,20 @@ pub(crate) use nth_at::NthAt;
pub(crate) use opid::OpIdSearch; pub(crate) use opid::OpIdSearch;
pub(crate) use prop::Prop; pub(crate) use prop::Prop;
pub(crate) use prop_at::PropAt; pub(crate) use prop_at::PropAt;
pub(crate) use raw_spans::RawSpans;
pub(crate) use seek_op::SeekOp; pub(crate) use seek_op::SeekOp;
pub(crate) use seek_op_with_patch::SeekOpWithPatch; pub(crate) use seek_op_with_patch::SeekOpWithPatch;
pub(crate) use spans::{Span, Spans};
#[derive(Serialize, Debug, Clone, PartialEq)]
pub struct SpanInfo {
pub id: ExId,
pub start: usize,
pub end: usize,
#[serde(rename = "type")]
pub span_type: String,
pub value: ScalarValue,
}
// use a struct for the args for clarity as they are passed up the update chain in the optree // use a struct for the args for clarity as they are passed up the update chain in the optree
#[derive(Debug, Clone)] #[derive(Debug, Clone)]

View file

@ -0,0 +1,129 @@
use crate::clock::Clock;
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op};
use std::fmt::Debug;
use std::ops::Range;
/// Tree query that walks a sequence object and attributes visible insertions
/// and deletions to a set of change sets, relative to a baseline clock.
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Attribute {
    /// Number of ops visited so far.
    pos: usize,
    /// Number of visible elements seen so far (index into the visible text).
    seen: usize,
    /// Elemid of the last visible element; used so conflicting ops for the
    /// same element are only counted once.
    last_seen: Option<ElemId>,
    /// Ops covered by this clock belong to the common baseline and are not
    /// attributed to any change set.
    baseline: Clock,
    /// One accumulator per queried change set.
    pub(crate) change_sets: Vec<ChangeSet>,
}
/// Accumulated attribution for one change set: ranges added and spans
/// deleted, expressed as positions in the current visible sequence.
#[derive(Debug, Clone, PartialEq)]
pub struct ChangeSet {
    /// Clock covering every op in this change set.
    clock: Clock,
    /// Added range currently being extended (flushed into `add` by `cut_add`).
    next_add: Option<Range<usize>>,
    /// Deletion run currently being extended: (visible position, deleted
    /// text); flushed into `del` by `cut_del`.
    next_del: Option<(usize, String)>,
    /// Completed added ranges.
    pub add: Vec<Range<usize>>,
    /// Completed deletions as (visible position, deleted text).
    pub del: Vec<(usize, String)>,
}
impl From<Clock> for ChangeSet {
fn from(clock: Clock) -> Self {
ChangeSet {
clock,
next_add: None,
next_del: None,
add: Vec::new(),
del: Vec::new(),
}
}
}
impl ChangeSet {
    /// Flush the in-progress added range, if any, into `add`.
    fn cut_add(&mut self) {
        // `Option` iterates over zero-or-one items, so `extend` pushes the
        // taken value exactly when one is present.
        self.add.extend(self.next_add.take());
    }

    /// Flush the in-progress deletion run, if any, into `del`.
    fn cut_del(&mut self) {
        self.del.extend(self.next_del.take());
    }
}
impl Attribute {
    /// Create a query attributing ops newer than `baseline` to the given
    /// per-change-set clocks.
    pub(crate) fn new(baseline: Clock, change_sets: Vec<Clock>) -> Self {
        Attribute {
            pos: 0,
            seen: 0,
            last_seen: None,
            baseline,
            change_sets: change_sets.into_iter().map(|c| c.into()).collect(),
        }
    }

    /// Record a visible element: if it is not in the baseline and is covered
    /// by a change set's clock, extend (or start) that set's current added
    /// range; otherwise flush any pending added range for that set.
    fn update_add(&mut self, element: &Op) {
        let baseline = self.baseline.covers(&element.id);
        for cs in &mut self.change_sets {
            if !baseline && cs.clock.covers(&element.id) {
                // is part of the change_set
                if let Some(range) = &mut cs.next_add {
                    range.end += 1;
                } else {
                    cs.next_add = Some(Range {
                        start: self.seen,
                        end: self.seen + 1,
                    });
                }
            } else {
                cs.cut_add();
            }
            // any open deletion run is interrupted by a visible element
            cs.cut_del();
        }
    }

    // id is in baseline
    // succ is not in baseline but is in cs
    /// Record a deletion: the element must predate the baseline and must not
    /// have been deleted by the baseline itself; a change set whose clock
    /// covers one of the deleting ops (successors) collects the deleted text.
    fn update_del(&mut self, element: &Op) {
        if !self.baseline.covers(&element.id)
            || element.succ.iter().any(|id| self.baseline.covers(id))
        {
            return;
        }
        for cs in &mut self.change_sets {
            if element.succ.iter().any(|id| cs.clock.covers(id)) {
                // was deleted by change set
                if let Some(s) = element.as_string() {
                    if let Some((_, span)) = &mut cs.next_del {
                        span.push_str(&s);
                    } else {
                        cs.next_del = Some((self.seen, s))
                    }
                }
            }
        }
    }

    /// Flush all pending runs; call once after the tree walk completes.
    pub(crate) fn finish(&mut self) {
        for cs in &mut self.change_sets {
            cs.cut_add();
            cs.cut_del();
        }
    }
}
impl<'a> TreeQuery<'a> for Attribute {
    fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult {
        // a fresh insertion starts a new element, so reset the dedupe marker
        if element.insert {
            self.last_seen = None;
        }
        // count each visible element once, even when several ops share the
        // same elemid (conflicting updates)
        if self.last_seen.is_none() && element.visible() {
            self.update_add(element);
            self.seen += 1;
            self.last_seen = element.elemid();
        }
        // ops with successors have been overwritten or deleted
        if !element.succ.is_empty() {
            self.update_del(element);
        }
        self.pos += 1;
        QueryResult::Next
    }
}

View file

@ -0,0 +1,174 @@
use crate::clock::Clock;
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op};
use std::fmt::Debug;
use std::ops::Range;
/// Tree query that attributes visible insertions and deletions to change
/// sets, like `Attribute`, but additionally records the responsible actor.
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Attribute2 {
    /// Number of ops visited so far.
    pos: usize,
    /// Number of visible elements seen so far (index into the visible text).
    seen: usize,
    /// Elemid of the last visible element; dedupes conflicting ops for the
    /// same element.
    last_seen: Option<ElemId>,
    /// Ops covered by this clock belong to the common baseline and are not
    /// attributed to any change set.
    baseline: Clock,
    /// One accumulator per queried change set.
    pub(crate) change_sets: Vec<ChangeSet2>,
}
/// Per-change-set accumulator of added runs and deleted spans, each tagged
/// with the actor index that performed the edit.
#[derive(Debug, Clone, PartialEq)]
pub struct ChangeSet2 {
    /// Clock covering every op in this change set.
    clock: Clock,
    /// Added run currently being extended (flushed by `cut_add`).
    next_add: Option<CS2Add>,
    /// Deletion run currently being extended (flushed by `cut_del`).
    next_del: Option<CS2Del>,
    /// Completed added runs.
    pub add: Vec<CS2Add>,
    /// Completed deletions.
    pub del: Vec<CS2Del>,
}
/// A contiguous run of added elements attributed to a single actor.
#[derive(Debug, Clone, PartialEq)]
pub struct CS2Add {
    /// Index of the actor that inserted the run.
    pub actor: usize,
    /// Positions of the run in the visible sequence.
    pub range: Range<usize>,
}
/// A contiguous span of deleted text attributed to a single actor.
#[derive(Debug, Clone, PartialEq)]
pub struct CS2Del {
    /// Visible position where the deletion occurred.
    pub pos: usize,
    /// Index of the actor that performed the deletion.
    pub actor: usize,
    /// The deleted text.
    pub span: String,
}
impl From<Clock> for ChangeSet2 {
fn from(clock: Clock) -> Self {
ChangeSet2 {
clock,
next_add: None,
next_del: None,
add: Vec::new(),
del: Vec::new(),
}
}
}
impl ChangeSet2 {
    /// Flush the in-progress added run, if any, into `add`.
    fn cut_add(&mut self) {
        // `Option` iterates over zero-or-one items, so `extend` pushes the
        // taken value exactly when one is present.
        self.add.extend(self.next_add.take());
    }

    /// Flush the in-progress deletion run, if any, into `del`.
    fn cut_del(&mut self) {
        self.del.extend(self.next_del.take());
    }
}
impl Attribute2 {
    /// Create a query attributing ops newer than `baseline` to the given
    /// per-change-set clocks.
    pub(crate) fn new(baseline: Clock, change_sets: Vec<Clock>) -> Self {
        Attribute2 {
            pos: 0,
            seen: 0,
            last_seen: None,
            baseline,
            change_sets: change_sets.into_iter().map(|c| c.into()).collect(),
        }
    }

    /// Record a visible element. An element outside the baseline that is
    /// covered by a change set's clock extends that set's current added run
    /// when the same actor continues it; otherwise a new run is started.
    fn update_add(&mut self, element: &Op) {
        let baseline = self.baseline.covers(&element.id);
        for cs in &mut self.change_sets {
            if !baseline && cs.clock.covers(&element.id) {
                // is part of the change_set
                match &mut cs.next_add {
                    // same actor keeps extending the open run
                    Some(CS2Add { range, actor }) if *actor == element.id.actor() => {
                        range.end += 1;
                    }
                    // different actor, or no open run: flush and start a new
                    // run (cut_add is a no-op when next_add is None, so this
                    // single arm matches the original duplicated branches)
                    _ => {
                        cs.cut_add();
                        cs.next_add = Some(CS2Add {
                            actor: element.id.actor(),
                            range: Range {
                                start: self.seen,
                                end: self.seen + 1,
                            },
                        });
                    }
                }
            } else {
                cs.cut_add();
            }
            // a visible element interrupts any open deletion run
            cs.cut_del();
        }
    }

    // id is in baseline
    // succ is not in baseline but is in cs
    /// Record a deletion: the element must predate the baseline and must not
    /// have been deleted by the baseline itself; a change set whose clock
    /// covers one of the deleting ops collects the deleted text, tagged with
    /// the deleting actor.
    fn update_del(&mut self, element: &Op) {
        if !self.baseline.covers(&element.id)
            || element.succ.iter().any(|id| self.baseline.covers(id))
        {
            return;
        }
        for cs in &mut self.change_sets {
            // was deleted by change set; `find` replaces the original
            // filter+collect, which allocated a Vec only to read element 0
            if let Some(suc) = element.succ.iter().find(|id| cs.clock.covers(id)) {
                if let Some(s) = element.as_string() {
                    match &mut cs.next_del {
                        // same deleting actor: append to the open span
                        Some(CS2Del { actor, span, .. }) if suc.actor() == *actor => {
                            span.push_str(&s);
                        }
                        // different actor, or no open span: flush and start a
                        // new one (cut_del is a no-op when next_del is None)
                        _ => {
                            cs.cut_del();
                            cs.next_del = Some(CS2Del {
                                pos: self.seen,
                                actor: suc.actor(),
                                span: s,
                            });
                        }
                    }
                }
            }
        }
    }

    /// Flush all pending runs; call once after the tree walk completes.
    pub(crate) fn finish(&mut self) {
        for cs in &mut self.change_sets {
            cs.cut_add();
            cs.cut_del();
        }
    }
}
impl<'a> TreeQuery<'a> for Attribute2 {
    fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult {
        // a fresh insertion starts a new element, so reset the dedupe marker
        if element.insert {
            self.last_seen = None;
        }
        // count each visible element once, even when several ops share the
        // same elemid (conflicting updates)
        if self.last_seen.is_none() && element.visible() {
            self.update_add(element);
            self.seen += 1;
            self.last_seen = element.elemid();
        }
        // ops with successors have been overwritten or deleted
        if !element.succ.is_empty() {
            self.update_del(element);
        }
        self.pos += 1;
        QueryResult::Next
    }
}

View file

@ -99,6 +99,10 @@ impl<'a> TreeQuery<'a> for InsertNth {
self.last_seen = None; self.last_seen = None;
self.last_insert = element.elemid(); self.last_insert = element.elemid();
} }
if self.valid.is_some() && element.valid_mark_anchor() {
self.last_valid_insert = Some(element.elemid_or_key());
self.valid = None;
}
if self.last_seen.is_none() && element.visible() { if self.last_seen.is_none() && element.visible() {
if self.seen >= self.target { if self.seen >= self.target {
return QueryResult::Finish; return QueryResult::Finish;

View file

@ -19,7 +19,7 @@ impl ListVals {
} }
impl<'a> TreeQuery<'a> for ListVals { impl<'a> TreeQuery<'a> for ListVals {
fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { fn query_node(&mut self, child: &'a OpTreeNode) -> QueryResult {
let start = 0; let start = 0;
for pos in start..child.len() { for pos in start..child.len() {
let op = child.get(pos).unwrap(); let op = child.get(pos).unwrap();

View file

@ -0,0 +1,78 @@
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op, OpId, OpType, ScalarValue};
use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)]
/// Query that collects every mark (`MarkBegin`/`MarkEnd` pair) in a sequence
/// object as a [`RawSpan`] with absolute start/end positions.
pub(crate) struct RawSpans {
// Number of ops visited so far.
pos: usize,
// Number of visible elements seen so far (the coordinate system for spans).
seen: usize,
// elemid of the last element counted as visible (dedupes multi-op elements).
last_seen: Option<ElemId>,
// elemid of the most recent insert op.
last_insert: Option<ElemId>,
// NOTE(review): never read within this query — confirm before removing.
changed: bool,
pub(crate) spans: Vec<RawSpan>,
}
#[derive(Debug, Clone, PartialEq)]
/// A single resolved mark: the op that created it plus its element range.
pub(crate) struct RawSpan {
// Id of the `MarkBegin` op that opened this span.
pub(crate) id: OpId,
// Index (in visible elements) where the mark begins.
pub(crate) start: usize,
// Index where the mark ends; stays 0 until the matching `MarkEnd` is seen.
pub(crate) end: usize,
pub(crate) name: String,
pub(crate) value: ScalarValue,
}
impl RawSpans {
pub(crate) fn new() -> Self {
RawSpans {
pos: 0,
seen: 0,
last_seen: None,
last_insert: None,
changed: false,
spans: Vec::new(),
}
}
}
impl<'a> TreeQuery<'a> for RawSpans {
// Visits every op in order, recording live MarkBegin ops as new spans and
// closing them when the matching MarkEnd is reached.
fn query_element_with_metadata(&mut self, element: &Op, m: &OpSetMetadata) -> QueryResult {
// find location to insert
// mark or set
// Only live ops (no successors) can open or close a span.
if element.succ.is_empty() {
if let OpType::MarkBegin(md) = &element.action {
// Keep `spans` ordered by the lamport timestamp of the mark op;
// the id is expected to be absent, so binary_search_by yields
// Err(insertion_point).
let pos = self
.spans
.binary_search_by(|probe| m.lamport_cmp(probe.id, element.id))
.unwrap_err();
self.spans.insert(
pos,
RawSpan {
id: element.id,
start: self.seen,
end: 0,
name: md.name.clone(),
value: md.value.clone(),
},
);
}
if let OpType::MarkEnd(_) = &element.action {
// A MarkEnd op is assumed to carry the id immediately after its
// MarkBegin's id, so `id.prev()` locates the span to close.
for s in self.spans.iter_mut() {
if s.id == element.id.prev() {
s.end = self.seen;
break;
}
}
}
}
// An insert op starts a new element; reset visibility tracking.
if element.insert {
self.last_seen = None;
self.last_insert = element.elemid();
}
// Count each visible element exactly once.
if self.last_seen.is_none() && element.visible() {
self.seen += 1;
self.last_seen = element.elemid();
}
self.pos += 1;
QueryResult::Next
}
}

View file

@ -0,0 +1,109 @@
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op, OpType, ScalarValue};
use std::borrow::Cow;
use std::collections::HashMap;
use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)]
/// Query that walks a sequence object and emits a [`Span`] each time the set
/// of active marks changes.
pub(crate) struct Spans<'a> {
// Number of ops visited so far.
pos: usize,
// Number of visible elements seen so far.
seen: usize,
// elemid of the last element counted as visible.
last_seen: Option<ElemId>,
// elemid of the most recent insert op.
last_insert: Option<ElemId>,
// elemid of the last visible element since the current mark set took
// effect, and its value when the previous span was emitted; comparing the
// two suppresses spans that cover no visible elements.
seen_at_this_mark: Option<ElemId>,
seen_at_last_mark: Option<ElemId>,
// MarkBegin ops currently open, kept sorted by lamport order.
ops: Vec<&'a Op>,
// Active mark name -> value as of the last `check_marks` call.
marks: HashMap<String, &'a ScalarValue>,
// True when the mark set changed but no span has been emitted for it yet.
changed: bool,
pub(crate) spans: Vec<Span<'a>>,
}
#[derive(Debug, Clone, PartialEq)]
/// A run of a sequence starting at `pos` with a fixed set of active marks.
pub struct Span<'a> {
// Index (in visible elements) where this mark combination starts.
pub pos: usize,
// Active (name, value) mark pairs, sorted by name.
pub marks: Vec<(String, Cow<'a, ScalarValue>)>,
}
impl<'a> Spans<'a> {
/// Construct a query in its initial state.
pub(crate) fn new() -> Self {
Spans {
pos: 0,
seen: 0,
last_seen: None,
last_insert: None,
seen_at_last_mark: None,
seen_at_this_mark: None,
changed: false,
ops: Vec::new(),
marks: HashMap::new(),
spans: Vec::new(),
}
}
/// Recompute the active mark set from the currently open `MarkBegin` ops
/// and, when it differs from the previous set, emit a new [`Span`] at the
/// current visible position.
pub(crate) fn check_marks(&mut self) {
let mut new_marks = HashMap::new();
// `ops` is sorted by lamport order, so for duplicate mark names the op
// with the greatest timestamp supplies the winning value.
for op in &self.ops {
if let OpType::MarkBegin(m) = &op.action {
new_marks.insert(m.name.clone(), &m.value);
}
}
if new_marks != self.marks {
self.changed = true;
self.marks = new_marks;
}
// Only emit once some visible element separates this mark set from the
// previous one. The `is_none && is_none` clause lets the very first
// span through (both sides equal, so the inequality alone would fail).
if self.changed
&& (self.seen_at_last_mark != self.seen_at_this_mark
|| self.seen_at_last_mark.is_none() && self.seen_at_this_mark.is_none())
{
self.changed = false;
self.seen_at_last_mark = self.seen_at_this_mark;
// Marks are exposed sorted by name for deterministic output.
let mut marks: Vec<_> = self
.marks
.iter()
.map(|(key, val)| (key.clone(), Cow::Borrowed(*val)))
.collect();
marks.sort_by(|(k1, _), (k2, _)| k1.cmp(k2));
self.spans.push(Span {
pos: self.seen,
marks,
});
}
}
}
impl<'a> TreeQuery<'a> for Spans<'a> {
/*
fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult {
unimplemented!()
}
*/
// Visits every op in order, maintaining the set of open marks and emitting
// spans (via `check_marks`) whenever a visible element is reached.
fn query_element_with_metadata(&mut self, element: &'a Op, m: &OpSetMetadata) -> QueryResult {
// find location to insert
// mark or set
// Only live ops (no successors) can change the set of open marks.
if element.succ.is_empty() {
if let OpType::MarkBegin(_) = &element.action {
// Insert in lamport order so `check_marks` resolves conflicting
// marks deterministically.
let pos = self
.ops
.binary_search_by(|probe| m.lamport_cmp(probe.id, element.id))
.unwrap_err();
self.ops.insert(pos, element);
}
if let OpType::MarkEnd(_) = &element.action {
// The matching MarkBegin is assumed to hold the id immediately
// preceding this MarkEnd's id.
self.ops.retain(|op| op.id != element.id.prev());
}
}
// An insert op starts a new element; reset visibility tracking.
if element.insert {
self.last_seen = None;
self.last_insert = element.elemid();
}
if self.last_seen.is_none() && element.visible() {
// Re-evaluate marks before counting the element so an emitted
// span's `pos` points at the first element it covers.
self.check_marks();
self.seen += 1;
self.last_seen = element.elemid();
self.seen_at_this_mark = element.elemid();
}
self.pos += 1;
QueryResult::Next
}
}

View file

@ -1,3 +1,6 @@
use crate::{
decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, ChangeHash,
};
use itertools::Itertools; use itertools::Itertools;
use std::{ use std::{
borrow::Cow, borrow::Cow,
@ -6,10 +9,7 @@ use std::{
io::Write, io::Write,
}; };
use crate::{ use crate::{types::HASH_SIZE, ApplyOptions, OpObserver};
decoding, decoding::Decoder, encoding::Encodable, types::HASH_SIZE, ApplyOptions, Automerge,
AutomergeError, Change, ChangeHash, OpObserver,
};
mod bloom; mod bloom;
mod state; mod state;

View file

@ -171,6 +171,73 @@ impl TransactionInner {
self.operations.push((obj, prop, op)); self.operations.push((obj, prop, op));
} }
#[allow(clippy::too_many_arguments)]
/// Add a mark named `mark` with payload `value` over the range `start..end`
/// of the sequence object `obj`, by inserting a `MarkBegin` op at `start`
/// and a `MarkEnd` op at `end`.
///
/// The two consecutive `do_insert` calls allocate consecutive op ids, which
/// is how the span queries later pair the end back to its begin (via
/// `OpId::prev`). NOTE(review): exact semantics of the expand flags
/// (whether boundary inserts inherit the mark) — confirm against callers.
pub(crate) fn mark<O: AsRef<ExId>>(
&mut self,
doc: &mut Automerge,
obj: O,
start: usize,
expand_start: bool,
end: usize,
expand_end: bool,
mark: &str,
value: ScalarValue,
) -> Result<(), AutomergeError> {
let obj = doc.exid_to_obj(obj.as_ref())?;
self.do_insert(
doc,
obj,
start,
OpType::mark(mark.into(), expand_start, value),
)?;
self.do_insert(doc, obj, end, OpType::MarkEnd(expand_end))?;
Ok(())
}
/// Remove a mark previously created with `mark`: `mark` is the id of the
/// `MarkBegin` op. A delete op is issued against that id and against the
/// following id, which is assumed to be the matching `MarkEnd`.
pub(crate) fn unmark<O: AsRef<ExId>>(
&mut self,
doc: &mut Automerge,
obj: O,
mark: O,
) -> Result<(), AutomergeError> {
let obj = doc.exid_to_obj(obj.as_ref())?;
let markid = doc.exid_to_obj_tmp_unchecked(mark.as_ref())?.0;
// Delete op targeting the MarkBegin.
let op1 = Op {
id: self.next_id(),
action: OpType::Delete,
key: markid.into(),
succ: Default::default(),
pred: doc.ops.m.sorted_opids(vec![markid].into_iter()),
insert: false,
};
let q1 = doc.ops.search(&obj, query::SeekOp::new(&op1));
doc.ops.add_succ(&obj, q1.succ.into_iter(), &op1);
//for i in q1.succ {
// doc.ops.replace(&obj, i, |old_op| old_op.add_succ(&op1));
//}
// NOTE(review): an empty map key is used as a placeholder prop for the
// recorded operation — confirm downstream observers tolerate this.
self.operations.push((obj, Prop::Map("".into()), op1));
// The MarkEnd op was allocated the id directly after the MarkBegin.
let markid = markid.next();
// Delete op targeting the MarkEnd.
let op2 = Op {
id: self.next_id(),
action: OpType::Delete,
key: markid.into(),
succ: Default::default(),
pred: doc.ops.m.sorted_opids(vec![markid].into_iter()),
insert: false,
};
let q2 = doc.ops.search(&obj, query::SeekOp::new(&op2));
doc.ops.add_succ(&obj, q2.succ.into_iter(), &op2);
//for i in q2.succ {
// doc.ops.replace(&obj, i, |old_op| old_op.add_succ(&op2));
//}
self.operations.push((obj, Prop::Map("".into()), op2));
Ok(())
}
pub(crate) fn insert<V: Into<ScalarValue>>( pub(crate) fn insert<V: Into<ScalarValue>>(
&mut self, &mut self,
doc: &mut Automerge, doc: &mut Automerge,

View file

@ -1,12 +1,12 @@
use std::ops::RangeBounds; use std::ops::RangeBounds;
use super::{CommitOptions, Transactable, TransactionInner};
use crate::exid::ExId; use crate::exid::ExId;
use crate::query;
use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValue, Value, Values}; use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValue, Value, Values};
use crate::{AutomergeError, Keys}; use crate::{AutomergeError, Keys};
use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt}; use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt};
use super::{CommitOptions, Transactable, TransactionInner};
/// A transaction on a document. /// A transaction on a document.
/// Transactions group operations into a single change so that no other operations can happen /// Transactions group operations into a single change so that no other operations can happen
/// in-between. /// in-between.
@ -129,6 +129,33 @@ impl<'a> Transactable for Transaction<'a> {
.insert(self.doc, obj.as_ref(), index, value) .insert(self.doc, obj.as_ref(), index, value)
} }
#[allow(clippy::too_many_arguments)]
/// Set a mark named `mark` with payload `value` over `start..end` of the
/// sequence object `obj`; delegates to the inner transaction.
fn mark<O: AsRef<ExId>>(
&mut self,
obj: O,
start: usize,
expand_start: bool,
end: usize,
expand_end: bool,
mark: &str,
value: ScalarValue,
) -> Result<(), AutomergeError> {
self.inner.as_mut().unwrap().mark(
self.doc,
obj,
start,
expand_start,
end,
expand_end,
mark,
value,
)
}
/// Remove a mark, identified by the id of its `MarkBegin` op; delegates to
/// the inner transaction.
fn unmark<O: AsRef<ExId>>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError> {
self.inner.as_mut().unwrap().unmark(self.doc, obj, mark)
}
fn insert_object<O: AsRef<ExId>>( fn insert_object<O: AsRef<ExId>>(
&mut self, &mut self,
obj: O, obj: O,
@ -253,6 +280,32 @@ impl<'a> Transactable for Transaction<'a> {
self.doc.text_at(obj, heads) self.doc.text_at(obj, heads)
} }
/// Experimental mark/span API: the spans of `obj` with their active marks.
fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::Span<'_>>, AutomergeError> {
self.doc.spans(obj)
}
/// Experimental mark/span API: the raw span data of `obj`.
fn raw_spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::SpanInfo>, AutomergeError> {
self.doc.raw_spans(obj)
}
/// Experimental attribution API: attribute the current state of `obj` to
/// the given `change_sets`, relative to `baseline`.
fn attribute<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet>, AutomergeError> {
self.doc.attribute(obj, baseline, change_sets)
}
/// Experimental attribution API, second variant returning `ChangeSet2`.
fn attribute2<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet2>, AutomergeError> {
self.doc.attribute2(obj, baseline, change_sets)
}
fn get<O: AsRef<ExId>, P: Into<Prop>>( fn get<O: AsRef<ExId>, P: Into<Prop>>(
&self, &self,
obj: O, obj: O,

View file

@ -1,6 +1,7 @@
use std::ops::RangeBounds; use std::ops::RangeBounds;
use crate::exid::ExId; use crate::exid::ExId;
use crate::query;
use crate::{ use crate::{
AutomergeError, ChangeHash, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, AutomergeError, ChangeHash, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt,
ObjType, Parents, Prop, ScalarValue, Value, Values, ObjType, Parents, Prop, ScalarValue, Value, Values,
@ -61,6 +62,21 @@ pub trait Transactable {
object: ObjType, object: ObjType,
) -> Result<ExId, AutomergeError>; ) -> Result<ExId, AutomergeError>;
/// Set a mark within a range on a list
///
/// `start`/`end` give the element range, `mark` names the mark and `value`
/// is its payload. NOTE(review): the `expand_start`/`expand_end` flags
/// control boundary behavior — confirm exact semantics with implementors.
#[allow(clippy::too_many_arguments)]
fn mark<O: AsRef<ExId>>(
&mut self,
obj: O,
start: usize,
expand_start: bool,
end: usize,
expand_end: bool,
mark: &str,
value: ScalarValue,
) -> Result<(), AutomergeError>;
/// Remove a mark, identified by the id of its `MarkBegin` op.
fn unmark<O: AsRef<ExId>>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError>;
/// Increment the counter at the prop in the object by `value`. /// Increment the counter at the prop in the object by `value`.
fn increment<O: AsRef<ExId>, P: Into<Prop>>( fn increment<O: AsRef<ExId>, P: Into<Prop>>(
&mut self, &mut self,
@ -151,6 +167,28 @@ pub trait Transactable {
heads: &[ChangeHash], heads: &[ChangeHash],
) -> Result<String, AutomergeError>; ) -> Result<String, AutomergeError>;
/// Experimental mark/span API: the spans of `obj` with their active marks.
fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::Span<'_>>, AutomergeError>;
/// Experimental mark/span API: the raw span data of `obj`.
fn raw_spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::SpanInfo>, AutomergeError>;
/// Experimental attribution API: attribute the current state of `obj` to
/// the given `change_sets`, relative to `baseline`.
fn attribute<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet>, AutomergeError>;
/// Experimental attribution API, second variant returning `ChangeSet2`.
fn attribute2<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet2>, AutomergeError>;
/// Get the value at this prop in the object. /// Get the value at this prop in the object.
fn get<O: AsRef<ExId>, P: Into<Prop>>( fn get<O: AsRef<ExId>, P: Into<Prop>>(
&self, &self,

View file

@ -182,9 +182,29 @@ impl fmt::Display for ObjType {
#[derive(PartialEq, Debug, Clone)] #[derive(PartialEq, Debug, Clone)]
pub enum OpType { pub enum OpType {
Make(ObjType), Make(ObjType),
/// Perform a deletion, expanding the operation to cover `n` deletions (multiOp).
Delete, Delete,
Increment(i64), Increment(i64),
Put(ScalarValue), Put(ScalarValue),
MarkBegin(MarkData),
MarkEnd(bool),
}
impl OpType {
    /// Build a `MarkBegin` op type carrying the mark's name, expand flag
    /// and payload value.
    pub(crate) fn mark(name: String, expand: bool, value: ScalarValue) -> Self {
        Self::MarkBegin(MarkData { name, value, expand })
    }
}
#[derive(PartialEq, Debug, Clone)]
pub struct MarkData {
pub name: String,
pub value: ScalarValue,
pub expand: bool,
} }
impl From<ObjType> for OpType { impl From<ObjType> for OpType {
@ -219,6 +239,14 @@ impl OpId {
pub(crate) fn actor(&self) -> usize { pub(crate) fn actor(&self) -> usize {
self.1 self.1
} }
#[inline]
/// The op id immediately before this one for the same actor.
/// NOTE(review): underflows (panics in debug builds) when the counter is 0
/// — callers must only use this on ids known to have a predecessor.
pub(crate) fn prev(&self) -> OpId {
OpId(self.0 - 1, self.1)
}
#[inline]
/// The op id immediately after this one for the same actor.
pub(crate) fn next(&self) -> OpId {
OpId(self.0 + 1, self.1)
}
} }
impl Exportable for ObjId { impl Exportable for ObjId {
@ -419,7 +447,7 @@ impl Op {
} }
pub(crate) fn visible(&self) -> bool { pub(crate) fn visible(&self) -> bool {
if self.is_inc() { if self.is_inc() || self.is_mark() {
false false
} else if self.is_counter() { } else if self.is_counter() {
self.succ.len() <= self.incs() self.succ.len() <= self.incs()
@ -444,6 +472,18 @@ impl Op {
matches!(&self.action, OpType::Increment(_)) matches!(&self.action, OpType::Increment(_))
} }
/// Whether this op is a live mark boundary (no successors) that can act as
/// an anchor for inserts: an expanding `MarkBegin` or a non-expanding
/// `MarkEnd`. NOTE(review): expand-flag semantics inferred from the
/// pattern; confirm against the insert queries that consume this.
pub(crate) fn valid_mark_anchor(&self) -> bool {
self.succ.is_empty()
&& matches!(
&self.action,
OpType::MarkBegin(MarkData { expand: true, .. }) | OpType::MarkEnd(false)
)
}
/// Whether this op is a `MarkBegin` or `MarkEnd` (mark ops are never
/// counted as visible elements).
pub(crate) fn is_mark(&self) -> bool {
matches!(&self.action, OpType::MarkBegin(_) | OpType::MarkEnd(_))
}
pub(crate) fn is_counter(&self) -> bool { pub(crate) fn is_counter(&self) -> bool {
matches!(&self.action, OpType::Put(ScalarValue::Counter(_))) matches!(&self.action, OpType::Put(ScalarValue::Counter(_)))
} }
@ -472,6 +512,13 @@ impl Op {
} }
} }
/// The string payload of this op, if it is a `Put` of a string scalar.
pub(crate) fn as_string(&self) -> Option<String> {
match &self.action {
OpType::Put(scalar) => scalar.as_string(),
_ => None,
}
}
pub(crate) fn get_increment_value(&self) -> Option<i64> { pub(crate) fn get_increment_value(&self) -> Option<i64> {
if let OpType::Increment(i) = self.action { if let OpType::Increment(i) = self.action {
Some(i) Some(i)
@ -484,6 +531,8 @@ impl Op {
match &self.action { match &self.action {
OpType::Make(obj_type) => Value::Object(*obj_type), OpType::Make(obj_type) => Value::Object(*obj_type),
OpType::Put(scalar) => Value::Scalar(Cow::Borrowed(scalar)), OpType::Put(scalar) => Value::Scalar(Cow::Borrowed(scalar)),
OpType::MarkBegin(mark) => Value::Scalar(Cow::Owned(format!("markBegin[{}]={}",mark.name, mark.value).into())),
OpType::MarkEnd(_) => Value::Scalar(Cow::Owned("markEnd".into())),
_ => panic!("cant convert op into a value - {:?}", self), _ => panic!("cant convert op into a value - {:?}", self),
} }
} }
@ -492,6 +541,8 @@ impl Op {
match &self.action { match &self.action {
OpType::Make(obj_type) => Value::Object(*obj_type), OpType::Make(obj_type) => Value::Object(*obj_type),
OpType::Put(scalar) => Value::Scalar(Cow::Owned(scalar.clone())), OpType::Put(scalar) => Value::Scalar(Cow::Owned(scalar.clone())),
OpType::MarkBegin(mark) => Value::Scalar(Cow::Owned(format!("markBegin[{}]={}",mark.name, mark.value).into())),
OpType::MarkEnd(_) => Value::Scalar(Cow::Owned("markEnd".into())),
_ => panic!("cant convert op into a value - {:?}", self), _ => panic!("cant convert op into a value - {:?}", self),
} }
} }
@ -502,6 +553,8 @@ impl Op {
OpType::Put(value) if self.insert => format!("i:{}", value), OpType::Put(value) if self.insert => format!("i:{}", value),
OpType::Put(value) => format!("s:{}", value), OpType::Put(value) => format!("s:{}", value),
OpType::Make(obj) => format!("make{}", obj), OpType::Make(obj) => format!("make{}", obj),
OpType::MarkBegin(m) => format!("mark{}={}", m.name, m.value),
OpType::MarkEnd(_) => "unmark".into(),
OpType::Increment(val) => format!("inc:{}", val), OpType::Increment(val) => format!("inc:{}", val),
OpType::Delete => "del".to_string(), OpType::Delete => "del".to_string(),
} }

View file

@ -14,6 +14,13 @@ pub enum Value<'a> {
} }
impl<'a> Value<'a> { impl<'a> Value<'a> {
/// Return the string contents when this value is a scalar string;
/// `None` for objects and non-string scalars.
pub fn as_string(&self) -> Option<String> {
    if let Value::Scalar(val) = self {
        val.as_string()
    } else {
        None
    }
}
pub fn map() -> Value<'a> { pub fn map() -> Value<'a> {
Value::Object(ObjType::Map) Value::Object(ObjType::Map)
} }
@ -629,6 +636,13 @@ impl ScalarValue {
} }
} }
/// Return an owned copy of the string contents when this is a `Str`
/// variant; `None` otherwise.
pub fn as_string(&self) -> Option<String> {
    if let ScalarValue::Str(s) = self {
        Some(s.to_string())
    } else {
        None
    }
}
pub fn counter(n: i64) -> ScalarValue { pub fn counter(n: i64) -> ScalarValue {
ScalarValue::Counter(n.into()) ScalarValue::Counter(n.into())
} }

View file

@ -238,6 +238,8 @@ impl OpTableRow {
crate::OpType::Put(v) => format!("set {}", v), crate::OpType::Put(v) => format!("set {}", v),
crate::OpType::Make(obj) => format!("make {}", obj), crate::OpType::Make(obj) => format!("make {}", obj),
crate::OpType::Increment(v) => format!("inc {}", v), crate::OpType::Increment(v) => format!("inc {}", v),
crate::OpType::MarkBegin(v) => format!("mark {}={}", v.name, v.value),
crate::OpType::MarkEnd(v) => format!("/mark {}", v),
}; };
let prop = match op.key { let prop = match op.key {
crate::types::Key::Map(k) => metadata.props[k].clone(), crate::types::Key::Map(k) => metadata.props[k].clone(),

View file

@ -0,0 +1,39 @@
use automerge::transaction::Transactable;
use automerge::{AutoCommit, AutomergeError, ROOT};
/*
mod helpers;
use helpers::{
pretty_print, realize, realize_obj,
RealizedObject,
};
*/
#[test]
fn simple_attribute_text() -> Result<(), AutomergeError> {
    // Base document with some text.
    let mut doc = AutoCommit::new();
    let note = doc.put_object(&ROOT, "note", automerge::ObjType::Text)?;
    doc.splice_text(&note, 0, 0, "hello little world")?;
    let baseline = doc.get_heads();
    assert_eq!(doc.text(&note)?, "hello little world");

    // Fork 1 replaces " little" with " big".
    let mut doc2 = doc.fork();
    doc2.splice_text(&note, 5, 7, " big")?;
    let h2 = doc2.get_heads();
    assert_eq!(doc2.text(&note)?, "hello big world");

    // Fork 2 prepends "Well, ".
    let mut doc3 = doc.fork();
    doc3.splice_text(&note, 0, 0, "Well, ")?;
    let h3 = doc3.get_heads();
    assert_eq!(doc3.text(&note)?, "Well, hello little world");

    // Merge both forks back into the base document.
    doc.merge(&mut doc2)?;
    doc.merge(&mut doc3)?;
    let text = doc.text(&note)?;
    assert_eq!(text, "Well, hello big world");

    // Attribute the merged text to the two forks' change sets.
    let cs = vec![h2, h3];
    let attribute = doc.attribute(&note, &baseline, &cs)?;
    // Fork 1 added " big" and deleted " little" at (shifted) position 15.
    assert_eq!(&text[attribute[0].add[0].clone()], " big");
    assert_eq!(attribute[0].del[0], (15, " little".to_owned()));
    // Fork 2 added the leading "Well, ".
    assert_eq!(&text[attribute[1].add[0].clone()], "Well, ");
    Ok(())
}

View file

@ -7,9 +7,9 @@ yarn --cwd $WASM_PROJECT install;
yarn --cwd $WASM_PROJECT build; yarn --cwd $WASM_PROJECT build;
# If the dependencies are already installed we delete automerge-wasm. This makes # If the dependencies are already installed we delete automerge-wasm. This makes
# this script usable for iterative development. # this script usable for iterative development.
if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then #if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then
rm -rf $JS_PROJECT/node_modules/automerge-wasm # rm -rf $JS_PROJECT/node_modules/automerge-wasm
fi #fi
# --check-files forces yarn to check if the local dep has changed # --check-files forces yarn to check if the local dep has changed
yarn --cwd $JS_PROJECT install --check-files; yarn --cwd $JS_PROJECT install --check-files;
yarn --cwd $JS_PROJECT test; yarn --cwd $JS_PROJECT test;