Compare commits

...

33 commits

Author SHA1 Message Date
Alex Good
30b4f1d02e Fix lifetimes on list_range_at 2023-03-11 00:47:19 +00:00
Orion Henry
c3ceda48e9 first attempt at an api 2023-03-10 18:23:00 -06:00
Orion Henry
4990ca9223 first draft of diff() api 2023-03-10 14:32:21 -06:00
Orion Henry
271d5cbead cleanup based on alex's comments 2023-03-10 12:57:13 -06:00
Orion Henry
aa0fdc7d2d Merge remote-tracking branch 'origin/main' into marks_port 2023-03-10 12:06:58 -06:00
Orion Henry
c38d9e883e add unmark() and marks() to unstable api 2023-03-01 10:54:28 -06:00
Orion Henry
2f4cf7b328 enable load callback test 2023-02-28 18:54:03 -06:00
Orion Henry
1c28e9656a add get_marks_at() 2023-02-28 17:17:14 -06:00
Orion Henry
8e818910d1 Merge remote-tracking branch 'origin/main' into marks_port 2023-02-28 14:47:44 -06:00
Orion Henry
ba491c6f72 move wasm observer into automerge 2023-02-28 13:25:07 -06:00
Orion Henry
1a539a3d79 add unmark / remove spans/raw_spans/attr 2023-02-27 12:33:55 -06:00
Orion Henry
af9b006bb0 marks name->key, range->start,end, patch callback propagates exceptions and has heads 2023-02-24 17:57:45 -06:00
Orion Henry
01c721e640 remove a fixme 2023-02-24 14:10:29 -06:00
Orion Henry
3beccfb5ee track mark data by reference 2023-02-24 13:38:10 -06:00
Orion Henry
9a6840392e let insert take a conflict flag 2023-02-24 12:26:11 -06:00
Orion Henry
d7d2c29dc7 Merge branch 'main' into marks_port 2023-02-24 09:59:19 -08:00
Orion Henry
2c6e54390b rewrote observe_current_state - added get_marks() api call - cleaned up js interface 2023-02-24 11:36:14 -06:00
Orion Henry
d7f93c5aca get mark patches working on load 2023-02-23 17:51:45 -06:00
Orion Henry
e1d81e01fc mark patch callbacks and js mark api 2023-02-16 19:35:39 -06:00
Alex Good
c6a32d8368 Correct logic when skip = B and fix formatting
A few tests were failing, which exposed the fact that if skip is `B` (the
fan-out factor of the OpTree) then we set `skip = None`, and this causes us
to attempt to return `Skip` in a non-root node. I ported the failing
test from JS to Rust and fixed the problem.

I also fixed the formatting issues.
2023-02-14 11:29:55 -06:00
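A purely illustrative TypeScript sketch of the boundary case described in this commit message (the OpTree itself is internal to automerge and is not shown in this diff; the names below are hypothetical): when descending a node whose fan-out is `B`, a remaining skip equal to `B` is still a real offset and must be pushed down into a child rather than collapsed to "no skip".

```typescript
// Hypothetical illustration only; not automerge's OpTree code.
const B = 4 // fan-out of each internal node

type Tree<T> = { leaf: T[] } | { children: Tree<T>[] }

function size<T>(node: Tree<T>): number {
  return "leaf" in node
    ? node.leaf.length
    : node.children.reduce((n, c) => n + size(c), 0)
}

// Return the element `skip` positions into the subtree. The remaining skip is
// always pushed down into a child; special-casing `skip === B` as "no skip"
// (the kind of bug described in the commit above) would break exactly that boundary.
function nth<T>(node: Tree<T>, skip: number): T | undefined {
  if ("leaf" in node) return node.leaf[skip]
  for (const child of node.children) {
    const childSize = size(child)
    if (skip < childSize) return nth(child, skip)
    skip -= childSize
  }
  return undefined
}

// With B = 4, asking for index 4 means "skip exactly B elements" of the first child.
const tree: Tree<string> = {
  children: [{ leaf: ["a", "b", "c", "d"] }, { leaf: ["e", "f"] }],
}
console.log(nth(tree, B)) // => "e"
```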
Orion Henry
a9612371e0 rework how skip works to push the logic into node 2023-02-14 11:29:55 -06:00
Conrad Irwin
e2bb0eb6b9 Use our leb128 parser for values
This ensures that values in automerge documents are encoded correctly,
and that no extra data is smuggled in any LEB fields.
2023-02-14 11:29:55 -06:00
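As a rough TypeScript sketch of the idea (the actual parser referenced by this commit is automerge's Rust leb128 code), a strict unsigned LEB128 decoder rejects truncated input and overlong, non-canonical encodings, so no extra bytes can be hidden inside a length or value field:

```typescript
// Minimal strict unsigned-LEB128 decoder (illustrative; not the automerge parser).
function decodeUleb128(bytes: Uint8Array, offset = 0): { value: number; next: number } {
  let value = 0
  let shift = 0
  let pos = offset
  for (;;) {
    if (pos >= bytes.length) throw new RangeError("truncated LEB128 value")
    const byte = bytes[pos++]
    value += (byte & 0x7f) * 2 ** shift // avoid 32-bit overflow from `<<`
    if ((byte & 0x80) === 0) {
      // A final 0x00 byte after a continuation byte re-encodes the same number
      // in more bytes; rejecting it keeps encodings canonical.
      if (byte === 0 && pos - offset > 1) {
        throw new RangeError("overlong LEB128 encoding")
      }
      if (!Number.isSafeInteger(value)) throw new RangeError("LEB128 value too large")
      return { value, next: pos }
    }
    shift += 7
  }
}

// [0xe5, 0x8e, 0x26] is the canonical encoding of 624485.
console.log(decodeUleb128(Uint8Array.of(0xe5, 0x8e, 0x26))) // { value: 624485, next: 3 }
// [0x85, 0x80, 0x00] smuggles two extra bytes into an encoding of 5 and is rejected.
```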
Alex Good
02e8ae2c70 Remove nightly from CI 2023-02-14 11:29:55 -06:00
Orion Henry
a006a32e3f get tests passing + unmark 2023-02-13 10:56:05 -06:00
Alex Good
a02f70f2b8 Use new columns instead of existing ones
The previous approach of using the key and insert columns of existing
ops was leading to quite confusing code. There's no real cost to
introducing new columns so I've switched the code to do that instead.

Introduce an `expand` and a `mark_name` column. `expand` is a boolean
column and `mark_name` is a RLE encoded string column. Neither of these
columns are encoded if they are empty.

Also move the `MarkData::name` property to use strings interned in
`OpSetMetadata::props` rather than representing the string directly, on
the basis that we will probably have a lot of repeated mark names and
we do a bunch of equality checks on them while searching, so this should
speed things up a bit.

Introduce new `MaybeBooleanEncoder` (and associated `MaybeBooleanDecoder` and
`MaybeBooleanRange`) types to represent a boolean column which is
entirely skipped if all it contains are `false` values. This allows us
to omit encoding the `expand` column for groups of ops which only ever
set it to `false`, which in turn makes us backwards compatible when not
using marks.
2023-02-09 15:09:52 +00:00
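The "skip the column entirely when it is all false" trick can be sketched in a few lines of TypeScript. This is only an illustration of the backwards-compatibility idea; the real `MaybeBooleanEncoder` lives in the Rust columnar code and uses the existing boolean RLE format:

```typescript
// Illustrative only: a boolean column that encodes to nothing when every value
// is false, so documents that never set `expand` contain no new column and
// remain readable by older code.
function encodeMaybeBooleanColumn(values: boolean[]): number[] {
  if (values.every(v => !v)) return []
  // Boolean RLE: alternating run lengths, starting with a (possibly zero-length)
  // run of `false`. Real encoders would LEB128-encode each length.
  const runs: number[] = []
  let current = false
  let run = 0
  for (const v of values) {
    if (v === current) {
      run++
    } else {
      runs.push(run)
      current = v
      run = 1
    }
  }
  runs.push(run)
  return runs
}

function decodeMaybeBooleanColumn(runs: number[], length: number): boolean[] {
  if (runs.length === 0) return new Array<boolean>(length).fill(false) // omitted column
  const out: boolean[] = []
  let current = false
  for (const run of runs) {
    for (let i = 0; i < run; i++) out.push(current)
    current = !current
  }
  return out
}

console.log(encodeMaybeBooleanColumn([false, false, false])) // => [] (column omitted)
console.log(encodeMaybeBooleanColumn([false, true, true]))   // => [1, 2]
console.log(decodeMaybeBooleanColumn([1, 2], 3))             // => [false, true, true]
```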
Alex Good
f281213a47 tests passing 2023-02-09 10:55:19 +00:00
Alex Good
a44ceacb1c everything compiles 2023-02-09 10:26:26 +00:00
Orion Henry
290c9e6872 attempt to finish - two issues outstanding 2023-02-08 14:59:41 -06:00
Alex Good
9a7dba09a4 wip 2023-02-08 18:20:38 +00:00
Alex Good
2345176526 DocOp 2023-02-08 17:53:58 +00:00
Orion Henry
9bc424d776 first test passing - needs serialization 2023-02-08 10:43:33 -06:00
Orion Henry
61f9604d0c merge with main 2023-02-06 11:34:13 -06:00
Orion Henry
d745685f5e wip 2023-01-20 13:38:53 -06:00
66 changed files with 6151 additions and 1921 deletions

View file

@ -517,6 +517,30 @@ export function loadIncremental<T>(
return progressDocument(doc, heads, opts.patchCallback || state.patchCallback)
}
/**
* Create binary save data to be appended to a save file or fed into {@link loadIncremental}
*
* @typeParam T - The type of the value which is contained in the document.
* Note that no validation is done to make sure this type is in
* fact the type of the contained value, so be a bit careful.
*
* This function is useful for incrementally saving state. The data can be appended to an
* automerge save file, or passed to a document replicating its state.
*
*/
export function saveIncremental<T>(doc: Doc<T>): Uint8Array {
const state = _state(doc)
if (state.heads) {
throw new RangeError(
"Attempting to use an out of date document - set at: " + _trace(doc)
)
}
if (_is_proxy(doc)) {
throw new RangeError("Calls to Automerge.saveIncremental cannot be made inside a change callback")
}
return state.handle.saveIncremental()
}
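A hedged usage sketch for the function above. The package import path and the exact "data produced since the last full save" semantics are assumptions based on the doc comment, not spelled out in this diff:

```typescript
import * as Automerge from "@automerge/automerge" // assumed import path

let doc = Automerge.change(Automerge.init<{ notes: string[] }>(), d => {
  d.notes = ["first"]
})
const snapshot = Automerge.save(doc) // full save

doc = Automerge.change(doc, d => {
  d.notes.push("second")
})
const delta = Automerge.saveIncremental(doc) // just the data produced since the full save

// A replica can append `delta` to its copy of the save file, or apply it directly:
let copy = Automerge.load<{ notes: string[] }>(snapshot)
copy = Automerge.loadIncremental(copy, delta)
```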
/**
* Export the contents of a document to a compressed format
*

View file

@ -4,8 +4,8 @@ export { Counter } from "./counter"
export { Int, Uint, Float64 } from "./numbers"
import { Counter } from "./counter"
import type { Patch } from "@automerge/automerge-wasm"
export type { Patch } from "@automerge/automerge-wasm"
import type { Patch, PatchInfo } from "@automerge/automerge-wasm"
export type { Patch, Mark } from "@automerge/automerge-wasm"
export type AutomergeValue =
| ScalarValue
@ -36,11 +36,9 @@ export type Doc<T> = { readonly [P in keyof T]: T[P] }
* Callback which is called by various methods in this library to notify the
* user of what changes have been made.
* @param patch - A description of the changes made
* @param before - The document before the change was made
* @param after - The document after the change was made
* @param info - An object that has the "before" and "after" document state, and the "from" and "to" heads
*/
export type PatchCallback<T> = (
patches: Array<Patch>,
before: Doc<T>,
after: Doc<T>
info: PatchInfo<T>
) => void
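For example, a callback registered at init time now receives the patches plus a single info object carrying both document states and the heads on either side of the change (a sketch; the import path is an assumption):

```typescript
import * as Automerge from "@automerge/automerge" // assumed import path

let doc = Automerge.init<{ bird?: string }>({
  patchCallback: (patches, info) => {
    console.log(patches)                 // e.g. [{ action: "put", path: ["bird"], ... }]
    console.log(info.before, info.after) // document state on either side of the change
    console.log(info.from, info.to)      // heads before and after
  },
})
doc = Automerge.change(doc, d => {
  d.bird = "Goldfinch"
})
```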

View file

@ -44,11 +44,12 @@ export {
Float64,
type Patch,
type PatchCallback,
type Mark,
type AutomergeValue,
type ScalarValue,
} from "./unstable_types"
import type { PatchCallback } from "./stable"
import type { ScalarValue, Mark, PatchCallback } from "./stable"
import { type UnstableConflicts as Conflicts } from "./conflicts"
import { unstableConflictAt } from "./conflicts"
@ -197,7 +198,11 @@ export function load<T>(
): Doc<T> {
const opts = importOpts(_opts)
opts.enableTextV2 = true
return stable.load(data, opts)
if (opts.patchCallback) {
return stable.loadIncremental(stable.init(opts), data)
} else {
return stable.load(data, opts)
}
}
function importOpts<T>(
@ -233,6 +238,66 @@ export function splice<T>(
}
}
export function mark<T>(
doc: Doc<T>,
prop: stable.Prop,
name: string,
range: string,
value: ScalarValue
) {
if (!_is_proxy(doc)) {
throw new RangeError("object cannot be modified outside of a change block")
}
const state = _state(doc, false)
const objectId = _obj(doc)
if (!objectId) {
throw new RangeError("invalid object for mark")
}
const obj = `${objectId}/${prop}`
try {
return state.handle.mark(obj, range, name, value)
} catch (e) {
throw new RangeError(`Cannot mark: ${e}`)
}
}
export function unmark<T>(
doc: Doc<T>,
prop: stable.Prop,
name: string,
start: number,
end: number
) {
if (!_is_proxy(doc)) {
throw new RangeError("object cannot be modified outside of a change block")
}
const state = _state(doc, false)
const objectId = _obj(doc)
if (!objectId) {
throw new RangeError("invalid object for unmark")
}
const obj = `${objectId}/${prop}`
try {
return state.handle.unmark(obj, name, start, end)
} catch (e) {
throw new RangeError(`Cannot unmark: ${e}`)
}
}
export function marks<T>(doc: Doc<T>, prop: stable.Prop): Mark[] {
const state = _state(doc, false)
const objectId = _obj(doc)
if (!objectId) {
throw new RangeError("invalid object for unmark")
}
const obj = `${objectId}/${prop}`
try {
return state.handle.marks(obj)
} catch (e) {
throw new RangeError(`Cannot call marks(): ${e}`)
}
}
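Usage mirrors the new javascript/test/marks.ts test added later in this diff; a condensed sketch (import path assumed):

```typescript
import { unstable as Automerge } from "@automerge/automerge" // assumed import path

let doc = Automerge.init<{ x?: string }>()
doc = Automerge.change(doc, d => {
  d.x = "the quick fox jumps over the lazy dog"
})
// Mark characters 5..10 as bold; "[..]" is the non-sticky ("normal") range syntax.
doc = Automerge.change(doc, d => {
  Automerge.mark(d, "x", "font-weight", "[5..10]", "bold")
})
// Remove the mark over 7..9, splitting it in two.
doc = Automerge.change(doc, d => {
  Automerge.unmark(d, "x", "font-weight", 7, 9)
})
console.log(Automerge.marks(doc, "x"))
// => [ { name: "font-weight", value: "bold", start: 5, end: 7 },
//      { name: "font-weight", value: "bold", start: 9, end: 10 } ]
```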
/**
* Get the conflicts associated with a property
*

View file

@ -8,6 +8,7 @@ export {
Float64,
type Patch,
type PatchCallback,
type Mark,
} from "./types"
import { RawString } from "./raw_string"

View file

@ -340,8 +340,7 @@ describe("Automerge", () => {
const s2 = Automerge.change(
s1,
{
patchCallback: (patches, before, after) =>
callbacks.push({ patches, before, after }),
patchCallback: (patches, info) => callbacks.push({ patches, info }),
},
doc => {
doc.birds = ["Goldfinch"]
@ -363,8 +362,8 @@ describe("Automerge", () => {
path: ["birds", 0, 0],
value: "Goldfinch",
})
assert.strictEqual(callbacks[0].before, s1)
assert.strictEqual(callbacks[0].after, s2)
assert.strictEqual(callbacks[0].info.before, s1)
assert.strictEqual(callbacks[0].info.after, s2)
})
it("should call a patchCallback set up on document initialisation", () => {
@ -374,8 +373,7 @@ describe("Automerge", () => {
after: Automerge.Doc<any>
}> = []
s1 = Automerge.init({
patchCallback: (patches, before, after) =>
callbacks.push({ patches, before, after }),
patchCallback: (patches, info) => callbacks.push({ patches, info }),
})
const s2 = Automerge.change(s1, doc => (doc.bird = "Goldfinch"))
assert.strictEqual(callbacks.length, 1)
@ -389,8 +387,8 @@ describe("Automerge", () => {
path: ["bird", 0],
value: "Goldfinch",
})
assert.strictEqual(callbacks[0].before, s1)
assert.strictEqual(callbacks[0].after, s2)
assert.strictEqual(callbacks[0].info.before, s1)
assert.strictEqual(callbacks[0].info.after, s2)
})
})
@ -1570,7 +1568,7 @@ describe("Automerge", () => {
assert.deepStrictEqual(doc, { list: expected })
})
it.skip("should call patchCallback if supplied to load", () => {
it("should call patchCallback if supplied to load", () => {
const s1 = Automerge.change(
Automerge.init<any>(),
doc => (doc.birds = ["Goldfinch"])
@ -1579,40 +1577,19 @@ describe("Automerge", () => {
const callbacks: Array<any> = [],
actor = Automerge.getActorId(s1)
const reloaded = Automerge.load<any>(Automerge.save(s2), {
patchCallback(patch, before, after) {
callbacks.push({ patch, before, after })
patchCallback(patches, opts) {
callbacks.push({ patches, opts })
},
})
assert.strictEqual(callbacks.length, 1)
assert.deepStrictEqual(callbacks[0].patch, {
maxOp: 3,
deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash],
clock: { [actor]: 2 },
pendingChanges: 0,
diffs: {
objectId: "_root",
type: "map",
props: {
birds: {
[`1@${actor}`]: {
objectId: `1@${actor}`,
type: "list",
edits: [
{
action: "multi-insert",
index: 0,
elemId: `2@${actor}`,
values: ["Goldfinch", "Chaffinch"],
},
],
},
},
},
},
})
assert.deepStrictEqual(callbacks[0].before, {})
assert.strictEqual(callbacks[0].after, reloaded)
assert.strictEqual(callbacks[0].local, false)
assert.deepStrictEqual(callbacks[0].patches, [
{ action: "put", path: ["birds"], value: [] },
{ action: "insert", path: ["birds", 0], values: ["", ""] },
{ action: "splice", path: ["birds", 0, 0], value: "Goldfinch" },
{ action: "splice", path: ["birds", 1, 0], value: "Chaffinch" },
])
assert.deepStrictEqual(callbacks[0].opts.before, {})
assert.strictEqual(callbacks[0].opts.after, reloaded)
})
})
@ -1812,8 +1789,8 @@ describe("Automerge", () => {
before,
Automerge.getAllChanges(s1),
{
patchCallback(patch, before, after) {
callbacks.push({ patch, before, after })
patchCallback(patch, info) {
callbacks.push({ patch, info })
},
}
)
@ -1833,8 +1810,8 @@ describe("Automerge", () => {
path: ["birds", 0, 0],
value: "Goldfinch",
})
assert.strictEqual(callbacks[0].before, before)
assert.strictEqual(callbacks[0].after, after)
assert.strictEqual(callbacks[0].info.before, before)
assert.strictEqual(callbacks[0].info.after, after)
})
it("should merge multiple applied changes into one patch", () => {

javascript/test/marks.ts Normal file
View file

@ -0,0 +1,63 @@
import * as assert from "assert"
import { unstable as Automerge } from "../src"
import * as WASM from "@automerge/automerge-wasm"
describe("Automerge", () => {
describe("marks", () => {
it("should allow marks that can be seen in patches", () => {
let callbacks = []
let doc1 = Automerge.init({
patchCallback: (patches, info) => callbacks.push(patches),
})
doc1 = Automerge.change(doc1, d => {
d.x = "the quick fox jumps over the lazy dog"
})
doc1 = Automerge.change(doc1, d => {
Automerge.mark(d, "x", "font-weight", "[5..10]", "bold")
})
doc1 = Automerge.change(doc1, d => {
Automerge.unmark(d, "x", "font-weight", 7, 9)
})
assert.deepStrictEqual(callbacks[1], [
{
action: "mark",
path: ["x"],
marks: [{ name: "font-weight", start: 5, end: 10, value: "bold" }],
},
])
assert.deepStrictEqual(callbacks[2], [
{
action: "unmark",
path: ["x"],
name: "font-weight",
start: 7,
end: 9,
},
])
callbacks = []
let doc2 = Automerge.init({
patchCallback: (patches, info) => callbacks.push(patches),
})
doc2 = Automerge.loadIncremental(doc2, Automerge.save(doc1))
assert.deepStrictEqual(callbacks[0][2], {
action: "mark",
path: ["x"],
marks: [
{ name: "font-weight", start: 5, end: 7, value: "bold" },
{ name: "font-weight", start: 9, end: 10, value: "bold" },
],
})
assert.deepStrictEqual(Automerge.marks(doc2, "x"), [
{ name: "font-weight", value: "bold", start: 5, end: 7 },
{ name: "font-weight", value: "bold", start: 9, end: 10 },
])
})
})
})

View file

@ -94,7 +94,7 @@ export type Op = {
pred: string[],
}
export type Patch = PutPatch | DelPatch | SpliceTextPatch | IncPatch | InsertPatch;
export type Patch = PutPatch | DelPatch | SpliceTextPatch | IncPatch | InsertPatch | MarkPatch | UnmarkPatch;
export type PutPatch = {
action: 'put'
@ -103,6 +103,20 @@ export type PutPatch = {
conflict: boolean
}
export type MarkPatch = {
action: 'mark'
path: Prop[],
marks: Mark[]
}
export type UnmarkPatch = {
action: 'unmark'
path: Prop[],
name: string,
start: number,
end: number
}
export type IncPatch = {
action: 'inc'
path: Prop[],
@ -127,6 +141,13 @@ export type InsertPatch = {
values: Value[],
}
export type Mark = {
name: string,
value: Value,
start: number,
end: number,
}
export function encodeChange(change: ChangeToEncode): Change;
export function create(text_v2: boolean, actor?: Actor): Automerge;
export function load(data: Uint8Array, text_v2: boolean, actor?: Actor): Automerge;
@ -165,6 +186,11 @@ export class Automerge {
increment(obj: ObjID, prop: Prop, value: number): void;
delete(obj: ObjID, prop: Prop): void;
// marks
mark(obj: ObjID, range: string, name: string, value: Value, datatype?: Datatype): void;
unmark(obj: ObjID, name: string, start: number, end: number): void;
marks(obj: ObjID, heads?: Heads): Mark[];
// returns a single value - if there is a conflict return the winner
get(obj: ObjID, prop: Prop, heads?: Heads): Value | undefined;
getWithType(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null;
@ -217,7 +243,14 @@ export class Automerge {
dump(): void;
// experimental api can go here
applyPatches<Doc>(obj: Doc, meta?: unknown, callback?: (patch: Array<Patch>, before: Doc, after: Doc) => void): Doc;
applyPatches<Doc>(obj: Doc, meta?: unknown, callback?: (patch: Array<Patch>, info: PatchInfo<Doc>) => void): Doc;
}
export interface PatchInfo<T> {
before: T,
after: T,
from: Heads,
to: Heads,
}
export interface JsSyncState {
@ -236,3 +269,4 @@ export class SyncState {
sentHashes: Heads;
readonly sharedHeads: Heads;
}
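At the automerge-wasm level the same flow is exercised directly against these declarations; a condensed sketch following the new wasm test file further down in this diff (argument order as in the implementation: obj, range, name, value):

```typescript
import { create } from "@automerge/automerge-wasm"

const doc = create(true) // text_v2 = true, as in the tests added in this PR
doc.enablePatches(true)
const text = doc.putObject("_root", "text", "")
doc.splice(text, 0, 0, "the quick fox jumps over the lazy dog")
doc.mark(text, "[0..9]", "bold", true) // obj, range, name, value
doc.commit("marks")

console.log(doc.marks(text))
// => [ { name: "bold", value: true, start: 0, end: 9 } ]
console.log(doc.popPatches().filter(p => p.action === "mark"))
// => [ { action: "mark", path: ["text"],
//        marks: [{ name: "bold", value: true, start: 0, end: 9 }] } ]
```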

View file

@ -30,6 +30,7 @@
"scripts": {
"lint": "eslint test/*.ts index.d.ts",
"debug": "cross-env PROFILE=dev TARGET_DIR=debug yarn buildall",
"dev": "cross-env PROFILE=dev TARGET_DIR=debug FEATURES='' TARGET=nodejs yarn target",
"build": "cross-env PROFILE=dev TARGET_DIR=debug FEATURES='' yarn buildall",
"release": "cross-env PROFILE=release TARGET_DIR=release yarn buildall",
"buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=bundler yarn target && cross-env TARGET=deno yarn target",
@ -42,6 +43,7 @@
"devDependencies": {
"@types/mocha": "^10.0.1",
"@types/node": "^18.11.13",
"@types/uuid": "^9.0.1",
"@typescript-eslint/eslint-plugin": "^5.46.0",
"@typescript-eslint/parser": "^5.46.0",
"cross-env": "^7.0.3",
@ -51,7 +53,8 @@
"pako": "^2.1.0",
"rimraf": "^3.0.2",
"ts-mocha": "^10.0.0",
"typescript": "^4.9.4"
"typescript": "^4.9.4",
"uuid": "^9.0.0"
},
"exports": {
"browser": "./bundler/automerge_wasm.js",

View file

@ -12,7 +12,7 @@ use std::fmt::Display;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use crate::{observer::Patch, ObjId, Value};
use am::{ObjId, Patch, PatchAction, Value};
const RAW_DATA_SYMBOL: &str = "_am_raw_value_";
const DATATYPE_SYMBOL: &str = "_am_datatype_";
@ -28,6 +28,12 @@ impl From<AR> for JsValue {
}
}
impl From<AR> for Array {
fn from(ar: AR) -> Self {
ar.0
}
}
impl From<JS> for JsValue {
fn from(js: JS) -> Self {
js.0
@ -334,11 +340,20 @@ impl TryFrom<JS> for am::sync::Message {
}
}
impl From<Vec<ChangeHash>> for AR {
fn from(values: Vec<ChangeHash>) -> Self {
AR(values
.iter()
.map(|h| JsValue::from_str(&h.to_string()))
.collect())
}
}
impl From<&[ChangeHash]> for AR {
fn from(value: &[ChangeHash]) -> Self {
AR(value
.iter()
.map(|h| JsValue::from_str(&hex::encode(h.0)))
.map(|h| JsValue::from_str(&h.to_string()))
.collect())
}
}
@ -746,13 +761,13 @@ impl Automerge {
pub(crate) fn apply_patch_to_array(
&self,
array: &Object,
patch: &Patch,
patch: &Patch<u16>,
meta: &JsValue,
exposed: &mut HashSet<ObjId>,
) -> Result<Object, error::ApplyPatch> {
let result = Array::from(array); // shallow copy
match patch {
Patch::PutSeq {
match &patch.action {
PatchAction::PutSeq {
index,
value,
expose,
@ -768,13 +783,13 @@ impl Automerge {
}
Ok(result.into())
}
Patch::DeleteSeq { index, length, .. } => {
PatchAction::DeleteSeq { index, length, .. } => {
Ok(self.sub_splice(result, *index, *length, vec![], meta)?)
}
Patch::Insert { index, values, .. } => {
PatchAction::Insert { index, values, .. } => {
Ok(self.sub_splice(result, *index, 0, values, meta)?)
}
Patch::Increment { prop, value, .. } => {
PatchAction::Increment { prop, value, .. } => {
if let Prop::Seq(index) = prop {
let index = *index as f64;
let old_val = js_get(&result, index)?.0;
@ -795,9 +810,9 @@ impl Automerge {
Err(error::ApplyPatch::IncrementKeyInSeq)
}
}
Patch::DeleteMap { .. } => Err(error::ApplyPatch::DeleteKeyFromSeq),
Patch::PutMap { .. } => Err(error::ApplyPatch::PutKeyInSeq),
Patch::SpliceText { index, value, .. } => {
PatchAction::DeleteMap { .. } => Err(error::ApplyPatch::DeleteKeyFromSeq),
PatchAction::PutMap { .. } => Err(error::ApplyPatch::PutKeyInSeq),
PatchAction::SpliceText { index, value, .. } => {
match self.text_rep {
TextRepresentation::String => Err(error::ApplyPatch::SpliceTextInSeq),
TextRepresentation::Array => {
@ -819,19 +834,20 @@ impl Automerge {
}
}
}
PatchAction::Mark { .. } | PatchAction::Unmark { .. } => Ok(result.into()),
}
}
pub(crate) fn apply_patch_to_map(
&self,
map: &Object,
patch: &Patch,
patch: &Patch<u16>,
meta: &JsValue,
exposed: &mut HashSet<ObjId>,
) -> Result<Object, error::ApplyPatch> {
let result = Object::assign(&Object::new(), map); // shallow copy
match patch {
Patch::PutMap {
match &patch.action {
PatchAction::PutMap {
key, value, expose, ..
} => {
if *expose && value.0.is_object() {
@ -844,7 +860,7 @@ impl Automerge {
}
Ok(result)
}
Patch::DeleteMap { key, .. } => {
PatchAction::DeleteMap { key, .. } => {
Reflect::delete_property(&result, &key.into()).map_err(|e| {
error::Export::Delete {
prop: key.to_string(),
@ -853,7 +869,7 @@ impl Automerge {
})?;
Ok(result)
}
Patch::Increment { prop, value, .. } => {
PatchAction::Increment { prop, value, .. } => {
if let Prop::Map(key) = prop {
let old_val = js_get(&result, key)?.0;
let old_val = self.unwrap_scalar(old_val)?;
@ -873,27 +889,30 @@ impl Automerge {
Err(error::ApplyPatch::IncrementIndexInMap)
}
}
Patch::Insert { .. } => Err(error::ApplyPatch::InsertInMap),
Patch::DeleteSeq { .. } => Err(error::ApplyPatch::SpliceInMap),
//Patch::SpliceText { .. } => Err(to_js_err("cannot Splice into map")),
Patch::SpliceText { .. } => Err(error::ApplyPatch::SpliceTextInMap),
Patch::PutSeq { .. } => Err(error::ApplyPatch::PutIdxInMap),
PatchAction::Insert { .. } => Err(error::ApplyPatch::InsertInMap),
PatchAction::DeleteSeq { .. } => Err(error::ApplyPatch::SpliceInMap),
//PatchAction::SpliceText { .. } => Err(to_js_err("cannot Splice into map")),
PatchAction::SpliceText { .. } => Err(error::ApplyPatch::SpliceTextInMap),
PatchAction::PutSeq { .. } => Err(error::ApplyPatch::PutIdxInMap),
PatchAction::Mark { .. } | PatchAction::Unmark { .. } => {
Err(error::ApplyPatch::MarkInMap)
}
}
}
pub(crate) fn apply_patch(
&self,
obj: Object,
patch: &Patch,
patch: &Patch<u16>,
depth: usize,
meta: &JsValue,
exposed: &mut HashSet<ObjId>,
) -> Result<Object, error::ApplyPatch> {
let (inner, datatype, id) = self.unwrap_object(&obj)?;
let prop = patch.path().get(depth).map(|p| prop_to_js(&p.1));
let prop = patch.path.get(depth).map(|p| prop_to_js(&p.1));
let result = if let Some(prop) = prop {
let subval = js_get(&inner, &prop)?.0;
if subval.is_string() && patch.path().len() - 1 == depth {
if subval.is_string() && patch.path.len() - 1 == depth {
if let Ok(s) = subval.dyn_into::<JsString>() {
let new_value = self.apply_patch_to_text(&s, patch)?;
let result = shallow_copy(&inner);
@ -914,12 +933,12 @@ impl Automerge {
return Ok(obj);
}
} else if Array::is_array(&inner) {
if &id == patch.obj() {
if id == patch.obj {
self.apply_patch_to_array(&inner, patch, meta, exposed)
} else {
Ok(Array::from(&inner).into())
}
} else if &id == patch.obj() {
} else if id == patch.obj {
self.apply_patch_to_map(&inner, patch, meta, exposed)
} else {
Ok(Object::assign(&Object::new(), &inner))
@ -932,17 +951,17 @@ impl Automerge {
fn apply_patch_to_text(
&self,
string: &JsString,
patch: &Patch,
patch: &Patch<u16>,
) -> Result<JsValue, error::ApplyPatch> {
match patch {
Patch::DeleteSeq { index, length, .. } => {
match &patch.action {
PatchAction::DeleteSeq { index, length, .. } => {
let index = *index as u32;
let before = string.slice(0, index);
let after = string.slice(index + *length as u32, string.length());
let result = before.concat(&after);
Ok(result.into())
}
Patch::SpliceText { index, value, .. } => {
PatchAction::SpliceText { index, value, .. } => {
let index = *index as u32;
let length = string.length();
let before = string.slice(0, index);
@ -1205,6 +1224,148 @@ fn set_hidden_value<V: Into<JsValue>>(
Ok(())
}
pub(crate) struct JsPatch(pub(crate) Patch<u16>);
fn export_path(path: &[(ObjId, Prop)], end: &Prop) -> Array {
let result = Array::new();
for p in path {
result.push(&prop_to_js(&p.1));
}
result.push(&prop_to_js(end));
result
}
fn export_just_path(path: &[(ObjId, Prop)]) -> Array {
let result = Array::new();
for p in path {
result.push(&prop_to_js(&p.1));
}
result
}
impl TryFrom<JsPatch> for JsValue {
type Error = error::Export;
fn try_from(p: JsPatch) -> Result<Self, Self::Error> {
let result = Object::new();
let path = &p.0.path;
match p.0.action {
PatchAction::PutMap { key, value, .. } => {
js_set(&result, "action", "put")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Map(key)),
)?;
js_set(
&result,
"value",
alloc(&value.0, TextRepresentation::String).1,
)?;
Ok(result.into())
}
PatchAction::PutSeq { index, value, .. } => {
js_set(&result, "action", "put")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Seq(index)),
)?;
js_set(
&result,
"value",
alloc(&value.0, TextRepresentation::String).1,
)?;
Ok(result.into())
}
PatchAction::Insert { index, values, .. } => {
js_set(&result, "action", "insert")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Seq(index)),
)?;
js_set(
&result,
"values",
values
.iter()
.map(|v| alloc(&v.0, TextRepresentation::String).1)
.collect::<Array>(),
)?;
Ok(result.into())
}
PatchAction::SpliceText { index, value, .. } => {
js_set(&result, "action", "splice")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Seq(index)),
)?;
let bytes: Vec<u16> = value.iter().cloned().collect();
js_set(&result, "value", String::from_utf16_lossy(bytes.as_slice()))?;
Ok(result.into())
}
PatchAction::Increment { prop, value, .. } => {
js_set(&result, "action", "inc")?;
js_set(&result, "path", export_path(path.as_slice(), &prop))?;
js_set(&result, "value", &JsValue::from_f64(value as f64))?;
Ok(result.into())
}
PatchAction::DeleteMap { key, .. } => {
js_set(&result, "action", "del")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Map(key)),
)?;
Ok(result.into())
}
PatchAction::DeleteSeq { index, length, .. } => {
js_set(&result, "action", "del")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Seq(index)),
)?;
if length > 1 {
js_set(&result, "length", length)?;
}
Ok(result.into())
}
PatchAction::Mark { marks, .. } => {
js_set(&result, "action", "mark")?;
js_set(&result, "path", export_just_path(path.as_slice()))?;
let marks_array = Array::new();
for m in marks.iter() {
let mark = Object::new();
js_set(&mark, "name", m.name())?;
js_set(
&mark,
"value",
&alloc(&m.value().into(), TextRepresentation::String).1,
)?;
js_set(&mark, "start", m.start as i32)?;
js_set(&mark, "end", m.end as i32)?;
marks_array.push(&mark);
}
js_set(&result, "marks", marks_array)?;
Ok(result.into())
}
PatchAction::Unmark {
name, start, end, ..
} => {
js_set(&result, "action", "unmark")?;
js_set(&result, "path", export_just_path(path.as_slice()))?;
js_set(&result, "name", name)?;
js_set(&result, "start", start as i32)?;
js_set(&result, "end", end as i32)?;
Ok(result.into())
}
}
}
}
fn shallow_copy(obj: &Object) -> Object {
if Array::is_array(obj) {
Array::from(obj).into()
@ -1406,6 +1567,8 @@ pub(crate) mod error {
SpliceTextInMap,
#[error("cannot put a seq index in a map")]
PutIdxInMap,
#[error("cannot mark a span in a map")]
MarkInMap,
#[error(transparent)]
GetProp(#[from] GetProp),
#[error(transparent)]

View file

@ -29,8 +29,10 @@ use am::transaction::CommitOptions;
use am::transaction::{Observed, Transactable, UnObserved};
use am::ScalarValue;
use automerge as am;
use automerge::{sync::SyncDoc, Change, ObjId, Prop, ReadDoc, TextEncoding, Value, ROOT};
use automerge::{sync::SyncDoc, Change, Prop, ReadDoc, TextEncoding, Value, ROOT};
use automerge::{ToggleObserver, VecOpObserver16};
use js_sys::{Array, Function, Object, Uint8Array};
use regex::Regex;
use serde::ser::Serialize;
use std::borrow::Cow;
use std::collections::HashMap;
@ -40,13 +42,9 @@ use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
mod interop;
mod observer;
mod sequence_tree;
mod sync;
mod value;
use observer::Observer;
use interop::{alloc, get_heads, import_obj, js_set, to_js_err, to_prop, AR, JS};
use sync::SyncState;
use value::Datatype;
@ -60,7 +58,7 @@ macro_rules! log {
};
}
type AutoCommit = am::AutoCommitWithObs<Observed<Observer>>;
type AutoCommit = am::AutoCommitWithObs<Observed<ToggleObserver<VecOpObserver16>>>;
#[cfg(feature = "wee_alloc")]
#[global_allocator]
@ -82,6 +80,15 @@ impl std::default::Default for TextRepresentation {
}
}
impl From<TextRepresentation> for am::op_observer::TextRepresentation {
fn from(tr: TextRepresentation) -> Self {
match tr {
TextRepresentation::Array => am::op_observer::TextRepresentation::Array,
TextRepresentation::String => am::op_observer::TextRepresentation::String,
}
}
}
#[wasm_bindgen]
#[derive(Debug)]
pub struct Automerge {
@ -97,7 +104,10 @@ impl Automerge {
actor: Option<String>,
text_rep: TextRepresentation,
) -> Result<Automerge, error::BadActorId> {
let mut doc = AutoCommit::default().with_encoding(TextEncoding::Utf16);
let mut doc = AutoCommit::default()
.with_observer(ToggleObserver::default().with_text_rep(text_rep.into()))
.with_encoding(TextEncoding::Utf16);
doc.observer().set_text_rep(text_rep.into());
if let Some(a) = actor {
let a = automerge::ActorId::from(hex::decode(a)?.to_vec());
doc.set_actor(a);
@ -545,8 +555,9 @@ impl Automerge {
let enable = enable
.as_bool()
.ok_or_else(|| to_js_err("must pass a bool to enablePatches"))?;
let old_enabled = self.doc.observer().enable(enable);
self.doc.observer().set_text_rep(self.text_rep);
let heads = self.doc.get_heads();
let old_enabled = self.doc.observer().enable(enable, heads);
self.doc.observer().set_text_rep(self.text_rep.into());
Ok(old_enabled.into())
}
@ -571,11 +582,12 @@ impl Automerge {
object: JsValue,
meta: JsValue,
callback: JsValue,
) -> Result<JsValue, error::ApplyPatch> {
) -> Result<JsValue, JsValue> {
let mut object = object
.dyn_into::<Object>()
.map_err(|_| error::ApplyPatch::NotObjectd)?;
let patches = self.doc.observer().take_patches();
let end_heads = self.doc.get_heads();
let (patches, begin_heads) = self.doc.observer().take_patches(end_heads.clone());
let callback = callback.dyn_into::<Function>().ok();
// even if there are no patches we may need to update the meta object
@ -594,19 +606,24 @@ impl Automerge {
object = self.apply_patch(object, p, 0, &meta, &mut exposed)?;
}
self.finalize_exposed(&object, exposed, &meta)?;
if let Some(c) = &callback {
if !patches.is_empty() {
let patches: Array = patches
.into_iter()
.map(interop::JsPatch)
.map(JsValue::try_from)
.collect::<Result<_, _>>()?;
c.call3(&JsValue::undefined(), &patches.into(), &before, &object)
.map_err(error::ApplyPatch::PatchCallback)?;
let info = Object::new();
js_set(&info, "before", &before)?;
js_set(&info, "after", &object)?;
js_set(&info, "from", AR::from(begin_heads))?;
js_set(&info, "to", AR::from(end_heads))?;
c.call2(&JsValue::undefined(), &patches.into(), &info)?;
}
}
self.finalize_exposed(&object, exposed, &meta)?;
Ok(object.into())
}
@ -616,10 +633,11 @@ impl Automerge {
// committed.
// If we pop the patches then we won't be able to revert them.
let patches = self.doc.observer().take_patches();
let heads = self.doc.get_heads();
let (patches, _heads) = self.doc.observer().take_patches(heads);
let result = Array::new();
for p in patches {
result.push(&p.try_into()?);
result.push(&interop::JsPatch(p).try_into()?);
}
Ok(result)
}
@ -702,17 +720,12 @@ impl Automerge {
#[wasm_bindgen(js_name = getHeads)]
pub fn get_heads(&mut self) -> Array {
let heads = self.doc.get_heads();
let heads: Array = heads
.iter()
.map(|h| JsValue::from_str(&hex::encode(h.0)))
.collect();
heads
AR::from(heads).into()
}
#[wasm_bindgen(js_name = getActorId)]
pub fn get_actor_id(&self) -> String {
let actor = self.doc.get_actor();
actor.to_string()
self.doc.get_actor().to_string()
}
#[wasm_bindgen(js_name = getLastLocalChange)]
@ -775,7 +788,8 @@ impl Automerge {
) -> Result<JsValue, error::Materialize> {
let (obj, obj_type) = self.import(obj).unwrap_or((ROOT, am::ObjType::Map));
let heads = get_heads(heads)?;
let _patches = self.doc.observer().take_patches(); // throw away patches
let current_heads = self.doc.get_heads();
let _patches = self.doc.observer().take_patches(current_heads); // throw away patches
Ok(self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta)?)
}
@ -786,6 +800,75 @@ impl Automerge {
let hash = self.doc.empty_change(options);
JsValue::from_str(&hex::encode(hash))
}
pub fn mark(
&mut self,
obj: JsValue,
range: JsValue,
name: JsValue,
value: JsValue,
datatype: JsValue,
) -> Result<(), JsValue> {
let (obj, _) = self.import(obj)?;
let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap();
let range = range.as_string().ok_or("range must be a string")?;
let cap = re.captures_iter(&range).next().ok_or(format!("(range={}) range must be in the form of (start..end] or [start..end) etc... () for sticky, [] for normal",range))?;
let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?;
let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?;
let left_sticky = &cap[1] == "(";
let right_sticky = &cap[4] == ")";
let name = name
.as_string()
.ok_or("invalid mark name")
.map_err(to_js_err)?;
let value = self
.import_scalar(&value, &datatype.as_string())
.ok_or_else(|| to_js_err("invalid value"))?;
self.doc
.mark(
&obj,
am::marks::Mark::new(name, value, start, end),
am::marks::ExpandMark::from(left_sticky, right_sticky),
)
.map_err(to_js_err)?;
Ok(())
}
pub fn unmark(
&mut self,
obj: JsValue,
key: JsValue,
start: f64,
end: f64,
) -> Result<(), JsValue> {
let (obj, _) = self.import(obj)?;
let key = key.as_string().ok_or("key must be a string")?;
self.doc
.unmark(&obj, &key, start as usize, end as usize)
.map_err(to_js_err)?;
Ok(())
}
pub fn marks(&mut self, obj: JsValue, heads: Option<Array>) -> Result<JsValue, JsValue> {
let (obj, _) = self.import(obj)?;
let heads = get_heads(heads)?;
let marks = if let Some(heads) = heads {
self.doc.marks_at(obj, &heads).map_err(to_js_err)?
} else {
self.doc.marks(obj).map_err(to_js_err)?
};
let result = Array::new();
for m in marks {
let mark = Object::new();
let (_datatype, value) = alloc(&m.value().clone().into(), self.text_rep);
js_set(&mark, "name", m.name())?;
js_set(&mark, "value", value)?;
js_set(&mark, "start", m.start as i32)?;
js_set(&mark, "end", m.end as i32)?;
result.push(&mark.into());
}
Ok(result.into())
}
}
#[wasm_bindgen(js_name = create)]
@ -812,7 +895,7 @@ pub fn load(
TextRepresentation::Array
};
let mut doc = am::AutoCommitWithObs::<UnObserved>::load(&data)?
.with_observer(Observer::default().with_text_rep(text_rep))
.with_observer(ToggleObserver::default().with_text_rep(text_rep.into()))
.with_encoding(TextEncoding::Utf16);
if let Some(s) = actor {
let actor =

View file

@ -1,518 +0,0 @@
#![allow(dead_code)]
use std::borrow::Cow;
use crate::{
interop::{self, alloc, js_set},
TextRepresentation,
};
use automerge::{ObjId, OpObserver, Prop, ReadDoc, ScalarValue, Value};
use js_sys::{Array, Object};
use wasm_bindgen::prelude::*;
use crate::sequence_tree::SequenceTree;
#[derive(Debug, Clone, Default)]
pub(crate) struct Observer {
enabled: bool,
patches: Vec<Patch>,
text_rep: TextRepresentation,
}
impl Observer {
pub(crate) fn take_patches(&mut self) -> Vec<Patch> {
std::mem::take(&mut self.patches)
}
pub(crate) fn enable(&mut self, enable: bool) -> bool {
if self.enabled && !enable {
self.patches.truncate(0)
}
let old_enabled = self.enabled;
self.enabled = enable;
old_enabled
}
fn get_path<R: ReadDoc>(&mut self, doc: &R, obj: &ObjId) -> Option<Vec<(ObjId, Prop)>> {
match doc.parents(obj) {
Ok(parents) => parents.visible_path(),
Err(e) => {
automerge::log!("error generating patch : {:?}", e);
None
}
}
}
pub(crate) fn with_text_rep(mut self, text_rep: TextRepresentation) -> Self {
self.text_rep = text_rep;
self
}
pub(crate) fn set_text_rep(&mut self, text_rep: TextRepresentation) {
self.text_rep = text_rep;
}
}
#[derive(Debug, Clone)]
pub(crate) enum Patch {
PutMap {
obj: ObjId,
path: Vec<(ObjId, Prop)>,
key: String,
value: (Value<'static>, ObjId),
expose: bool,
},
PutSeq {
obj: ObjId,
path: Vec<(ObjId, Prop)>,
index: usize,
value: (Value<'static>, ObjId),
expose: bool,
},
Insert {
obj: ObjId,
path: Vec<(ObjId, Prop)>,
index: usize,
values: SequenceTree<(Value<'static>, ObjId)>,
},
SpliceText {
obj: ObjId,
path: Vec<(ObjId, Prop)>,
index: usize,
value: SequenceTree<u16>,
},
Increment {
obj: ObjId,
path: Vec<(ObjId, Prop)>,
prop: Prop,
value: i64,
},
DeleteMap {
obj: ObjId,
path: Vec<(ObjId, Prop)>,
key: String,
},
DeleteSeq {
obj: ObjId,
path: Vec<(ObjId, Prop)>,
index: usize,
length: usize,
},
}
impl OpObserver for Observer {
fn insert<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
index: usize,
tagged_value: (Value<'_>, ObjId),
) {
if self.enabled {
let value = (tagged_value.0.to_owned(), tagged_value.1);
if let Some(Patch::Insert {
obj: tail_obj,
index: tail_index,
values,
..
}) = self.patches.last_mut()
{
let range = *tail_index..=*tail_index + values.len();
if tail_obj == &obj && range.contains(&index) {
values.insert(index - *tail_index, value);
return;
}
}
if let Some(path) = self.get_path(doc, &obj) {
let mut values = SequenceTree::new();
values.push(value);
let patch = Patch::Insert {
path,
obj,
index,
values,
};
self.patches.push(patch);
}
}
}
fn splice_text<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, value: &str) {
if self.enabled {
if self.text_rep == TextRepresentation::Array {
for (i, c) in value.chars().enumerate() {
self.insert(
doc,
obj.clone(),
index + i,
(
Value::Scalar(Cow::Owned(ScalarValue::Str(c.to_string().into()))),
ObjId::Root, // We hope this is okay
),
);
}
return;
}
if let Some(Patch::SpliceText {
obj: tail_obj,
index: tail_index,
value: prev_value,
..
}) = self.patches.last_mut()
{
let range = *tail_index..=*tail_index + prev_value.len();
if tail_obj == &obj && range.contains(&index) {
let i = index - *tail_index;
for (n, ch) in value.encode_utf16().enumerate() {
prev_value.insert(i + n, ch)
}
return;
}
}
if let Some(path) = self.get_path(doc, &obj) {
let mut v = SequenceTree::new();
for ch in value.encode_utf16() {
v.push(ch)
}
let patch = Patch::SpliceText {
path,
obj,
index,
value: v,
};
self.patches.push(patch);
}
}
}
fn delete_seq<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, length: usize) {
if self.enabled {
match self.patches.last_mut() {
Some(Patch::SpliceText {
obj: tail_obj,
index: tail_index,
value,
..
}) => {
let range = *tail_index..*tail_index + value.len();
if tail_obj == &obj
&& range.contains(&index)
&& range.contains(&(index + length - 1))
{
for _ in 0..length {
value.remove(index - *tail_index);
}
return;
}
}
Some(Patch::Insert {
obj: tail_obj,
index: tail_index,
values,
..
}) => {
let range = *tail_index..*tail_index + values.len();
if tail_obj == &obj
&& range.contains(&index)
&& range.contains(&(index + length - 1))
{
for _ in 0..length {
values.remove(index - *tail_index);
}
return;
}
}
Some(Patch::DeleteSeq {
obj: tail_obj,
index: tail_index,
length: tail_length,
..
}) => {
if tail_obj == &obj && index == *tail_index {
*tail_length += length;
return;
}
}
_ => {}
}
if let Some(path) = self.get_path(doc, &obj) {
let patch = Patch::DeleteSeq {
path,
obj,
index,
length,
};
self.patches.push(patch)
}
}
}
fn delete_map<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, key: &str) {
if self.enabled {
if let Some(path) = self.get_path(doc, &obj) {
let patch = Patch::DeleteMap {
path,
obj,
key: key.to_owned(),
};
self.patches.push(patch)
}
}
}
fn put<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
_conflict: bool,
) {
if self.enabled {
let expose = false;
if let Some(path) = self.get_path(doc, &obj) {
let value = (tagged_value.0.to_owned(), tagged_value.1);
let patch = match prop {
Prop::Map(key) => Patch::PutMap {
path,
obj,
key,
value,
expose,
},
Prop::Seq(index) => Patch::PutSeq {
path,
obj,
index,
value,
expose,
},
};
self.patches.push(patch);
}
}
}
fn expose<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
_conflict: bool,
) {
if self.enabled {
let expose = true;
if let Some(path) = self.get_path(doc, &obj) {
let value = (tagged_value.0.to_owned(), tagged_value.1);
let patch = match prop {
Prop::Map(key) => Patch::PutMap {
path,
obj,
key,
value,
expose,
},
Prop::Seq(index) => Patch::PutSeq {
path,
obj,
index,
value,
expose,
},
};
self.patches.push(patch);
}
}
}
fn increment<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (i64, ObjId),
) {
if self.enabled {
if let Some(path) = self.get_path(doc, &obj) {
let value = tagged_value.0;
self.patches.push(Patch::Increment {
path,
obj,
prop,
value,
})
}
}
}
fn text_as_seq(&self) -> bool {
self.text_rep == TextRepresentation::Array
}
}
impl automerge::op_observer::BranchableObserver for Observer {
fn merge(&mut self, other: &Self) {
self.patches.extend_from_slice(other.patches.as_slice())
}
fn branch(&self) -> Self {
Observer {
patches: vec![],
enabled: self.enabled,
text_rep: self.text_rep,
}
}
}
fn prop_to_js(p: &Prop) -> JsValue {
match p {
Prop::Map(key) => JsValue::from_str(key),
Prop::Seq(index) => JsValue::from_f64(*index as f64),
}
}
fn export_path(path: &[(ObjId, Prop)], end: &Prop) -> Array {
let result = Array::new();
for p in path {
result.push(&prop_to_js(&p.1));
}
result.push(&prop_to_js(end));
result
}
impl Patch {
pub(crate) fn path(&self) -> &[(ObjId, Prop)] {
match &self {
Self::PutMap { path, .. } => path.as_slice(),
Self::PutSeq { path, .. } => path.as_slice(),
Self::Increment { path, .. } => path.as_slice(),
Self::Insert { path, .. } => path.as_slice(),
Self::SpliceText { path, .. } => path.as_slice(),
Self::DeleteMap { path, .. } => path.as_slice(),
Self::DeleteSeq { path, .. } => path.as_slice(),
}
}
pub(crate) fn obj(&self) -> &ObjId {
match &self {
Self::PutMap { obj, .. } => obj,
Self::PutSeq { obj, .. } => obj,
Self::Increment { obj, .. } => obj,
Self::Insert { obj, .. } => obj,
Self::SpliceText { obj, .. } => obj,
Self::DeleteMap { obj, .. } => obj,
Self::DeleteSeq { obj, .. } => obj,
}
}
}
impl TryFrom<Patch> for JsValue {
type Error = interop::error::Export;
fn try_from(p: Patch) -> Result<Self, Self::Error> {
let result = Object::new();
match p {
Patch::PutMap {
path, key, value, ..
} => {
js_set(&result, "action", "put")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Map(key)),
)?;
js_set(
&result,
"value",
alloc(&value.0, TextRepresentation::String).1,
)?;
Ok(result.into())
}
Patch::PutSeq {
path, index, value, ..
} => {
js_set(&result, "action", "put")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Seq(index)),
)?;
js_set(
&result,
"value",
alloc(&value.0, TextRepresentation::String).1,
)?;
Ok(result.into())
}
Patch::Insert {
path,
index,
values,
..
} => {
js_set(&result, "action", "insert")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Seq(index)),
)?;
js_set(
&result,
"values",
values
.iter()
.map(|v| alloc(&v.0, TextRepresentation::String).1)
.collect::<Array>(),
)?;
Ok(result.into())
}
Patch::SpliceText {
path, index, value, ..
} => {
js_set(&result, "action", "splice")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Seq(index)),
)?;
let bytes: Vec<u16> = value.iter().cloned().collect();
js_set(&result, "value", String::from_utf16_lossy(bytes.as_slice()))?;
Ok(result.into())
}
Patch::Increment {
path, prop, value, ..
} => {
js_set(&result, "action", "inc")?;
js_set(&result, "path", export_path(path.as_slice(), &prop))?;
js_set(&result, "value", &JsValue::from_f64(value as f64))?;
Ok(result.into())
}
Patch::DeleteMap { path, key, .. } => {
js_set(&result, "action", "del")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Map(key)),
)?;
Ok(result.into())
}
Patch::DeleteSeq {
path,
index,
length,
..
} => {
js_set(&result, "action", "del")?;
js_set(
&result,
"path",
export_path(path.as_slice(), &Prop::Seq(index)),
)?;
if length > 1 {
js_set(&result, "length", length)?;
}
Ok(result.into())
}
}
}
}

View file

@ -0,0 +1,572 @@
import { describe, it } from 'mocha';
//@ts-ignore
import assert from 'assert'
//@ts-ignore
import { create, load, Automerge, encodeChange, decodeChange } from '..'
import { v4 as uuid } from "uuid"
let util = require('util')
describe('Automerge', () => {
describe('marks', () => {
it('should handle marks [..]', () => {
let doc = create(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[3..6]", "bold" , true)
let text = doc.text(list)
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 3, end: 6 }])
doc.insert(list, 6, "A")
doc.insert(list, 3, "A")
marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 4, end: 7 }])
})
it('should handle mark and unmark', () => {
let doc = create(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[2..8]", "bold" , true)
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 2, end: 8 }])
doc.unmark(list, 'bold', 4, 6)
doc.insert(list, 7, "A")
doc.insert(list, 3, "A")
marks = doc.marks(list);
assert.deepStrictEqual(marks, [
{ name: 'bold', value: true, start: 2, end: 5 },
{ name: 'bold', value: true, start: 7, end: 10 },
])
})
it('should handle mark and unmark of overlapping marks', () => {
let doc = create(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[2..6]", "bold" , true)
doc.mark(list, "[5..8]", "bold" , true)
doc.mark(list, "[3..6]", "underline" , true)
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [
{ name: 'underline', value: true, start: 3, end: 6 },
{ name: 'bold', value: true, start: 2, end: 8 },
])
doc.unmark(list, 'bold', 4, 6)
doc.insert(list, 7, "A")
doc.insert(list, 3, "A")
marks = doc.marks(list);
assert.deepStrictEqual(marks, [
{ name: 'bold', value: true, start: 2, end: 5 },
{ name: 'underline', value: true, start: 4, end: 7 },
{ name: 'bold', value: true, start: 7, end: 10 },
])
doc.unmark(list, 'bold', 0, 11)
marks = doc.marks(list);
assert.deepStrictEqual(marks, [
{ name: 'underline', value: true, start: 4, end: 7 }
])
})
it('should handle marks [..] at the beginning of a string', () => {
let doc = create(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[0..3]", "bold", true)
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 0, end: 3 }])
let doc2 = doc.fork()
doc2.insert(list, 0, "A")
doc2.insert(list, 4, "B")
doc.merge(doc2)
marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 1, end: 4 }])
})
it('should handle marks [..] with splice', () => {
let doc = create(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[0..3]", "bold", true)
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 0, end: 3 }])
let doc2 = doc.fork()
doc2.splice(list, 0, 2, "AAA")
doc2.splice(list, 4, 0, "BBB")
doc.merge(doc2)
marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 3, end: 4 }])
})
it('should handle marks across multiple forks', () => {
let doc = create(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[0..3]", "bold", true)
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 0, end: 3 }])
let doc2 = doc.fork()
doc2.splice(list, 1, 1, "Z") // replace 'aaa' with 'aZa' inside mark.
let doc3 = doc.fork()
doc3.insert(list, 0, "AAA") // should not be included in mark.
doc.merge(doc2)
doc.merge(doc3)
marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 3, end: 6 }])
})
it('should handle marks with deleted ends [..]', () => {
let doc = create(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[3..6]", "bold" , true)
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 3, end: 6 }])
doc.delete(list,5);
doc.delete(list,5);
doc.delete(list,2);
doc.delete(list,2);
marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 2, end: 3 }])
doc.insert(list, 3, "A")
doc.insert(list, 2, "A")
marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 3, end: 4 }])
})
it('should handle sticky marks (..)', () => {
let doc = create(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "(3..6)", "bold" , true)
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 3, end: 6 }])
doc.insert(list, 6, "A")
doc.insert(list, 3, "A")
marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 3, end: 8 }])
})
it('should handle sticky marks with deleted ends (..)', () => {
let doc = create(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "(3..6)", "bold" , true)
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 3, end: 6 }])
doc.delete(list,5);
doc.delete(list,5);
doc.delete(list,2);
doc.delete(list,2);
marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 2, end: 3 }])
doc.insert(list, 3, "A")
doc.insert(list, 2, "A")
marks = doc.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 2, end: 5 }])
// make sure save/load can handle marks
let saved = doc.save()
let doc2 = load(saved,true)
marks = doc2.marks(list);
assert.deepStrictEqual(marks, [{ name: 'bold', value: true, start: 2, end: 5 }])
assert.deepStrictEqual(doc.getHeads(), doc2.getHeads())
assert.deepStrictEqual(doc.save(), doc2.save())
})
it('should handle overlapping marks', () => {
let doc : Automerge = create(true, "aabbcc")
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
doc.mark(list, "[0..37]", "bold" , true)
doc.mark(list, "[4..19]", "itallic" , true)
let id = uuid(); // we want each comment to be unique so give it a unique id
doc.mark(list, "[10..13]", `comment:${id}` , "foxes are my favorite animal!")
doc.commit("marks");
let marks = doc.marks(list);
assert.deepStrictEqual(marks, [
{ name: `comment:${id}`, start: 10, end: 13, value: 'foxes are my favorite animal!' },
{ name: 'itallic', start: 4, end: 19, value: true },
{ name: 'bold', start: 0, end: 37, value: true }
])
let text = doc.text(list);
assert.deepStrictEqual(text, "the quick fox jumps over the lazy dog");
let all = doc.getChanges([])
let decoded = all.map((c) => decodeChange(c))
let util = require('util');
let encoded = decoded.map((c) => encodeChange(c))
let decoded2 = encoded.map((c) => decodeChange(c))
let doc2 = create(true);
doc2.applyChanges(encoded)
assert.deepStrictEqual(doc.marks(list) , doc2.marks(list))
assert.deepStrictEqual(doc.save(), doc2.save())
})
it('generates patches for marks made locally', () => {
let doc : Automerge = create(true, "aabbcc")
doc.enablePatches(true)
let list = doc.putObject("_root", "list", "")
doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
let h1 = doc.getHeads()
doc.mark(list, "[0..37]", "bold" , true)
doc.mark(list, "[4..19]", "itallic" , true)
let id = uuid(); // we want each comment to be unique so give it a unique id
doc.mark(list, "[10..13]", `comment:${id}` , "foxes are my favorite animal!")
doc.commit("marks");
let h2 = doc.getHeads()
let patches = doc.popPatches();
let util = require('util')
assert.deepEqual(patches, [
{ action: 'put', path: [ 'list' ], value: '' },
{
action: 'splice', path: [ 'list', 0 ],
value: 'the quick fox jumps over the lazy dog'
},
{
action: 'mark', path: [ 'list' ],
marks: [
{ name: 'bold', value: true, start: 0, end: 37 },
{ name: 'itallic', value: true, start: 4, end: 19 },
{ name: `comment:${id}`, value: 'foxes are my favorite animal!', start: 10, end: 13 }
]
}
]);
})
it('marks should create patches that respect marks that supersede it', () => {
let doc1 : Automerge = create(true, "aabbcc")
let list = doc1.putObject("_root", "list", "")
doc1.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
let doc2 = load(doc1.save(),true);
let doc3 = load(doc1.save(),true);
doc3.enablePatches(true)
doc1.put("/","foo", "bar"); // make a change to our op counter is higher than doc2
doc1.mark(list, "[0..5]", "x", "a")
doc1.mark(list, "[8..11]", "x", "b")
doc2.mark(list, "[4..13]", "x", "c");
doc3.merge(doc1)
doc3.merge(doc2)
let patches = doc3.popPatches();
assert.deepEqual(patches, [
{ action: 'put', path: [ 'foo' ], value: 'bar' },
{
action: 'mark',
path: [ 'list' ],
marks: [
{ name: 'x', value: 'a', start: 0, end: 5 },
{ name: 'x', value: 'b', start: 8, end: 11 },
{ name: 'x', value: 'c', start: 5, end: 8 },
{ name: 'x', value: 'c', start: 11, end: 13 },
]
},
]);
})
})
describe('loading marks', () => {
it('a mark will appear on load', () => {
let doc1 : Automerge = create(true, "aabbcc")
doc1.enablePatches(true)
let list = doc1.putObject("_root", "list", "")
doc1.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
doc1.mark(list, "[5..10]", "xxx", "aaa")
let patches1 = doc1.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches1, [{
action: 'mark', path: [ 'list' ], marks: [ { name: 'xxx', value: 'aaa', start: 5, end: 10 }],
}]);
let doc2 : Automerge = create(true);
doc2.enablePatches(true)
doc2.loadIncremental(doc1.save())
let patches2 = doc2.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches2, [{
action: 'mark', path: ['list'], marks: [ { name: 'xxx', value: 'aaa', start: 5, end: 10}],
}]);
})
it('overlapping marks will coalesce on load', () => {
let doc1 : Automerge = create(true, "aabbcc")
doc1.enablePatches(true)
let list = doc1.putObject("_root", "list", "")
doc1.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
doc1.mark(list, "[5..15]", "xxx", "aaa")
doc1.mark(list, "[10..20]", "xxx", "aaa")
doc1.mark(list, "[15..25]", "xxx", "aaa")
let patches1 = doc1.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches1, [
{ action: 'mark', path: [ 'list' ], marks: [
{ name: 'xxx', value: 'aaa', start: 5, end: 15 },
{ name: 'xxx', value: 'aaa', start: 10, end: 20 },
{ name: 'xxx', value: 'aaa', start: 15, end: 25 },
] },
]);
let doc2 : Automerge = create(true);
doc2.enablePatches(true)
doc2.loadIncremental(doc1.save())
let patches2 = doc2.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches2, [
{ action: 'mark', path: ['list'], marks: [ { name: 'xxx', value: 'aaa', start: 5, end: 25}] },
]);
})
it('coalescing handles different values', () => {
let doc1 : Automerge = create(true, "aabbcc")
doc1.enablePatches(true)
let list = doc1.putObject("_root", "list", "")
doc1.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
doc1.mark(list, "[5..15]", "xxx", "aaa")
doc1.mark(list, "[10..20]", "xxx", "bbb")
doc1.mark(list, "[15..25]", "xxx", "aaa")
let patches1 = doc1.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches1, [
{ action: 'mark', path: [ 'list' ], marks: [
{ name: 'xxx', value: 'aaa', start: 5, end: 15 },
{ name: 'xxx', value: 'bbb', start: 10, end: 20 },
{ name: 'xxx', value: 'aaa', start: 15, end: 25 },
]}
]);
let doc2 : Automerge = create(true);
doc2.enablePatches(true)
doc2.loadIncremental(doc1.save())
let patches2 = doc2.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches2, [
{ action: 'mark', path: ['list'], marks: [
{ name: 'xxx', value: 'aaa', start: 5, end: 10 },
{ name: 'xxx', value: 'bbb', start: 10, end: 15 },
{ name: 'xxx', value: 'aaa', start: 15, end: 25 },
]},
]);
})
it('will not coalesce marks with different names', () => {
let doc1 : Automerge = create(true, "aabbcc")
doc1.enablePatches(true)
let list = doc1.putObject("_root", "list", "")
doc1.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
doc1.mark(list, "[5..15]", "xxx", "aaa")
doc1.mark(list, "[10..20]", "yyy", "aaa")
doc1.mark(list, "[15..25]", "zzz", "aaa")
let patches1 = doc1.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches1, [
{ action: 'mark', path: [ 'list' ], marks: [
{ name: 'xxx', value: 'aaa', start: 5, end:15 },
{ name: 'yyy', value: 'aaa', start: 10, end: 20 },
{ name: 'zzz', value: 'aaa', start: 15, end: 25 },
]}
]);
let doc2 : Automerge = create(true);
doc2.enablePatches(true)
doc2.loadIncremental(doc1.save())
let patches2 = doc2.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches2, [
{ action: 'mark', path: [ 'list' ], marks: [
{ name: 'xxx', value: 'aaa', start: 5, end: 15 },
{ name: 'yyy', value: 'aaa', start: 10, end: 20 },
{ name: 'zzz', value: 'aaa', start: 15, end: 25 },
]}
]);
})
it('coalescing handles async merge', () => {
let doc1 : Automerge = create(true, "aabbcc")
doc1.enablePatches(true)
let list = doc1.putObject("_root", "list", "")
doc1.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
let doc2 = doc1.fork()
doc1.put("/", "key1", "value"); // incrementing op counter so we win vs doc2
doc1.put("/", "key2", "value"); // incrementing op counter so we win vs doc2
doc1.mark(list, "[10..20]", "xxx", "aaa")
doc1.mark(list, "[15..25]", "xxx", "aaa")
doc2.mark(list, "[5..30]" , "xxx", "bbb")
doc1.merge(doc2)
let patches1 = doc1.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches1, [
{ action: 'mark', path: [ 'list' ], marks: [
{ name: 'xxx', value: 'aaa', start: 10, end: 20 },
{ name: 'xxx', value: 'aaa', start: 15, end: 25 },
{ name: 'xxx', value: 'bbb', start: 5, end: 10 },
{ name: 'xxx', value: 'bbb', start: 25, end: 30 },
]
},
]);
let doc3 : Automerge = create(true);
doc3.enablePatches(true)
doc3.loadIncremental(doc1.save())
let patches2 = doc3.popPatches().filter((p:any) => p.action == "mark")
let marks = doc3.marks(list)
assert.deepEqual(marks, [
{ name: 'xxx', value: 'bbb', start: 5, end: 10 },
{ name: 'xxx', value: 'aaa', start: 10, end: 25 },
{ name: 'xxx', value: 'bbb', start: 25, end: 30 },
]);
assert.deepEqual(patches2, [{ action: 'mark', path: [ 'list' ], marks }]);
})
it('does not show marks hidden in merge', () => {
let doc1 : Automerge = create(true, "aabbcc")
doc1.enablePatches(true)
let list = doc1.putObject("_root", "list", "")
doc1.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
let doc2 = doc1.fork()
doc1.put("/", "key1", "value"); // incrementing op counter so we win vs doc2
doc1.put("/", "key2", "value"); // incrementing op counter so we win vs doc2
doc1.mark(list, "[10..20]", "xxx", "aaa")
doc1.mark(list, "[15..25]", "xxx", "aaa")
doc2.mark(list, "[11..24]" , "xxx", "bbb")
doc1.merge(doc2)
let patches1 = doc1.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches1, [
{ action: 'mark', path: [ 'list' ], marks: [
{ name: 'xxx', value: 'aaa', start: 10, end: 20 },
{ name: 'xxx', value: 'aaa', start: 15, end: 25 },
]
},
]);
let doc3 : Automerge = create(true);
doc3.enablePatches(true)
doc3.loadIncremental(doc1.save())
let patches2 = doc3.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches2, [
{ action: 'mark', path: [ 'list' ], marks: [
{ name: 'xxx', value: 'aaa', start: 10, end: 25 },
]}
]);
})
it('coalesces disconnected marks with async merge', () => {
let doc1 : Automerge = create(true, "aabbcc")
doc1.enablePatches(true)
let list = doc1.putObject("_root", "list", "")
doc1.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
let doc2 = doc1.fork()
doc1.put("/", "key1", "value"); // incrementing op counter so we win vs doc2
doc1.put("/", "key2", "value"); // incrementing op counter so we win vs doc2
doc1.mark(list, "[5..11]", "xxx", "aaa")
doc1.mark(list, "[19..25]", "xxx", "aaa")
doc2.mark(list, "[10..20]" , "xxx", "aaa")
doc1.merge(doc2)
let patches1 = doc1.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches1, [
{ action: 'mark', path: [ 'list' ], marks: [
{ name: 'xxx', value: 'aaa', start: 5, end: 11 },
{ name: 'xxx', value: 'aaa', start: 19, end: 25 },
{ name: 'xxx', value: 'aaa', start: 11, end: 19 },
]
},
]);
let doc3 : Automerge = create(true);
doc3.enablePatches(true)
doc3.loadIncremental(doc1.save())
let patches2 = doc3.popPatches().filter((p:any) => p.action == "mark")
assert.deepEqual(patches2, [
{ action: 'mark', path: [ 'list' ], marks: [
{ name: 'xxx', value: 'aaa', start: 5, end: 25 },
]}
]);
})
it('can get marks at a given heads', () => {
let doc1 : Automerge = create(true, "aabbcc")
doc1.enablePatches(true)
let list = doc1.putObject("_root", "list", "")
doc1.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
let heads1 = doc1.getHeads();
let marks1 = doc1.marks(list);
doc1.mark(list, "[3..25]", "xxx", "aaa")
let heads2 = doc1.getHeads();
let marks2 = doc1.marks(list);
doc1.mark(list, "[4..11]", "yyy", "bbb")
let heads3 = doc1.getHeads();
let marks3 = doc1.marks(list);
doc1.unmark(list, "xxx", 9, 20)
let heads4 = doc1.getHeads();
let marks4 = doc1.marks(list);
assert.deepEqual(marks1, doc1.marks(list,heads1))
assert.deepEqual(marks2, doc1.marks(list,heads2))
assert.deepEqual(marks3, doc1.marks(list,heads3))
assert.deepEqual(marks4, doc1.marks(list,heads4))
})
})
})


@ -1941,6 +1941,40 @@ describe('Automerge', () => {
assert.deepEqual(mat.text, "ab011ij")
})
it('propagates exceptions thrown in patch callback', () => {
const doc = create(true)
doc.enablePatches(true)
let mat : any = doc.materialize("/")
doc.putObject("/", "text", "abcdefghij")
assert.throws(() => {
doc.applyPatches(mat, {}, (patches, info) => {
throw new RangeError("hello world")
})
}, /RangeError: hello world/)
})
it('patch callback has correct patch info', () => {
const doc = create(true)
let mat : any = doc.materialize("/")
doc.putObject("/", "text", "abcdefghij")
let before = doc.materialize("/")
let from = doc.getHeads()
doc.enablePatches(true)
doc.splice("/text", 2, 2, "00")
let after = doc.materialize("/")
let to = doc.getHeads()
doc.applyPatches(mat, {}, (patches, info) => {
assert.deepEqual(info.before, before);
assert.deepEqual(info.after, after);
assert.deepEqual(info.from, from);
assert.deepEqual(info.to, to);
})
})
it('can handle utf16 text', () => {
const doc = create(true)
doc.enablePatches(true)


@ -51,7 +51,7 @@ fn main() {
doc1.merge(&mut doc2).unwrap();
for change in doc1.get_changes(&[]).unwrap() {
let length = doc1.length_at(&cards, &[change.hash()]);
let length = doc1.at(&[change.hash()]).length(&cards);
println!("{} {}", change.message().unwrap(), length);
}
}
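The change above swaps the removed `length_at` call for the new `at()` handle. The same move applies to every historical read: `doc.at(heads)` hands back a read-only view exposing the ordinary `ReadDoc` methods, so the `*_at` family goes away. A minimal sketch of the pattern against this branch (the `note` object and the text contents are illustrative, not taken from the diff):

use automerge::{transaction::Transactable, AutoCommit, AutomergeError, ObjType, ReadDoc, ROOT};

fn main() -> Result<(), AutomergeError> {
    let mut doc = AutoCommit::new();
    let note = doc.put_object(ROOT, "note", ObjType::Text)?;
    doc.splice_text(&note, 0, 0, "hello world")?;
    let heads = doc.get_heads(); // remember this point in history

    doc.splice_text(&note, 5, 0, " big bad")?;
    assert_eq!(doc.text(&note)?, "hello big bad world");

    // Historical reads now go through at(heads) instead of the removed *_at methods.
    let then = doc.at(&heads);
    assert_eq!(then.text(&note)?, "hello world"); // was doc.text_at(&note, &heads)
    assert_eq!(then.length(&note), 11);           // was doc.length_at(&note, &heads)
    Ok(())
}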


@ -1,11 +1,11 @@
use automerge::op_observer::HasPatches;
use automerge::transaction::CommitOptions;
use automerge::transaction::Transactable;
use automerge::Automerge;
use automerge::AutomergeError;
use automerge::Patch;
use automerge::ReadDoc;
use automerge::VecOpObserver;
use automerge::ROOT;
use automerge::{Patch, PatchAction};
fn main() {
let mut doc = Automerge::new();
@ -42,64 +42,52 @@ fn main() {
get_changes(&doc, patches);
}
fn get_changes(doc: &Automerge, patches: Vec<Patch>) {
for patch in patches {
match patch {
Patch::Put {
obj, prop, value, ..
} => {
fn get_changes(_doc: &Automerge, patches: Vec<Patch<char>>) {
for Patch { obj, path, action } in patches {
match action {
PatchAction::PutMap { key, value, .. } => {
println!(
"put {:?} at {:?} in obj {:?}, object path {:?}",
value,
prop,
obj,
doc.path_to_object(&obj)
value, key, obj, path,
)
}
Patch::Insert {
obj, index, value, ..
} => {
PatchAction::PutSeq { index, value, .. } => {
println!(
"put {:?} at {:?} in obj {:?}, object path {:?}",
value, index, obj, path,
)
}
PatchAction::Insert { index, values, .. } => {
println!(
"insert {:?} at {:?} in obj {:?}, object path {:?}",
value,
index,
obj,
doc.path_to_object(&obj)
values, index, obj, path,
)
}
Patch::Splice {
obj, index, value, ..
} => {
PatchAction::SpliceText { index, value, .. } => {
println!(
"splice '{:?}' at {:?} in obj {:?}, object path {:?}",
value,
index,
obj,
doc.path_to_object(&obj)
value, index, obj, path,
)
}
Patch::Increment {
obj, prop, value, ..
} => {
PatchAction::Increment { prop, value, .. } => {
println!(
"increment {:?} in obj {:?} by {:?}, object path {:?}",
prop,
obj,
value,
doc.path_to_object(&obj)
prop, obj, value, path,
)
}
Patch::Delete { obj, prop, .. } => println!(
PatchAction::DeleteMap { key, .. } => {
println!("delete {:?} in obj {:?}, object path {:?}", key, obj, path,)
}
PatchAction::DeleteSeq { index, .. } => println!(
"delete {:?} in obj {:?}, object path {:?}",
prop,
obj,
doc.path_to_object(&obj)
index, obj, path,
),
Patch::Expose { obj, prop, .. } => println!(
"expose {:?} in obj {:?}, object path {:?}",
prop,
obj,
doc.path_to_object(&obj)
PatchAction::Mark { marks } => {
println!("mark {:?} in obj {:?}, object path {:?}", marks, obj, path,)
}
PatchAction::Unmark { name, start, end } => println!(
"unmark {:?} from {} to {} in obj {:?}, object path {:?}",
name, start, end, obj, path,
),
}
}


@ -1,13 +1,12 @@
use std::ops::RangeBounds;
use crate::automerge;
use crate::exid::ExId;
use crate::marks::{ExpandMark, Mark};
use crate::op_observer::{BranchableObserver, OpObserver};
use crate::sync::SyncDoc;
use crate::transaction::{CommitOptions, Transactable};
use crate::{
sync, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, Parents, ReadDoc,
ScalarValue,
};
use crate::{sync, Keys, ListRange, MapRange, ObjType, Parents, ReadDoc, ScalarValue};
use crate::{
transaction::{Observation, Observed, TransactionInner, UnObserved},
ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, TextEncoding, Value, Values,
@ -255,6 +254,10 @@ impl<Obs: Observation> AutoCommitWithObs<Obs> {
self.doc.get_last_local_change()
}
pub fn at<'a,'b>(&'a self, heads: &'b [ChangeHash]) -> automerge::At<'a, 'b> {
self.doc.at(heads)
}
pub fn get_changes(
&mut self,
have_deps: &[ChangeHash],
@ -280,6 +283,11 @@ impl<Obs: Observation> AutoCommitWithObs<Obs> {
self.doc.import(s)
}
#[doc(hidden)]
pub fn import_obj(&self, s: &str) -> Result<ExId, AutomergeError> {
self.doc.import_obj(s)
}
#[doc(hidden)]
pub fn dump(&mut self) {
self.ensure_transaction_closed();
@ -305,6 +313,22 @@ impl<Obs: Observation> AutoCommitWithObs<Obs> {
self.doc.get_heads()
}
pub fn diff(
&mut self,
start: &[ChangeHash],
end: &[ChangeHash],
) -> Result<Obs::Obs, AutomergeError> {
self.ensure_transaction_closed();
if let Some(observer) = self.observation.observer() {
let mut branch = observer.branch();
self.doc.diff_with_observer(start, end, &mut branch)?;
Ok(branch)
} else {
Err(AutomergeError::NoObserver)
}
}
/// Commit any uncommitted changes
///
/// Returns `None` if there were no operations to commit
@ -379,14 +403,10 @@ impl<Obs: Observation> ReadDoc for AutoCommitWithObs<Obs> {
self.doc.path_to_object(obj)
}
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_, '_> {
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_> {
self.doc.keys(obj)
}
fn keys_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> {
self.doc.keys_at(obj, heads)
}
fn map_range<O: AsRef<ExId>, R: RangeBounds<String>>(
&self,
obj: O,
@ -395,15 +415,6 @@ impl<Obs: Observation> ReadDoc for AutoCommitWithObs<Obs> {
self.doc.map_range(obj, range)
}
fn map_range_at<O: AsRef<ExId>, R: RangeBounds<String>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> MapRangeAt<'_, R> {
self.doc.map_range_at(obj, range, heads)
}
fn list_range<O: AsRef<ExId>, R: RangeBounds<usize>>(
&self,
obj: O,
@ -412,45 +423,24 @@ impl<Obs: Observation> ReadDoc for AutoCommitWithObs<Obs> {
self.doc.list_range(obj, range)
}
fn list_range_at<O: AsRef<ExId>, R: RangeBounds<usize>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> ListRangeAt<'_, R> {
self.doc.list_range_at(obj, range, heads)
}
fn values<O: AsRef<ExId>>(&self, obj: O) -> Values<'_> {
self.doc.values(obj)
}
fn values_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> {
self.doc.values_at(obj, heads)
}
fn length<O: AsRef<ExId>>(&self, obj: O) -> usize {
self.doc.length(obj)
}
fn length_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> usize {
self.doc.length_at(obj, heads)
}
fn object_type<O: AsRef<ExId>>(&self, obj: O) -> Result<ObjType, AutomergeError> {
self.doc.object_type(obj)
}
fn text<O: AsRef<ExId>>(&self, obj: O) -> Result<String, AutomergeError> {
self.doc.text(obj)
fn marks<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<Mark<'_>>, AutomergeError> {
self.doc.marks(obj)
}
fn text_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<String, AutomergeError> {
self.doc.text_at(obj, heads)
fn text<O: AsRef<ExId>>(&self, obj: O) -> Result<String, AutomergeError> {
self.doc.text(obj)
}
fn get<O: AsRef<ExId>, P: Into<Prop>>(
@ -461,15 +451,6 @@ impl<Obs: Observation> ReadDoc for AutoCommitWithObs<Obs> {
self.doc.get(obj, prop)
}
fn get_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Option<(Value<'_>, ExId)>, AutomergeError> {
self.doc.get_at(obj, prop, heads)
}
fn get_all<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
@ -478,15 +459,6 @@ impl<Obs: Observation> ReadDoc for AutoCommitWithObs<Obs> {
self.doc.get_all(obj, prop)
}
fn get_all_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Vec<(Value<'_>, ExId)>, AutomergeError> {
self.doc.get_all_at(obj, prop, heads)
}
fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec<ChangeHash> {
self.doc.get_missing_deps(heads)
}
@ -621,6 +593,42 @@ impl<Obs: Observation> Transactable for AutoCommitWithObs<Obs> {
)
}
fn mark<O: AsRef<ExId>>(
&mut self,
obj: O,
mark: Mark<'_>,
expand: ExpandMark,
) -> Result<(), AutomergeError> {
self.ensure_transaction_open();
let (current, tx) = self.transaction.as_mut().unwrap();
tx.mark(
&mut self.doc,
current.observer(),
obj.as_ref(),
mark,
expand,
)
}
fn unmark<O: AsRef<ExId>>(
&mut self,
obj: O,
key: &str,
start: usize,
end: usize,
) -> Result<(), AutomergeError> {
self.ensure_transaction_open();
let (current, tx) = self.transaction.as_mut().unwrap();
tx.unmark(
&mut self.doc,
current.observer(),
obj.as_ref(),
key,
start,
end,
)
}
fn base_heads(&self) -> Vec<ChangeHash> {
self.doc.get_heads()
}
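With `mark` and `unmark` on `Transactable` and `marks` on `ReadDoc`, the Rust API now mirrors the JS calls exercised in the tests earlier in this diff. The sketch below is hedged: the `Mark::new(name, value, start, end)` constructor and the `ExpandMark::Both` variant are assumptions about this branch, used only to show how the new methods fit together.

use automerge::{
    marks::{ExpandMark, Mark},
    transaction::Transactable,
    AutoCommit, AutomergeError, ObjType, ReadDoc, ROOT,
};

fn main() -> Result<(), AutomergeError> {
    let mut doc = AutoCommit::new();
    let note = doc.put_object(ROOT, "note", ObjType::Text)?;
    doc.splice_text(&note, 0, 0, "the quick fox jumps over the lazy dog")?;

    // Assumed constructor shape: (name, value, start, end); ExpandMark::Both is also assumed.
    doc.mark(&note, Mark::new("xxx".into(), "aaa", 5, 15), ExpandMark::Both)?;

    // unmark clears the named mark over a sub-range (signature as added above).
    doc.unmark(&note, "xxx", 9, 12)?;

    // marks() reports the surviving spans; after the unmark the original run is
    // expected to be split around 9..12.
    let marks = doc.marks(&note)?;
    println!("{} mark span(s) remain", marks.len());
    Ok(())
}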


@ -4,10 +4,13 @@ use std::fmt::Debug;
use std::num::NonZeroU64;
use std::ops::RangeBounds;
use itertools::Itertools;
use crate::change_graph::ChangeGraph;
use crate::columnar::Key as EncodedKey;
use crate::exid::ExId;
use crate::keys::Keys;
use crate::marks::{Mark, MarkStateMachine};
use crate::op_observer::{BranchableObserver, OpObserver};
use crate::op_set::OpSet;
use crate::parents::Parents;
@ -16,16 +19,16 @@ use crate::transaction::{
self, CommitOptions, Failure, Observed, Success, Transaction, TransactionArgs, UnObserved,
};
use crate::types::{
ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ListEncoding, ObjId, Op, OpId,
OpType, ScalarValue, TextEncoding, Value,
ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ListEncoding, MarkData, ObjId, Op,
OpId, OpType, TextEncoding, Value,
};
use crate::{
query, AutomergeError, Change, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType,
Prop, ReadDoc, Values,
};
use serde::Serialize;
use crate::{query, AutomergeError, Change, ListRange, MapRange, ObjType, Prop, ReadDoc, Values};
mod at;
mod current_state;
mod diff;
pub use at::At;
#[cfg(test)]
mod tests;
@ -208,6 +211,10 @@ impl Automerge {
}
}
pub fn at<'a, 'b>(&'a self, heads: &'b [ChangeHash]) -> at::At<'a, 'b> {
at::At { heads, doc: self }
}
/// Start a transaction.
pub fn transaction(&mut self) -> Transaction<'_, UnObserved> {
let args = self.transaction_args();
@ -400,21 +407,8 @@ impl Automerge {
pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result<(ObjId, ObjType), AutomergeError> {
match id {
ExId::Root => Ok((ObjId::root(), ObjType::Map)),
ExId::Id(ctr, actor, idx) => {
// do a direct get here b/c this could be foreign and not be within the array
// bounds
let obj = if self.ops.m.actors.cache.get(*idx) == Some(actor) {
ObjId(OpId::new(*ctr, *idx))
} else {
// FIXME - make a real error
let idx = self
.ops
.m
.actors
.lookup(actor)
.ok_or(AutomergeError::Fail)?;
ObjId(OpId::new(*ctr, idx))
};
ExId::Id(..) => {
let obj = ObjId(self.exid_to_opid(id)?);
if let Some(obj_type) = self.ops.object_type(&obj) {
Ok((obj, obj_type))
} else {
@ -424,10 +418,34 @@ impl Automerge {
}
}
pub(crate) fn exid_to_opid(&self, id: &ExId) -> Result<OpId, AutomergeError> {
match id {
ExId::Root => Err(AutomergeError::Fail),
ExId::Id(ctr, actor, idx) => {
if self.ops.m.actors.cache.get(*idx) == Some(actor) {
Ok(OpId::new(*ctr, *idx))
} else {
// FIXME - make a real error
let idx = self
.ops
.m
.actors
.lookup(actor)
.ok_or(AutomergeError::Fail)?;
Ok(OpId::new(*ctr, idx))
}
}
}
}
pub(crate) fn id_to_exid(&self, id: OpId) -> ExId {
self.ops.id_to_exid(id)
}
pub(crate) fn tagged_value<'a>(&self, op: &'a Op) -> (Value<'a>, ExId) {
(op.value(), self.id_to_exid(op.id))
}
/// Load a document.
pub fn load(data: &[u8]) -> Result<Self, AutomergeError> {
Self::load_with::<()>(data, OnPartialLoad::Error, VerificationMode::Check, None)
@ -723,7 +741,12 @@ impl Automerge {
obj,
Op {
id,
action: OpType::from_action_and_value(c.action, c.val),
action: OpType::from_action_and_value(
c.action,
c.val,
c.mark_name,
c.expand,
),
key,
succ: Default::default(),
pred,
@ -913,8 +936,21 @@ impl Automerge {
#[doc(hidden)]
pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> {
if s == "_root" {
let obj = self.import_obj(s)?;
if obj == ExId::Root {
Ok((ExId::Root, ObjType::Map))
} else {
let obj_type = self
.object_type(&obj)
.map_err(|_| AutomergeError::InvalidObjId(s.to_owned()))?;
Ok((obj, obj_type))
}
}
#[doc(hidden)]
pub fn import_obj(&self, s: &str) -> Result<ExId, AutomergeError> {
if s == "_root" {
Ok(ExId::Root)
} else {
let n = s
.find('@')
@ -930,10 +966,7 @@ impl Automerge {
.lookup(&actor)
.ok_or_else(|| AutomergeError::InvalidObjId(s.to_owned()))?;
let obj = ExId::Id(counter, self.ops.m.actors.cache[actor].clone(), actor);
let obj_type = self
.object_type(&obj)
.map_err(|_| AutomergeError::InvalidObjId(s.to_owned()))?;
Ok((obj, obj_type))
Ok(obj)
}
}
@ -967,6 +1000,10 @@ impl Automerge {
OpType::Make(obj) => format!("make({})", obj),
OpType::Increment(obj) => format!("inc({})", obj),
OpType::Delete => format!("del{}", 0),
OpType::MarkBegin(_, MarkData { name, value }) => {
format!("mark({},{})", name, value)
}
OpType::MarkEnd(_) => "/mark".to_string(),
};
let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect();
let succ: Vec<_> = op.succ.into_iter().map(|id| self.to_string(*id)).collect();
@ -1045,11 +1082,18 @@ impl Automerge {
};
if op.insert {
if obj_type == Some(ObjType::Text) {
if op.is_mark() {
if let OpType::MarkEnd(_) = op.action {
let q = self
.ops
.search(obj, query::SeekMark::new(op.id.prev(), pos, encoding));
observer.mark(self, ex_obj, q.marks.into_iter());
}
} else if obj_type == Some(ObjType::Text) {
observer.splice_text(self, ex_obj, seen, op.to_str());
} else {
let value = (op.value(), self.ops.id_to_exid(op.id));
observer.insert(self, ex_obj, seen, value);
observer.insert(self, ex_obj, seen, value, false);
}
} else if op.is_delete() {
if let Some(winner) = &values.last() {
@ -1081,7 +1125,7 @@ impl Automerge {
.unwrap_or(false);
let value = (op.value(), self.ops.id_to_exid(op.id));
if op.is_list_op() && !had_value_before {
observer.insert(self, ex_obj, seen, value);
observer.insert(self, ex_obj, seen, value, false);
} else if just_conflict {
observer.flag_conflict(self, ex_obj, key);
} else {
@ -1099,6 +1143,19 @@ impl Automerge {
op
}
pub fn diff_with_observer<Obs: OpObserver>(
&self,
start: &[ChangeHash],
end: &[ChangeHash],
observer: &mut Obs,
) -> Result<(), AutomergeError> {
let start = self.clock_at(start);
let end = self.clock_at(end);
diff::observe_diff(self, &start, &end, observer);
Ok(())
}
/// Get the heads of this document.
pub fn get_heads(&self) -> Vec<ChangeHash> {
let mut deps: Vec<_> = self.deps.iter().copied().collect();
@ -1147,21 +1204,13 @@ impl ReadDoc for Automerge {
Ok(self.parents(obj.as_ref().clone())?.path())
}
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_, '_> {
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_> {
if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) {
let iter_keys = self.ops.keys(obj);
Keys::new(self, iter_keys)
} else {
Keys::new(self, None)
if let Some(iter_keys) = self.ops.keys(obj) {
return Keys::new(self).with_keys(iter_keys);
}
}
}
fn keys_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> {
if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
return KeysAt::new(self, self.ops.keys_at(obj, clock));
}
KeysAt::new(self, None)
Keys::new(self)
}
fn map_range<O: AsRef<ExId>, R: RangeBounds<String>>(
@ -1170,24 +1219,11 @@ impl ReadDoc for Automerge {
range: R,
) -> MapRange<'_, R> {
if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) {
MapRange::new(self, self.ops.map_range(obj, range))
} else {
MapRange::new(self, None)
if let Some(map_range) = self.ops.map_range(obj, range) {
return MapRange::new(self).with_map_range(map_range);
}
}
}
fn map_range_at<O: AsRef<ExId>, R: RangeBounds<String>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> MapRangeAt<'_, R> {
if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
let iter_range = self.ops.map_range_at(obj, range, clock);
return MapRangeAt::new(self, iter_range);
}
MapRangeAt::new(self, None)
MapRange::new(self)
}
fn list_range<O: AsRef<ExId>, R: RangeBounds<usize>>(
@ -1196,24 +1232,11 @@ impl ReadDoc for Automerge {
range: R,
) -> ListRange<'_, R> {
if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) {
ListRange::new(self, self.ops.list_range(obj, range))
} else {
ListRange::new(self, None)
if let Some(list_range) = self.ops.list_range(obj, range) {
return ListRange::new(self).with_list_range(list_range);
}
}
}
fn list_range_at<O: AsRef<ExId>, R: RangeBounds<usize>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> ListRangeAt<'_, R> {
if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
let iter_range = self.ops.list_range_at(obj, range, clock);
return ListRangeAt::new(self, iter_range);
}
ListRangeAt::new(self, None)
ListRange::new(self)
}
fn values<O: AsRef<ExId>>(&self, obj: O) -> Values<'_> {
@ -1228,24 +1251,6 @@ impl ReadDoc for Automerge {
}
}
fn values_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> {
if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
match obj_type {
ObjType::Map | ObjType::Table => {
let iter_range = self.ops.map_range_at(obj, .., clock);
Values::new(self, iter_range)
}
ObjType::List | ObjType::Text => {
let iter_range = self.ops.list_range_at(obj, .., clock);
Values::new(self, iter_range)
}
}
} else {
Values::empty(self)
}
}
fn length<O: AsRef<ExId>>(&self, obj: O) -> usize {
if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) {
if obj_type == ObjType::Map || obj_type == ObjType::Table {
@ -1259,22 +1264,6 @@ impl ReadDoc for Automerge {
}
}
fn length_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> usize {
if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
if obj_type == ObjType::Map || obj_type == ObjType::Table {
self.keys_at(obj, heads).count()
} else {
let encoding = ListEncoding::new(obj_type, self.text_encoding);
self.ops
.search(&inner_obj, query::LenAt::new(clock, encoding))
.len
}
} else {
0
}
}
fn object_type<O: AsRef<ExId>>(&self, obj: O) -> Result<ObjType, AutomergeError> {
let (_, obj_type) = self.exid_to_obj(obj.as_ref())?;
Ok(obj_type)
@ -1290,23 +1279,30 @@ impl ReadDoc for Automerge {
Ok(buffer)
}
fn text_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<String, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?.0;
let clock = self.clock_at(heads);
let query = self.ops.search(&obj, query::ListValsAt::new(clock));
let mut buffer = String::new();
for q in &query.ops {
if let OpType::Put(ScalarValue::Str(s)) = &q.action {
buffer.push_str(s);
} else {
buffer.push('\u{fffc}');
}
}
Ok(buffer)
fn marks<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<Mark<'_>>, AutomergeError> {
let (obj, obj_type) = self.exid_to_obj(obj.as_ref())?;
let encoding = ListEncoding::new(obj_type, self.text_encoding);
let ops_by_key = self.ops().iter_ops(&obj).group_by(|o| o.elemid_or_key());
let mut pos = 0;
let mut marks = MarkStateMachine::default();
Ok(ops_by_key
.into_iter()
.filter_map(|(_key, key_ops)| {
key_ops
.filter(|o| o.visible_or_mark())
.last()
.and_then(|o| match &o.action {
OpType::Make(_) | OpType::Put(_) => {
pos += o.width(encoding);
None
}
OpType::MarkBegin(_, data) => marks.mark_begin(o.id, pos, data, self),
OpType::MarkEnd(_) => marks.mark_end(o.id, pos, self),
OpType::Increment(_) | OpType::Delete => None,
})
})
.collect())
}
fn get<O: AsRef<ExId>, P: Into<Prop>>(
@ -1317,15 +1313,6 @@ impl ReadDoc for Automerge {
Ok(self.get_all(obj, prop.into())?.last().cloned())
}
fn get_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Option<(Value<'_>, ExId)>, AutomergeError> {
Ok(self.get_all_at(obj, prop, heads)?.last().cloned())
}
fn get_all<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
@ -1363,45 +1350,6 @@ impl ReadDoc for Automerge {
Ok(result)
}
fn get_all_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Vec<(Value<'_>, ExId)>, AutomergeError> {
let prop = prop.into();
let obj = self.exid_to_obj(obj.as_ref())?.0;
let clock = self.clock_at(heads);
let result = match prop {
Prop::Map(p) => {
let prop = self.ops.m.props.lookup(&p);
if let Some(p) = prop {
self.ops
.search(&obj, query::PropAt::new(p, clock))
.ops
.into_iter()
.map(|o| (o.clone_value(), self.id_to_exid(o.id)))
.collect()
} else {
vec![]
}
}
Prop::Seq(n) => {
let obj_type = self.ops.object_type(&obj);
let encoding = obj_type
.map(|o| ListEncoding::new(o, self.text_encoding))
.unwrap_or_default();
self.ops
.search(&obj, query::NthAt::new(n, clock, encoding))
.ops
.into_iter()
.map(|o| (o.clone_value(), self.id_to_exid(o.id)))
.collect()
}
};
Ok(result)
}
fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec<ChangeHash> {
let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash()).collect();
let mut missing = HashSet::new();
@ -1439,14 +1387,3 @@ impl Default for Automerge {
Self::new()
}
}
#[derive(Serialize, Debug, Clone, PartialEq)]
pub(crate) struct SpanInfo {
pub(crate) id: ExId,
pub(crate) time: i64,
pub(crate) start: usize,
pub(crate) end: usize,
#[serde(rename = "type")]
pub(crate) span_type: String,
pub(crate) value: ScalarValue,
}
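`diff_with_observer`, added above, is the low-level entry point behind `AutoCommit::diff()`: it replays the difference between two sets of heads into any `OpObserver`. A small sketch using the crate's `VecOpObserver` (its `Default` construction is assumed here; draining the accumulated patches afterwards is left to that type's own API):

use automerge::{transaction::Transactable, Automerge, AutomergeError, VecOpObserver, ROOT};

fn main() -> Result<(), AutomergeError> {
    let mut doc = Automerge::new();

    let mut tx = doc.transaction();
    tx.put(ROOT, "status", "draft")?;
    tx.commit();
    let before = doc.get_heads();

    let mut tx = doc.transaction();
    tx.put(ROOT, "status", "published")?;
    tx.commit();
    let after = doc.get_heads();

    // Replay everything between the two sets of heads into an observer.
    let mut observer = VecOpObserver::default();
    doc.diff_with_observer(&before, &after, &mut observer)?;
    // `observer` now holds the patches describing before -> after.
    Ok(())
}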


@ -0,0 +1,268 @@
use crate::exid::ExId;
use crate::marks::{Mark, MarkStateMachine};
use crate::query;
use crate::types::{ListEncoding, OpType};
use crate::value::ScalarValue;
use crate::{
Automerge, AutomergeError, Change, ChangeHash, Keys, ListRange, MapRange, ObjType, Parents,
Prop, ReadDoc, Value, Values,
};
use itertools::Itertools;
use std::ops::RangeBounds;
#[derive(Debug)]
pub struct At<'a, 'b> {
pub(crate) doc: &'a Automerge,
pub(crate) heads: &'b [ChangeHash],
}
impl<'a,'b> ReadDoc for At<'a,'b> {
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'a> {
self.doc.keys_at(obj, self.heads)
}
fn parents<O: AsRef<ExId>>(&self, obj: O) -> Result<Parents<'a>, AutomergeError> {
// FIXME - need a parents_at()
self.doc.parents(obj)
}
fn path_to_object<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<(ExId, Prop)>, AutomergeError> {
// FIXME - need a path_to_object_at()
self.doc.path_to_object(obj)
}
fn map_range<O: AsRef<ExId>, R: RangeBounds<String>>(
&self,
obj: O,
range: R,
) -> MapRange<'a, R> {
self.doc.map_range_at(obj, range, self.heads)
}
fn list_range<O: AsRef<ExId>, R: RangeBounds<usize>>(
&self,
obj: O,
range: R,
) -> ListRange<'a, R> {
self.doc.list_range_at(obj, range, self.heads)
}
fn values<O: AsRef<ExId>>(&self, obj: O) -> Values<'_> {
self.doc.values_at(obj, self.heads)
}
fn length<O: AsRef<ExId>>(&self, obj: O) -> usize {
self.doc.length_at(obj, self.heads)
}
fn object_type<O: AsRef<ExId>>(&self, obj: O) -> Result<ObjType, AutomergeError> {
self.doc.object_type(obj)
}
fn marks<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<Mark<'a>>, AutomergeError> {
self.doc.marks_at(obj, self.heads)
}
fn text<O: AsRef<ExId>>(&self, obj: O) -> Result<String, AutomergeError> {
self.doc.text_at(obj, self.heads)
}
fn get<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
) -> Result<Option<(Value<'a>, ExId)>, AutomergeError> {
self.doc.get_at(obj, prop, self.heads)
}
fn get_all<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
) -> Result<Vec<(Value<'a>, ExId)>, AutomergeError> {
self.doc.get_all_at(obj, prop, self.heads)
}
fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec<ChangeHash> {
todo!()
}
fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> {
todo!()
}
}
impl Automerge {
fn keys_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> Keys<'_> {
if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
if let Some(keys_at) = self.ops.keys_at(obj, clock) {
return Keys::new(self).with_keys_at(keys_at);
}
}
Keys::new(self)
}
fn map_range_at<O: AsRef<ExId>, R: RangeBounds<String>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> MapRange<'_, R> {
if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
if let Some(iter_range) = self.ops.map_range_at(obj, range, clock) {
return MapRange::new(self).with_map_range_at(iter_range);
}
}
MapRange::new(self)
}
fn list_range_at<'a, 'b, O: AsRef<ExId>, R: RangeBounds<usize>>(
&'a self,
obj: O,
range: R,
heads: &'b [ChangeHash],
) -> ListRange<'a, R> {
if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
if let Some(iter_range) = self.ops.list_range_at(obj, range, clock) {
return ListRange::new(self).with_list_range_at(iter_range);
}
}
ListRange::new(self)
}
fn values_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> {
if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
match obj_type {
ObjType::Map | ObjType::Table => {
let iter_range = self.ops.map_range_at(obj, .., clock);
Values::new(self, iter_range)
}
ObjType::List | ObjType::Text => {
let iter_range = self.ops.list_range_at(obj, .., clock);
Values::new(self, iter_range)
}
}
} else {
Values::empty(self)
}
}
fn length_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> usize {
if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) {
let clock = self.clock_at(heads);
if obj_type == ObjType::Map || obj_type == ObjType::Table {
self.keys_at(obj, heads).count()
} else {
let encoding = ListEncoding::new(obj_type, self.text_encoding);
self.ops
.search(&inner_obj, query::LenAt::new(clock, encoding))
.len
}
} else {
0
}
}
fn text_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<String, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?.0;
let clock = self.clock_at(heads);
let query = self.ops.search(&obj, query::ListValsAt::new(clock));
let mut buffer = String::new();
for q in &query.ops {
if let OpType::Put(ScalarValue::Str(s)) = &q.action {
buffer.push_str(s);
} else {
buffer.push('\u{fffc}');
}
}
Ok(buffer)
}
fn marks_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<Vec<Mark<'_>>, AutomergeError> {
let (obj, obj_type) = self.exid_to_obj(obj.as_ref())?;
let clock = self.clock_at(heads);
let encoding = ListEncoding::new(obj_type, self.text_encoding);
let ops_by_key = self.ops().iter_ops(&obj).group_by(|o| o.elemid_or_key());
let mut window = query::VisWindow::default();
let mut pos = 0;
let mut marks = MarkStateMachine::default();
Ok(ops_by_key
.into_iter()
.filter_map(|(_key, key_ops)| {
key_ops
.filter(|o| window.visible_at(o, pos, &clock))
.last()
.and_then(|o| match &o.action {
OpType::Make(_) | OpType::Put(_) => {
pos += o.width(encoding);
None
}
OpType::MarkBegin(_, data) => marks.mark_begin(o.id, pos, data, self),
OpType::MarkEnd(_) => marks.mark_end(o.id, pos, self),
OpType::Increment(_) | OpType::Delete => None,
})
})
.collect())
}
fn get_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Option<(Value<'_>, ExId)>, AutomergeError> {
Ok(self.get_all_at(obj, prop, heads)?.last().cloned())
}
fn get_all_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Vec<(Value<'_>, ExId)>, AutomergeError> {
let prop = prop.into();
let obj = self.exid_to_obj(obj.as_ref())?.0;
let clock = self.clock_at(heads);
let result = match prop {
Prop::Map(p) => {
let prop = self.ops.m.props.lookup(&p);
if let Some(p) = prop {
self.ops
.search(&obj, query::PropAt::new(p, clock))
.ops
.into_iter()
.map(|o| (o.clone_value(), self.id_to_exid(o.id)))
.collect()
} else {
vec![]
}
}
Prop::Seq(n) => {
let obj_type = self.ops.object_type(&obj);
let encoding = obj_type
.map(|o| ListEncoding::new(o, self.text_encoding))
.unwrap_or_default();
self.ops
.search(&obj, query::NthAt::new(n, clock, encoding))
.ops
.into_iter()
.map(|o| (o.clone_value(), self.id_to_exid(o.id)))
.collect()
}
};
Ok(result)
}
}
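Since `At` implements the same `ReadDoc` trait as the document types, read-only helpers can be written once and pointed at either the live document or a historical view. A small sketch (the `key_count` helper and the keys it writes are illustrative):

use automerge::{transaction::Transactable, Automerge, AutomergeError, ReadDoc, ROOT};

// Accepts Automerge, AutoCommit, or the At view returned by doc.at(heads).
fn key_count<R: ReadDoc>(doc: &R) -> usize {
    doc.keys(ROOT).count()
}

fn main() -> Result<(), AutomergeError> {
    let mut doc = Automerge::new();

    let mut tx = doc.transaction();
    tx.put(ROOT, "title", "hello")?;
    tx.commit();
    let heads = doc.get_heads();

    let mut tx = doc.transaction();
    tx.put(ROOT, "body", "world")?;
    tx.commit();

    assert_eq!(key_count(&doc), 2);            // current state
    assert_eq!(key_count(&doc.at(&heads)), 1); // state as of `heads`
    Ok(())
}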


@ -1,12 +1,27 @@
use std::{borrow::Cow, collections::HashSet, iter::Peekable};
use std::borrow::Cow;
use itertools::Itertools;
use crate::{
types::{ElemId, Key, ListEncoding, ObjId, Op, OpId},
ObjType, OpObserver, OpType, ScalarValue, Value,
marks::{Mark, MarkStateMachine},
types::{Key, ListEncoding, ObjId, Op, OpId, Prop},
Automerge, ObjType, OpObserver, OpType, Value,
};
#[derive(Debug, Default)]
struct TextState<'a> {
text: String,
len: usize,
marks: MarkStateMachine<'a>,
finished: Vec<Mark<'a>>,
}
struct Put<'a> {
value: Value<'a>,
key: Key,
id: OpId,
}
/// Traverse the "current" state of the document, notifying `observer`
///
/// The "current" state of the document is the set of visible operations. This function will
@ -17,7 +32,8 @@ use crate::{
///
/// Due to only notifying of visible operations the observer will only be called with `put`,
/// `insert`, and `splice`, operations.
pub(super) fn observe_current_state<O: OpObserver>(doc: &crate::Automerge, observer: &mut O) {
pub(crate) fn observe_current_state<O: OpObserver>(doc: &Automerge, observer: &mut O) {
// The OpSet already exposes operations in the order they appear in the document.
// `OpSet::iter_objs` iterates over the objects in causal order, this means that parent objects
// will always appear before their children. Furthermore, the operations within each object are
@ -26,321 +42,398 @@ pub(super) fn observe_current_state<O: OpObserver>(doc: &crate::Automerge, obser
// Effectively then we iterate over each object, then we group the operations in the object by
// key and for each key find the visible operations for that key. Then we notify the observer
// for each of those visible operations.
let mut visible_objs = HashSet::new();
visible_objs.insert(ObjId::root());
for (obj, typ, ops) in doc.ops().iter_objs() {
if !visible_objs.contains(obj) {
continue;
}
let ops_by_key = ops.group_by(|o| o.key);
let actions = ops_by_key
.into_iter()
.flat_map(|(key, key_ops)| key_actions(key, key_ops));
if typ == ObjType::Text && !observer.text_as_seq() {
track_new_objs_and_notify(
&mut visible_objs,
doc,
obj,
typ,
observer,
text_actions(actions),
)
} else if typ == ObjType::List {
track_new_objs_and_notify(
&mut visible_objs,
doc,
obj,
typ,
observer,
list_actions(actions),
)
observe_text(doc, observer, obj, ops)
} else if typ.is_sequence() {
observe_list(doc, observer, obj, ops);
} else {
track_new_objs_and_notify(&mut visible_objs, doc, obj, typ, observer, actions)
observe_map(doc, observer, obj, ops);
}
}
}
fn track_new_objs_and_notify<N: Action, I: Iterator<Item = N>, O: OpObserver>(
visible_objs: &mut HashSet<ObjId>,
doc: &crate::Automerge,
obj: &ObjId,
typ: ObjType,
fn observe_text<'a, I: Iterator<Item = &'a Op>, O: OpObserver>(
doc: &'a Automerge,
observer: &mut O,
actions: I,
obj: &ObjId,
ops: I,
) {
let exid = doc.id_to_exid(obj.0);
for action in actions {
if let Some(obj) = action.made_object() {
visible_objs.insert(obj);
}
action.notify_observer(doc, &exid, obj, typ, observer);
}
}
trait Action {
/// Notify an observer of whatever this action does
fn notify_observer<O: OpObserver>(
self,
doc: &crate::Automerge,
exid: &crate::ObjId,
obj: &ObjId,
typ: ObjType,
observer: &mut O,
);
/// If this action created an object, return the ID of that object
fn made_object(&self) -> Option<ObjId>;
}
fn key_actions<'a, I: Iterator<Item = &'a Op>>(
key: Key,
key_ops: I,
) -> impl Iterator<Item = SimpleAction<'a>> {
#[derive(Clone)]
enum CurrentOp<'a> {
Put {
value: Value<'a>,
id: OpId,
conflicted: bool,
},
Insert(Value<'a>, OpId),
}
let current_ops = key_ops
.filter(|o| o.visible())
.filter_map(|o| match o.action {
OpType::Make(obj_type) => {
let value = Value::Object(obj_type);
if o.insert {
Some(CurrentOp::Insert(value, o.id))
} else {
Some(CurrentOp::Put {
value,
id: o.id,
conflicted: false,
})
}
}
OpType::Put(ref value) => {
let value = Value::Scalar(Cow::Borrowed(value));
if o.insert {
Some(CurrentOp::Insert(value, o.id))
} else {
Some(CurrentOp::Put {
value,
id: o.id,
conflicted: false,
})
}
}
_ => None,
});
current_ops
.coalesce(|previous, current| match (previous, current) {
(CurrentOp::Put { .. }, CurrentOp::Put { value, id, .. }) => Ok(CurrentOp::Put {
value,
id,
conflicted: true,
}),
(previous, current) => Err((previous, current)),
})
.map(move |op| match op {
CurrentOp::Put {
value,
id,
conflicted,
} => SimpleAction::Put {
prop: key,
tagged_value: (value, id),
conflict: conflicted,
},
CurrentOp::Insert(val, id) => SimpleAction::Insert {
elem_id: ElemId(id),
tagged_value: (val, id),
},
})
}
/// Either a "put" or "insert" action. i.e. not splicing for text values
enum SimpleAction<'a> {
Put {
prop: Key,
tagged_value: (Value<'a>, OpId),
conflict: bool,
},
Insert {
elem_id: ElemId,
tagged_value: (Value<'a>, OpId),
},
}
impl<'a> Action for SimpleAction<'a> {
fn notify_observer<O: OpObserver>(
self,
doc: &crate::Automerge,
exid: &crate::ObjId,
obj: &ObjId,
typ: ObjType,
observer: &mut O,
) {
let encoding = match typ {
ObjType::Text => ListEncoding::Text(doc.text_encoding()),
_ => ListEncoding::List,
};
match self {
Self::Put {
prop,
tagged_value,
conflict,
} => {
let tagged_value = (tagged_value.0, doc.id_to_exid(tagged_value.1));
let prop = doc.ops().export_key(*obj, prop, encoding).unwrap();
observer.put(doc, exid.clone(), prop, tagged_value, conflict);
}
Self::Insert {
elem_id,
tagged_value: (value, opid),
} => {
let index = doc
.ops()
.search(obj, crate::query::ElemIdPos::new(elem_id, encoding))
.index()
.unwrap();
let tagged_value = (value, doc.id_to_exid(opid));
observer.insert(doc, doc.id_to_exid(obj.0), index, tagged_value);
}
}
}
fn made_object(&self) -> Option<ObjId> {
match self {
Self::Put {
tagged_value: (Value::Object(_), id),
..
} => Some((*id).into()),
Self::Insert {
tagged_value: (Value::Object(_), id),
..
} => Some((*id).into()),
_ => None,
}
}
}
/// An `Action` which splices for text values
enum TextAction<'a> {
Action(SimpleAction<'a>),
Splice { start: ElemId, chars: String },
}
impl<'a> Action for TextAction<'a> {
fn notify_observer<O: OpObserver>(
self,
doc: &crate::Automerge,
exid: &crate::ObjId,
obj: &ObjId,
typ: ObjType,
observer: &mut O,
) {
match self {
Self::Action(action) => action.notify_observer(doc, exid, obj, typ, observer),
Self::Splice { start, chars } => {
let index = doc
.ops()
.search(
obj,
crate::query::ElemIdPos::new(
start,
ListEncoding::Text(doc.text_encoding()),
),
)
.index()
.unwrap();
observer.splice_text(doc, doc.id_to_exid(obj.0), index, chars.as_str());
}
}
}
fn made_object(&self) -> Option<ObjId> {
match self {
Self::Action(action) => action.made_object(),
_ => None,
}
}
}
fn list_actions<'a, I: Iterator<Item = SimpleAction<'a>>>(
actions: I,
) -> impl Iterator<Item = SimpleAction<'a>> {
actions.map(|a| match a {
SimpleAction::Put {
prop: Key::Seq(elem_id),
tagged_value,
..
} => SimpleAction::Insert {
elem_id,
tagged_value,
},
a => a,
})
}
/// Condense consecutive `SimpleAction::Insert` actions into one `TextAction::Splice`
fn text_actions<'a, I>(actions: I) -> impl Iterator<Item = TextAction<'a>>
where
I: Iterator<Item = SimpleAction<'a>>,
{
TextActions {
ops: actions.peekable(),
}
}
struct TextActions<'a, I: Iterator<Item = SimpleAction<'a>>> {
ops: Peekable<I>,
}
impl<'a, I: Iterator<Item = SimpleAction<'a>>> Iterator for TextActions<'a, I> {
type Item = TextAction<'a>;
fn next(&mut self) -> Option<Self::Item> {
if let Some(SimpleAction::Insert { .. }) = self.ops.peek() {
let (start, value) = match self.ops.next() {
Some(SimpleAction::Insert {
tagged_value: (value, opid),
..
}) => (opid, value),
_ => unreachable!(),
};
let mut chars = match value {
Value::Scalar(Cow::Borrowed(ScalarValue::Str(s))) => s.to_string(),
_ => "\u{fffc}".to_string(),
};
while let Some(SimpleAction::Insert { .. }) = self.ops.peek() {
if let Some(SimpleAction::Insert {
tagged_value: (value, _),
..
}) = self.ops.next()
{
match value {
Value::Scalar(Cow::Borrowed(ScalarValue::Str(s))) => chars.push_str(s),
_ => chars.push('\u{fffc}'),
let ops_by_key = ops.group_by(|o| o.elemid_or_key());
let encoding = ListEncoding::Text(doc.text_encoding());
let state = TextState::default();
let state = ops_by_key
.into_iter()
.fold(state, |mut state, (_key, key_ops)| {
if let Some(o) = key_ops.filter(|o| o.visible_or_mark()).last() {
match &o.action {
OpType::Make(_) | OpType::Put(_) => {
state.text.push_str(o.to_str());
state.len += o.width(encoding);
}
OpType::MarkBegin(_, data) => {
if let Some(mark) = state.marks.mark_begin(o.id, state.len, data, doc) {
state.finished.push(mark);
}
}
OpType::MarkEnd(_) => {
if let Some(mark) = state.marks.mark_end(o.id, state.len, doc) {
state.finished.push(mark);
}
}
OpType::Increment(_) | OpType::Delete => {}
}
}
Some(TextAction::Splice {
start: ElemId(start),
chars,
state
});
observer.splice_text(doc, exid.clone(), 0, state.text.as_str());
observer.mark(doc, exid, state.finished.into_iter());
}
fn observe_list<'a, I: Iterator<Item = &'a Op>, O: OpObserver>(
doc: &'a Automerge,
observer: &mut O,
obj: &ObjId,
ops: I,
) {
let exid = doc.id_to_exid(obj.0);
let mut marks = MarkStateMachine::default();
let ops_by_key = ops.group_by(|o| o.elemid_or_key());
let mut len = 0;
let mut finished = Vec::new();
ops_by_key
.into_iter()
.filter_map(|(_key, key_ops)| {
key_ops
.filter(|o| o.visible_or_mark())
.filter_map(|o| match &o.action {
OpType::Make(obj_type) => Some((Value::Object(*obj_type), o.id)),
OpType::Put(value) => Some((Value::Scalar(Cow::Borrowed(value)), o.id)),
OpType::MarkBegin(_, data) => {
if let Some(mark) = marks.mark_begin(o.id, len, data, doc) {
// side effect
finished.push(mark)
}
None
}
OpType::MarkEnd(_) => {
if let Some(mark) = marks.mark_end(o.id, len, doc) {
// side effect
finished.push(mark)
}
None
}
_ => None,
})
.enumerate()
.last()
.map(|value| {
let pos = len;
len += 1; // increment - side effect
(pos, value)
})
})
.for_each(|(index, (val_enum, (value, opid)))| {
let tagged_value = (value, doc.id_to_exid(opid));
let conflict = val_enum > 0;
observer.insert(doc, exid.clone(), index, tagged_value, conflict);
});
observer.mark(doc, exid, finished.into_iter());
}
fn observe_map_key<'a, I: Iterator<Item = &'a Op>>(
(key, key_ops): (Key, I),
) -> Option<(usize, Put<'a>)> {
key_ops
.filter(|o| o.visible())
.filter_map(|o| match &o.action {
OpType::Make(obj_type) => {
let value = Value::Object(*obj_type);
Some(Put {
value,
key,
id: o.id,
})
}
OpType::Put(value) => {
let value = Value::Scalar(Cow::Borrowed(value));
Some(Put {
value,
key,
id: o.id,
})
}
_ => None,
})
.enumerate()
.last()
}
fn observe_map<'a, I: Iterator<Item = &'a Op>, O: OpObserver>(
doc: &'a Automerge,
observer: &mut O,
obj: &ObjId,
ops: I,
) {
let exid = doc.id_to_exid(obj.0);
let ops_by_key = ops.group_by(|o| o.key);
ops_by_key
.into_iter()
.filter_map(observe_map_key)
.filter_map(|(i, put)| {
let tagged_value = (put.value, doc.id_to_exid(put.id));
let prop = doc
.ops()
.m
.props
.safe_get(put.key.prop_index()?)
.map(|s| Prop::Map(s.to_string()))?;
let conflict = i > 0;
Some((tagged_value, prop, conflict))
})
.for_each(|(tagged_value, prop, conflict)| {
observer.put(doc, exid.clone(), prop, tagged_value, conflict);
});
}
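All three `observe_*` functions above share the same skeleton: group an object's ops by key with itertools' `group_by`, keep the last visible op in each group, and do the position bookkeeping as a fold side effect. A stripped-down toy of that pass, with no Automerge types involved:

use itertools::Itertools;

// Toy model of the pass used by observe_text/observe_list/observe_map above:
// ops arrive grouped by key (consecutive), and the last *visible* op for each
// key is the one the observer should hear about.
struct ToyOp {
    key: u32,
    visible: bool,
    value: &'static str,
}

fn main() {
    let ops = vec![
        ToyOp { key: 0, visible: true,  value: "a0" },
        ToyOp { key: 0, visible: true,  value: "a1" }, // overwrites a0, wins key 0
        ToyOp { key: 1, visible: false, value: "b0" }, // deleted, never reported
        ToyOp { key: 2, visible: true,  value: "c0" }, // wins key 2
    ];
    let ops_by_key = ops.iter().group_by(|o| o.key);
    let winners: Vec<&str> = ops_by_key
        .into_iter()
        .filter_map(|(_key, key_ops)| key_ops.filter(|o| o.visible).last().map(|o| o.value))
        .collect();
    assert_eq!(winners, vec!["a1", "c0"]);
}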
/*
pub(crate) fn observe_diff<O: OpObserver>(
doc: &Automerge,
begin: &Clock,
end: &Clock,
observer: &mut O,
) {
for (obj, typ, ops) in doc.ops().iter_objs() {
let ops_by_key = ops.group_by(|o| o.key);
let diffs = ops_by_key
.into_iter()
.filter_map(|(_key, key_ops)| {
key_ops.fold(None, |state, op| {
match (created(&op.id, begin, end), deleted(op, begin, end)) {
(Era::Before, Era::During) => Some(Diff::del(op)),
(Era::Before, Era::After) => Some(Diff::visible(op)),
(Era::During, Era::After) => state.merge(Some(Diff::add(op))),
_ => state,
}
})
})
.filter(|action| action.valid_at(begin, end));
if typ == ObjType::Text && !observer.text_as_seq() {
observe_text_diff(doc, observer, obj, diffs)
} else if typ.is_sequence() {
observe_list_diff(doc, observer, obj, diffs);
} else {
self.ops.next().map(TextAction::Action)
observe_map_diff(doc, observer, obj, diffs);
}
}
}
fn observe_text_diff<'a, I: Iterator<Item = Diff<'a>>, O: OpObserver>(
doc: &Automerge,
observer: &mut O,
obj: &ObjId,
diffs: I,
) {
let exid = doc.id_to_exid(obj.0);
let encoding = ListEncoding::Text(doc.text_encoding());
diffs
.fold(0, |index, action| match action {
Diff::Visible(op) => index + op.width(encoding),
Diff::Add(op, conflict) => {
observer.splice_text(doc, exid.clone(), index, op.to_str());
index + op.width(encoding)
}
Diff::Delete(_) => { observer.delete_seq(doc, exid.clone(), index, 1); index },
});
}
fn observe_list_diff<'a, I: Iterator<Item = Diff<'a>>, O: OpObserver>(
doc: &Automerge,
observer: &mut O,
obj: &ObjId,
diffs: I,
) {
let exid = doc.id_to_exid(obj.0);
diffs
.fold(0, |index, action| match action {
Diff::Visible(_) => index + 1,
Diff::Add(op, conflict) => {
observer.insert(doc, exid.clone(), index, doc.tagged_value(&op), conflict);
index + 1
}
Diff::Delete(_) => { observer.delete_seq(doc, exid.clone(), index, 1); index },
});
}
fn observe_map_diff<'a, I: Iterator<Item = Diff<'a>>, O: OpObserver>(
doc: &Automerge,
observer: &mut O,
obj: &ObjId,
diffs: I,
) {
let exid = doc.id_to_exid(obj.0);
diffs
.filter_map(|action| Some((get_prop(doc, action.op())?, action)))
.for_each(|(prop, action)| match action {
Diff::Add(op, conflict) => {
observer.put(doc, exid.clone(), prop.into(), doc.tagged_value(&op), conflict)
}
Diff::Delete(_) => observer.delete_map(doc, exid.clone(), prop),
_ => {}
});
}
#[derive(PartialEq, PartialOrd, Ord, Eq)]
enum Era {
Before = 1,
During,
After,
}
fn created(id: &OpId, start: &Clock, end: &Clock) -> Era {
if start.covers(&id) {
Era::Before
} else if end.covers(&id) {
Era::During
} else {
Era::After
}
}
fn deleted(op: &Op, start: &Clock, end: &Clock) -> Era {
if op.is_counter() {
Era::After
} else {
op.succ.iter().fold(Era::After, |state, id| {
std::cmp::min(state, created(id, start, end))
})
}
}
fn get_prop<'a>(doc: &'a Automerge, op: &Op) -> Option<&'a str> {
Some(
doc.ops()
.m
.props
.safe_get(op.key.prop_index()?)?
//.map(|s| Prop::Map(s.to_string()))?,
)
}
enum Diff<'a> {
Add(Cow<'a, Op>, bool),
Delete(Cow<'a, Op>),
Visible(Cow<'a, Op>),
}
impl<'a> Diff<'a> {
fn valid_at(&self, begin: &Clock, end: &Clock) -> bool {
// counters have special rules for visibility
// we cannot know if a succ is an increment or a delete until we've looked at all of them
match self {
Diff::Add(
Cow::Owned(Op {
action: OpType::Put(ScalarValue::Counter(c)),
succ,
..
}),
_,
) => {
c.increments
== succ
.iter()
.filter(|i| created(i, begin, end) == Era::During)
.count()
}
_ => true,
}
}
fn op(&self) -> &Op {
match self {
Diff::Add(a, _) => a,
Diff::Delete(a) => a,
Diff::Visible(a) => a,
}
}
fn add(op: &'a Op) -> Self {
if let OpType::Put(ScalarValue::Counter(c)) = &op.action {
let mut op = op.clone();
op.action = OpType::Put(ScalarValue::Counter(c.start.into()));
Diff::Add(Cow::Owned(op), false)
} else {
Diff::Add(Cow::Borrowed(op), false)
}
}
fn del(op: &'a Op) -> Self {
Diff::Delete(Cow::Borrowed(op))
}
fn visible(op: &'a Op) -> Self {
Diff::Visible(Cow::Borrowed(op))
}
}
trait Mergable {
fn merge(self, other: Self) -> Self;
}
impl<'a> Mergable for Diff<'a> {
fn merge(self, other: Self) -> Self {
match (self, other) {
(Diff::Add(a, c1), Diff::Add(b, c2)) => Diff::Add(a.merge(b), c1 || c2),
(Diff::Visible(_), Diff::Add(b, _)) => Diff::Add(b, true),
(Diff::Visible(a), Diff::Delete(_)) => Diff::Visible(a),
(Diff::Visible(_), Diff::Visible(b)) => Diff::Visible(b),
(Diff::Delete(_), Diff::Visible(a)) => Diff::Visible(a),
(Diff::Add(a, c), Diff::Delete(_)) => Diff::Add(a, c),
(_self, other) => other,
}
}
}
impl<M: Mergable> Mergable for Option<M> {
fn merge(self, other: Self) -> Self {
match (self, other) {
(Some(a), Some(b)) => Some(a.merge(b)),
(None, Some(b)) => Some(b),
(Some(a), None) => Some(a),
(None, None) => None,
}
}
}
impl<'a> Mergable for Cow<'a, Op> {
fn merge(mut self, other: Self) -> Self {
match (&self.action, &other.action) {
(OpType::Put(ScalarValue::Counter(c)), OpType::Increment(n)) => {
let mut counter = c.clone();
counter.increment(*n);
self.to_mut().action = OpType::Put(ScalarValue::Counter(counter));
self
}
(OpType::Increment(n), OpType::Increment(m)) => {
self.to_mut().action = OpType::Increment(m + n);
self
}
_ => other,
}
}
}
*/
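The commented-out `observe_diff` draft above classifies each op twice against the diff window: once by when it was created and once by when (if ever) it was deleted, and only a few of the combinations emit a patch. A toy of just that classification (the string labels are illustrative; the draft additionally keeps `Visible` ops around for index bookkeeping):

// Which patch an op contributes when diffing from `start` heads to `end` heads,
// keyed by when it was created and when it was deleted.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Era {
    Before, // already present at `start`
    During, // happened between `start` and `end`
    After,  // not yet (or, for deletion, never) by `end`
}

fn patch_for(created: Era, deleted: Era) -> Option<&'static str> {
    match (created, deleted) {
        (Era::Before, Era::During) => Some("delete"), // visible at start, gone by end
        (Era::Before, Era::After) => None,  // visible at both ends: no patch of its own
        (Era::During, Era::After) => Some("add"), // new and still visible at end
        _ => None, // e.g. created and deleted inside the window: invisible at both ends
    }
}

fn main() {
    assert_eq!(patch_for(Era::Before, Era::During), Some("delete"));
    assert_eq!(patch_for(Era::During, Era::During), None);
    assert_eq!(patch_for(Era::During, Era::After), Some("add"));
}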
#[cfg(test)]
mod tests {
use std::{borrow::Cow, fs};
use crate::{transaction::Transactable, Automerge, ObjType, OpObserver, Prop, ReadDoc, Value};
use crate::{
marks::Mark, transaction::Transactable, Automerge, ObjType, OpObserver, Prop, ReadDoc,
Value,
};
//use crate::{transaction::Transactable, Automerge, ObjType, OpObserver, Prop, ReadDoc, Value};
// Observer ops often carry a "tagged value", which is a value and the OpID of the op which
// created that value. For a lot of values (i.e. any scalar value) we don't care about the
@ -490,6 +583,7 @@ mod tests {
objid: crate::ObjId,
index: usize,
tagged_value: (crate::Value<'_>, crate::ObjId),
_conflict: bool,
) {
self.ops.push(ObserverCall::Insert {
obj: objid,
@ -566,6 +660,24 @@ mod tests {
fn text_as_seq(&self) -> bool {
self.text_as_seq
}
fn mark<'a, R: ReadDoc, M: Iterator<Item = Mark<'a>>>(
&mut self,
_doc: &R,
_objid: crate::ObjId,
_mark: M,
) {
}
fn unmark<R: ReadDoc>(
&mut self,
_doc: &R,
_objid: crate::ObjId,
_name: &str,
_start: usize,
_end: usize,
) {
}
}
#[test]

File diff suppressed because it is too large


@ -7,6 +7,9 @@ use crate::transaction::Transactable;
use crate::*;
use std::convert::TryInto;
use crate::op_observer::HasPatches;
use test_log::test;
#[test]
fn insert_op() -> Result<(), AutomergeError> {
let mut doc = Automerge::new();
@ -153,9 +156,9 @@ fn test_save_text() -> Result<(), AutomergeError> {
let heads3 = doc.get_heads();
assert!(&doc.text(&text)? == "hello big bad world");
assert!(&doc.text_at(&text, &heads1)?.is_empty());
assert!(&doc.text_at(&text, &heads2)? == "hello world");
assert!(&doc.text_at(&text, &heads3)? == "hello big bad world");
assert!(&doc.at(&heads1).text(&text)?.is_empty());
assert!(&doc.at(&heads2).text(&text)? == "hello world");
assert!(&doc.at(&heads3).text(&text)? == "hello big bad world");
Ok(())
}
@ -189,46 +192,48 @@ fn test_props_vals_at() -> Result<(), AutomergeError> {
tx.commit();
doc.get_heads();
let heads5 = doc.get_heads();
assert!(doc.keys_at(ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]);
assert_eq!(doc.length_at(ROOT, &heads1), 1);
assert!(doc.get_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1"));
assert!(doc.get_at(ROOT, "prop2", &heads1)?.is_none());
assert!(doc.get_at(ROOT, "prop3", &heads1)?.is_none());
let at_h1 = doc.at(&heads1);
assert!(at_h1.keys(ROOT).collect_vec() == vec!["prop1".to_owned()]);
assert_eq!(at_h1.length(ROOT), 1);
assert!(at_h1.get(ROOT, "prop1")?.unwrap().0 == Value::str("val1"));
assert!(at_h1.get(ROOT, "prop2")?.is_none());
assert!(at_h1.get(ROOT, "prop3")?.is_none());
assert!(doc.keys_at(ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]);
assert_eq!(doc.length_at(ROOT, &heads2), 1);
assert!(doc.get_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2"));
assert!(doc.get_at(ROOT, "prop2", &heads2)?.is_none());
assert!(doc.get_at(ROOT, "prop3", &heads2)?.is_none());
let at_h2 = doc.at(&heads2);
assert!(at_h2.keys(ROOT).collect_vec() == vec!["prop1".to_owned()]);
assert_eq!(at_h2.length(ROOT), 1);
assert!(at_h2.get(ROOT, "prop1")?.unwrap().0 == Value::str("val2"));
assert!(at_h2.get(ROOT, "prop2")?.is_none());
assert!(at_h2.get(ROOT, "prop3")?.is_none());
assert!(
doc.keys_at(ROOT, &heads3).collect_vec() == vec!["prop1".to_owned(), "prop2".to_owned()]
);
assert_eq!(doc.length_at(ROOT, &heads3), 2);
assert!(doc.get_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2"));
assert!(doc.get_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3"));
assert!(doc.get_at(ROOT, "prop3", &heads3)?.is_none());
let at_h3 = doc.at(&heads3);
assert!(at_h3.keys(ROOT).collect_vec() == vec!["prop1".to_owned(), "prop2".to_owned()]);
assert_eq!(at_h3.length(ROOT), 2);
assert!(at_h3.get(ROOT, "prop1")?.unwrap().0 == Value::str("val2"));
assert!(at_h3.get(ROOT, "prop2")?.unwrap().0 == Value::str("val3"));
assert!(at_h3.get(ROOT, "prop3")?.is_none());
assert!(doc.keys_at(ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]);
assert_eq!(doc.length_at(ROOT, &heads4), 1);
assert!(doc.get_at(ROOT, "prop1", &heads4)?.is_none());
assert!(doc.get_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3"));
assert!(doc.get_at(ROOT, "prop3", &heads4)?.is_none());
let at_h4 = doc.at(&heads4);
assert!(at_h4.keys(ROOT).collect_vec() == vec!["prop2".to_owned()]);
assert_eq!(at_h4.length(ROOT), 1);
assert!(at_h4.get(ROOT, "prop1")?.is_none());
assert!(at_h4.get(ROOT, "prop2")?.unwrap().0 == Value::str("val3"));
assert!(at_h4.get(ROOT, "prop3")?.is_none());
assert!(
doc.keys_at(ROOT, &heads5).collect_vec() == vec!["prop2".to_owned(), "prop3".to_owned()]
);
assert_eq!(doc.length_at(ROOT, &heads5), 2);
let at_h5 = doc.at(&heads5);
assert!(at_h5.keys(ROOT).collect_vec() == vec!["prop2".to_owned(), "prop3".to_owned()]);
assert_eq!(at_h5.length(ROOT), 2);
assert_eq!(doc.length(ROOT), 2);
assert!(doc.get_at(ROOT, "prop1", &heads5)?.is_none());
assert!(doc.get_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3"));
assert!(doc.get_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4"));
assert!(at_h5.get(ROOT, "prop1")?.is_none());
assert!(at_h5.get(ROOT, "prop2")?.unwrap().0 == Value::str("val3"));
assert!(at_h5.get(ROOT, "prop3")?.unwrap().0 == Value::str("val4"));
assert_eq!(doc.keys_at(ROOT, &[]).count(), 0);
assert_eq!(doc.length_at(ROOT, &[]), 0);
assert!(doc.get_at(ROOT, "prop1", &[])?.is_none());
assert!(doc.get_at(ROOT, "prop2", &[])?.is_none());
assert!(doc.get_at(ROOT, "prop3", &[])?.is_none());
let at_h0 = doc.at(&[]);
assert_eq!(at_h0.keys(ROOT).count(), 0);
assert_eq!(at_h0.length(ROOT), 0);
assert!(at_h0.get(ROOT, "prop1")?.is_none());
assert!(at_h0.get(ROOT, "prop2")?.is_none());
assert!(at_h0.get(ROOT, "prop3")?.is_none());
Ok(())
}
@ -269,28 +274,28 @@ fn test_len_at() -> Result<(), AutomergeError> {
tx.commit();
let heads6 = doc.get_heads();
assert!(doc.length_at(&list, &heads1) == 0);
assert!(doc.get_at(&list, 0, &heads1)?.is_none());
assert!(doc.at(&heads1).length(&list) == 0);
assert!(doc.at(&heads1).get(&list, 0)?.is_none());
assert!(doc.length_at(&list, &heads2) == 1);
assert!(doc.get_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10));
assert!(doc.at(&heads2).length(&list) == 1);
assert!(doc.at(&heads2).get(&list, 0)?.unwrap().0 == Value::int(10));
assert!(doc.length_at(&list, &heads3) == 2);
assert!(doc.get_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30));
assert!(doc.get_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20));
assert!(doc.at(&heads3).length(&list) == 2);
assert!(doc.at(&heads3).get(&list, 0)?.unwrap().0 == Value::int(30));
assert!(doc.at(&heads3).get(&list, 1)?.unwrap().0 == Value::int(20));
assert!(doc.length_at(&list, &heads4) == 3);
assert!(doc.get_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30));
assert!(doc.get_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50));
assert!(doc.get_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40));
assert!(doc.at(&heads4).length(&list) == 3);
assert!(doc.at(&heads4).get(&list, 0)?.unwrap().0 == Value::int(30));
assert!(doc.at(&heads4).get(&list, 1)?.unwrap().0 == Value::int(50));
assert!(doc.at(&heads4).get(&list, 2)?.unwrap().0 == Value::int(40));
assert!(doc.length_at(&list, &heads5) == 2);
assert!(doc.get_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30));
assert!(doc.get_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50));
assert!(doc.at(&heads5).length(&list) == 2);
assert!(doc.at(&heads5).get(&list, 0)?.unwrap().0 == Value::int(30));
assert!(doc.at(&heads5).get(&list, 1)?.unwrap().0 == Value::int(50));
assert!(doc.length_at(&list, &heads6) == 1);
assert!(doc.at(&heads6).length(&list) == 1);
assert!(doc.length(&list) == 1);
assert!(doc.get_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50));
assert!(doc.at(&heads6).get(&list, 0)?.unwrap().0 == Value::int(50));
Ok(())
}
@ -645,7 +650,7 @@ fn map_range_at_back_and_forth_single() {
let heads = doc.get_heads();
let mut range_all = doc.map_range_at(ROOT, .., &heads);
let mut range_all = doc.at(&heads).map_range(ROOT, ..);
assert_eq!(
range_all.next(),
Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0)))
@ -661,7 +666,7 @@ fn map_range_at_back_and_forth_single() {
assert_eq!(range_all.next_back(), None);
assert_eq!(range_all.next(), None);
let mut range_all = doc.map_range_at(ROOT, .., &heads);
let mut range_all = doc.at(&heads).map_range(ROOT, ..);
assert_eq!(
range_all.next(),
Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0)))
@ -677,7 +682,7 @@ fn map_range_at_back_and_forth_single() {
assert_eq!(range_all.next_back(), None);
assert_eq!(range_all.next(), None);
let mut range_all = doc.map_range_at(ROOT, .., &heads);
let mut range_all = doc.at(&heads).map_range(ROOT, ..);
assert_eq!(
range_all.next(),
Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0)))
@ -693,7 +698,7 @@ fn map_range_at_back_and_forth_single() {
assert_eq!(range_all.next_back(), None);
assert_eq!(range_all.next(), None);
let mut range_all = doc.map_range_at(ROOT, .., &heads);
let mut range_all = doc.at(&heads).map_range(ROOT, ..);
assert_eq!(
range_all.next_back(),
Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0)))
@ -730,7 +735,7 @@ fn map_range_at_back_and_forth_double() {
doc1.merge(&mut doc2).unwrap();
let heads = doc1.get_heads();
let mut range_all = doc1.map_range_at(ROOT, .., &heads);
let mut range_all = doc1.at(&heads).map_range(ROOT, ..);
assert_eq!(
range_all.next(),
Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1)))
@ -746,7 +751,7 @@ fn map_range_at_back_and_forth_double() {
assert_eq!(range_all.next_back(), None);
assert_eq!(range_all.next(), None);
let mut range_all = doc1.map_range_at(ROOT, .., &heads);
let mut range_all = doc1.at(&heads).map_range(ROOT, ..);
assert_eq!(
range_all.next(),
Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1)))
@ -762,7 +767,7 @@ fn map_range_at_back_and_forth_double() {
assert_eq!(range_all.next_back(), None);
assert_eq!(range_all.next(), None);
let mut range_all = doc1.map_range_at(ROOT, .., &heads);
let mut range_all = doc1.at(&heads).map_range(ROOT, ..);
assert_eq!(
range_all.next(),
Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1)))
@ -778,7 +783,7 @@ fn map_range_at_back_and_forth_double() {
assert_eq!(range_all.next_back(), None);
assert_eq!(range_all.next(), None);
let mut range_all = doc1.map_range_at(ROOT, .., &heads);
let mut range_all = doc1.at(&heads).map_range(ROOT, ..);
assert_eq!(
range_all.next_back(),
Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1)))
@ -854,18 +859,18 @@ fn get_list_values() -> Result<(), AutomergeError> {
assert_eq!(Some((val1, id)), val2);
}
assert_eq!(doc1.list_range_at(&list, .., &v1).count(), 8);
for (i, val1, id) in doc1.list_range_at(&list, .., &v1) {
let val2 = doc1.get_at(&list, i, &v1)?;
assert_eq!(doc1.at(&v1).list_range(&list, ..).count(), 8);
for (i, val1, id) in doc1.at(&v1).list_range(&list, ..) {
let val2 = doc1.at(&v1).get(&list, i)?;
assert_eq!(Some((val1, id)), val2);
}
assert_eq!(doc1.list_range_at(&list, 3..6, &v1).count(), 3);
assert_eq!(doc1.list_range_at(&list, 3..6, &v1).next().unwrap().0, 3);
assert_eq!(doc1.list_range_at(&list, 3..6, &v1).last().unwrap().0, 5);
assert_eq!(doc1.at(&v1).list_range(&list, 3..6).count(), 3);
assert_eq!(doc1.at(&v1).list_range(&list, 3..6).next().unwrap().0, 3);
assert_eq!(doc1.at(&v1).list_range(&list, 3..6).last().unwrap().0, 5);
for (i, val1, id) in doc1.list_range_at(&list, 3..6, &v1) {
let val2 = doc1.get_at(&list, i, &v1)?;
for (i, val1, id) in doc1.list_range(&list, 3..6) {
let val2 = doc1.at(&v1).get(&list, i)?;
assert_eq!(Some((val1, id)), val2);
}
@ -878,10 +883,11 @@ fn get_list_values() -> Result<(), AutomergeError> {
assert_eq!(range, values);
let range: Vec<_> = doc1
.list_range_at(&list, .., &v1)
.at(&v1)
.list_range(&list, ..)
.map(|(_, val, id)| (val, id))
.collect();
let values: Vec<_> = doc1.values_at(&list, &v1).collect();
let values: Vec<_> = doc1.at(&v1).values(&list).collect();
assert_eq!(range, values);
Ok(())
@ -926,17 +932,17 @@ fn get_range_values() -> Result<(), AutomergeError> {
assert_eq!(Some((val1, id)), val2);
}
assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).count(), 2);
assert_eq!(doc1.at(&v1).map_range(ROOT, range.clone()).count(), 2);
for (key, val1, id) in doc1.map_range_at(ROOT, range.clone(), &v1) {
let val2 = doc1.get_at(ROOT, key, &v1)?;
for (key, val1, id) in doc1.at(&v1).map_range(ROOT, range.clone()) {
let val2 = doc1.get(ROOT, key)?;
assert_eq!(Some((val1, id)), val2);
}
assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).rev().count(), 2);
assert_eq!(doc1.at(&v1).map_range(ROOT, range.clone()).rev().count(), 2);
for (key, val1, id) in doc1.map_range_at(ROOT, range, &v1).rev() {
let val2 = doc1.get_at(ROOT, key, &v1)?;
for (key, val1, id) in doc1.at(&v1).map_range(ROOT, range).rev() {
let val2 = doc1.get(ROOT, key)?;
assert_eq!(Some((val1, id)), val2);
}
@ -948,10 +954,11 @@ fn get_range_values() -> Result<(), AutomergeError> {
assert_eq!(range, values);
let range: Vec<_> = doc1
.map_range_at(ROOT, .., &v1)
.at(&v1)
.map_range(ROOT, ..)
.map(|(_, val, id)| (val, id))
.collect();
let values: Vec<_> = doc1.values_at(ROOT, &v1).collect();
let values: Vec<_> = doc1.at(&v1).values(ROOT).collect();
assert_eq!(range, values);
Ok(())
@ -1479,15 +1486,18 @@ fn observe_counter_change_application_overwrite() {
assert_eq!(
doc3.observer().take_patches(),
vec![Patch::Put {
vec![Patch {
obj: ExId::Root,
path: vec![],
prop: Prop::Map("counter".into()),
value: (
ScalarValue::Str("mystring".into()).into(),
ExId::Id(2, doc2.get_actor().clone(), 1)
),
conflict: false
action: PatchAction::PutMap {
key: "counter".into(),
value: (
ScalarValue::Str("mystring".into()).into(),
ExId::Id(2, doc2.get_actor().clone(), 1)
),
conflict: false,
expose: false
}
}]
);
@ -1514,29 +1524,29 @@ fn observe_counter_change_application() {
new_doc.observer().take_patches();
new_doc.apply_changes(changes).unwrap();
assert_eq!(
new_doc.observer().take_patches(),
new_doc
.observer()
.take_patches()
.into_iter()
.map(|p| p.action)
.collect::<Vec<_>>(),
vec![
Patch::Put {
obj: ExId::Root,
path: vec![],
prop: Prop::Map("counter".into()),
PatchAction::PutMap {
key: "counter".into(),
value: (
ScalarValue::counter(1).into(),
ExId::Id(1, doc.get_actor().clone(), 0)
),
conflict: false
conflict: false,
expose: false,
},
Patch::Increment {
obj: ExId::Root,
path: vec![],
PatchAction::Increment {
prop: Prop::Map("counter".into()),
value: (2, ExId::Id(2, doc.get_actor().clone(), 0)),
value: 2,
},
Patch::Increment {
obj: ExId::Root,
path: vec![],
PatchAction::Increment {
prop: Prop::Map("counter".into()),
value: (5, ExId::Id(3, doc.get_actor().clone(), 0)),
value: 5,
}
]
);
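
The assertions above pair each older `*_at(…, &heads)` read with the new `doc.at(&heads)` view introduced on this branch, checking that the two agree at every captured set of heads. A minimal sketch of that pattern (the `AutoCommit` setup and imports are assumptions; this excerpt does not show which document type the tests construct):

use automerge::{transaction::Transactable, AutoCommit, AutomergeError, ObjType, ReadDoc, ROOT};

fn at_view_sketch() -> Result<(), AutomergeError> {
    // Build a small document: a list under the root, edited once.
    let mut doc = AutoCommit::new();
    let list = doc.put_object(ROOT, "list", ObjType::List)?;
    let heads_empty = doc.get_heads();
    doc.insert(&list, 0, 10_i64)?;
    let heads_one = doc.get_heads();

    // The historical view and the older *_at reads are expected to agree.
    assert_eq!(doc.at(&heads_empty).length(&list), doc.length_at(&list, &heads_empty));
    assert_eq!(doc.at(&heads_one).length(&list), doc.length_at(&list, &heads_one));
    assert_eq!(doc.at(&heads_one).get(&list, 0)?, doc.get_at(&list, 0, &heads_one)?);
    Ok(())
}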

View file

@ -255,6 +255,18 @@ mod convert_expanded {
None => Cow::Owned(ScalarValue::Null),
}
}
fn expand(&self) -> bool {
self.action.expand()
}
fn mark_name(&self) -> Option<Cow<'a, smol_str::SmolStr>> {
if let legacy::OpType::MarkBegin(legacy::MarkData { name, .. }) = &self.action {
Some(Cow::Borrowed(name))
} else {
None
}
}
}
impl<'a> convert::OpId<&'a ActorId> for &'a legacy::OpId {
@ -278,7 +290,12 @@ impl From<&Change> for crate::ExpandedChange {
let operations = c
.iter_ops()
.map(|o| crate::legacy::Op {
action: crate::types::OpType::from_action_and_value(o.action, o.val),
action: crate::legacy::OpType::from_parts(crate::legacy::OpTypeParts {
action: o.action,
value: o.val,
expand: o.expand,
mark_name: o.mark_name,
}),
insert: o.insert,
key: match o.key {
StoredKey::Elem(e) if e.is_head() => {

View file

@ -3,7 +3,7 @@ pub(crate) use rle::RleRange;
mod delta;
pub(crate) use delta::DeltaRange;
mod boolean;
pub(crate) use boolean::BooleanRange;
pub(crate) use boolean::{BooleanRange, MaybeBooleanRange};
mod raw;
pub(crate) use raw::RawRange;
mod opid;

View file

@ -1,6 +1,8 @@
use std::{borrow::Cow, ops::Range};
use crate::columnar::encoding::{BooleanDecoder, BooleanEncoder};
use crate::columnar::encoding::{
BooleanDecoder, BooleanEncoder, MaybeBooleanDecoder, MaybeBooleanEncoder,
};
#[derive(Clone, Debug, PartialEq)]
pub(crate) struct BooleanRange(Range<usize>);
@ -38,3 +40,44 @@ impl From<BooleanRange> for Range<usize> {
r.0
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct MaybeBooleanRange(Range<usize>);
impl MaybeBooleanRange {
pub(crate) fn decoder<'a>(&self, data: &'a [u8]) -> MaybeBooleanDecoder<'a> {
MaybeBooleanDecoder::from(Cow::Borrowed(&data[self.0.clone()]))
}
pub(crate) fn encode<I: Iterator<Item = bool>>(items: I, out: &mut Vec<u8>) -> Self {
let start = out.len();
let mut encoder = MaybeBooleanEncoder::from_sink(out);
for i in items {
encoder.append(i);
}
let (_, len) = encoder.finish();
(start..(start + len)).into()
}
pub(crate) fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl AsRef<Range<usize>> for MaybeBooleanRange {
fn as_ref(&self) -> &Range<usize> {
&self.0
}
}
impl From<Range<usize>> for MaybeBooleanRange {
fn from(r: Range<usize>) -> MaybeBooleanRange {
MaybeBooleanRange(r)
}
}
impl From<MaybeBooleanRange> for Range<usize> {
fn from(r: MaybeBooleanRange) -> Range<usize> {
r.0
}
}
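
`MaybeBooleanRange::encode` wraps the `MaybeBooleanEncoder` from the next file: an all-`false` column comes back as an empty range, which callers can presumably use to skip storing the column at all. A rough in-crate sketch (these items are `pub(crate)`, so this would live in a test inside the module):

// All-false input: nothing needs to be stored and the range is empty.
let mut out = Vec::new();
let range = MaybeBooleanRange::encode([false, false, false].into_iter(), &mut out);
assert!(range.is_empty());

// One true value is enough to force a real boolean column.
let range = MaybeBooleanRange::encode([false, true].into_iter(), &mut out);
assert!(!range.is_empty());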

View file

@ -4,7 +4,9 @@ pub(crate) use raw::{RawDecoder, RawEncoder};
mod rle;
pub(crate) use rle::{RleDecoder, RleEncoder};
mod boolean;
pub(crate) use boolean::{BooleanDecoder, BooleanEncoder};
pub(crate) use boolean::{
BooleanDecoder, BooleanEncoder, MaybeBooleanDecoder, MaybeBooleanEncoder,
};
mod delta;
pub(crate) use delta::{DeltaDecoder, DeltaEncoder};
pub(crate) mod leb128;

View file

@ -100,6 +100,72 @@ impl<'a> Iterator for BooleanDecoder<'a> {
}
}
/// Like a `BooleanEncoder`, but if all the values in the column are `false` then it will return an
/// empty range rather than a range with `count` false values.
pub(crate) struct MaybeBooleanEncoder<S> {
encoder: BooleanEncoder<S>,
all_false: bool,
}
impl MaybeBooleanEncoder<Vec<u8>> {
pub(crate) fn new() -> MaybeBooleanEncoder<Vec<u8>> {
MaybeBooleanEncoder::from_sink(Vec::new())
}
}
impl<S: Sink> MaybeBooleanEncoder<S> {
pub(crate) fn from_sink(buf: S) -> MaybeBooleanEncoder<S> {
MaybeBooleanEncoder {
encoder: BooleanEncoder::from_sink(buf),
all_false: true,
}
}
pub(crate) fn append(&mut self, value: bool) {
if value {
self.all_false = false;
}
self.encoder.append(value);
}
pub(crate) fn finish(self) -> (S, usize) {
if self.all_false {
(self.encoder.buf, 0)
} else {
self.encoder.finish()
}
}
}
/// Like a `BooleanDecoder`, but if the underlying range is empty then it just returns an infinite
/// sequence of `None`
#[derive(Clone, Debug)]
pub(crate) struct MaybeBooleanDecoder<'a>(BooleanDecoder<'a>);
impl<'a> From<Cow<'a, [u8]>> for MaybeBooleanDecoder<'a> {
fn from(bytes: Cow<'a, [u8]>) -> Self {
MaybeBooleanDecoder(BooleanDecoder::from(bytes))
}
}
impl<'a> From<&'a [u8]> for MaybeBooleanDecoder<'a> {
fn from(d: &'a [u8]) -> Self {
Cow::Borrowed(d).into()
}
}
impl<'a> Iterator for MaybeBooleanDecoder<'a> {
type Item = Result<Option<bool>, raw::Error>;
fn next(&mut self) -> Option<Self::Item> {
if self.0.decoder.is_empty() {
None
} else {
self.0.next().transpose().map(Some).transpose()
}
}
}
#[cfg(test)]
mod tests {
use super::*;
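
The two doc comments above state the contract: an all-`false` column encodes to nothing, and an empty column decodes without error. A small sketch of the encoder side of that contract (in-crate only, since these types are `pub(crate)`):

// Appending only `false` values writes nothing: the column can be dropped.
let mut all_false = MaybeBooleanEncoder::new();
for _ in 0..8 {
    all_false.append(false);
}
let (_, len) = all_false.finish();
assert_eq!(len, 0);

// A single `true` makes it behave like an ordinary boolean column.
let mut mixed = MaybeBooleanEncoder::new();
mixed.append(false);
mixed.append(true);
let (_, len) = mixed.finish();
assert!(len > 0);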

View file

@ -59,6 +59,10 @@ impl<'a> RawDecoder<'a> {
pub(crate) fn done(&self) -> bool {
self.offset >= self.data.len()
}
pub(crate) fn is_empty(&self) -> bool {
self.data.is_empty()
}
}
impl<'a> From<&'a [u8]> for RawDecoder<'a> {

View file

@ -54,6 +54,8 @@ pub enum AutomergeError {
NonChangeCompressed,
#[error("id was not an object id")]
NotAnObject,
#[error("an observer is required")]
NoObserver,
}
impl PartialEq for AutomergeError {

View file

@ -5,33 +5,57 @@ use crate::{query, Automerge};
/// This is returned by the [`crate::ReadDoc::keys`] method. The returned item is either
/// the keys of a map, or the encoded element IDs of a sequence.
#[derive(Debug)]
pub struct Keys<'a, 'k> {
keys: Option<query::Keys<'k>>,
pub struct Keys<'a> {
keys: KeyQuery<'a>,
doc: &'a Automerge,
}
impl<'a, 'k> Keys<'a, 'k> {
pub(crate) fn new(doc: &'a Automerge, keys: Option<query::Keys<'k>>) -> Self {
#[derive(Debug)]
enum KeyQuery<'a> {
Keys(query::Keys<'a>),
KeysAt(query::KeysAt<'a>),
None,
}
impl<'a> Keys<'a> {
pub(crate) fn with_keys(self, query: query::Keys<'a>) -> Self {
Self {
keys: KeyQuery::Keys(query),
doc: self.doc,
}
}
pub(crate) fn with_keys_at(self, query: query::KeysAt<'a>) -> Self {
Self {
keys: KeyQuery::KeysAt(query),
doc: self.doc,
}
}
pub(crate) fn new(doc: &'a Automerge) -> Self {
let keys = KeyQuery::None;
Self { keys, doc }
}
}
impl<'a, 'k> Iterator for Keys<'a, 'k> {
impl<'a> Iterator for Keys<'a> {
type Item = String;
fn next(&mut self) -> Option<Self::Item> {
self.keys
.as_mut()?
.next()
.map(|key| self.doc.to_string(key))
match &mut self.keys {
KeyQuery::Keys(keys) => keys.next().map(|key| self.doc.to_string(key)),
KeyQuery::KeysAt(keys_at) => keys_at.next().map(|key| self.doc.to_string(key)),
KeyQuery::None => None,
}
}
}
impl<'a, 'k> DoubleEndedIterator for Keys<'a, 'k> {
impl<'a> DoubleEndedIterator for Keys<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
self.keys
.as_mut()?
.next_back()
.map(|key| self.doc.to_string(key))
match &mut self.keys {
KeyQuery::Keys(keys) => keys.next_back().map(|key| self.doc.to_string(key)),
KeyQuery::KeysAt(keys_at) => keys_at.next_back().map(|key| self.doc.to_string(key)),
KeyQuery::None => None,
}
}
}
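
With `KeysAt` gone, `Keys` now dispatches internally over an enum of query types, so callers see a single iterator whether or not they asked for a historical view. The shape is plain enum dispatch; a stripped-down illustration with made-up names (not the crate's types):

// One public iterator, an internal enum picking which query drives it.
enum Source<A, B> {
    Current(A),
    AtHeads(B),
    Empty,
}

struct KeysLike<A, B>(Source<A, B>);

impl<A, B> Iterator for KeysLike<A, B>
where
    A: Iterator<Item = String>,
    B: Iterator<Item = String>,
{
    type Item = String;
    fn next(&mut self) -> Option<String> {
        match &mut self.0 {
            Source::Current(a) => a.next(),
            Source::AtHeads(b) => b.next(),
            Source::Empty => None,
        }
    }
}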

View file

@ -1,37 +0,0 @@
use crate::{query, Automerge};
/// An iterator over the keys of an object at a particular point in history
///
/// This is returned by the [`crate::ReadDoc::keys_at`] method. The returned item is either the keys of a map,
/// or the encoded element IDs of a sequence.
#[derive(Debug)]
pub struct KeysAt<'a, 'k> {
keys: Option<query::KeysAt<'k>>,
doc: &'a Automerge,
}
impl<'a, 'k> KeysAt<'a, 'k> {
pub(crate) fn new(doc: &'a Automerge, keys: Option<query::KeysAt<'k>>) -> Self {
Self { keys, doc }
}
}
impl<'a, 'k> Iterator for KeysAt<'a, 'k> {
type Item = String;
fn next(&mut self) -> Option<Self::Item> {
self.keys
.as_mut()?
.next()
.map(|key| self.doc.to_string(key))
}
}
impl<'a, 'k> DoubleEndedIterator for KeysAt<'a, 'k> {
fn next_back(&mut self) -> Option<Self::Item> {
self.keys
.as_mut()?
.next()
.map(|key| self.doc.to_string(key))
}
}

View file

@ -3,7 +3,7 @@ mod utility_impls;
use std::num::NonZeroU64;
pub(crate) use crate::types::{ActorId, ChangeHash, ObjType, OpType, ScalarValue};
pub(crate) use crate::types::{ActorId, ChangeHash, ObjType, ScalarValue};
pub(crate) use crate::value::DataType;
use serde::{Deserialize, Serialize};
@ -204,6 +204,96 @@ where
}
}
pub(crate) struct OpTypeParts {
pub(crate) action: u64,
pub(crate) value: ScalarValue,
pub(crate) expand: bool,
pub(crate) mark_name: Option<smol_str::SmolStr>,
}
// Like `types::OpType` except using a String for mark names
#[derive(PartialEq, Debug, Clone)]
pub enum OpType {
Make(ObjType),
Delete,
Increment(i64),
Put(ScalarValue),
MarkBegin(MarkData),
MarkEnd(bool),
}
impl OpType {
/// Create a new legacy OpType
///
/// This is really only meant to be used to convert from a crate::Change to a
/// crate::legacy::Change, so the arguments should all have been validated. Consequently it
/// does not return an error and instead panics on the following conditions
///
/// # Panics
///
/// * If the action index is unrecognized
/// * If the action index indicates that the value should be numeric but the value is not a
/// number
pub(crate) fn from_parts(
OpTypeParts {
action,
value,
expand,
mark_name,
}: OpTypeParts,
) -> Self {
match action {
0 => Self::Make(ObjType::Map),
1 => Self::Put(value),
2 => Self::Make(ObjType::List),
3 => Self::Delete,
4 => Self::Make(ObjType::Text),
5 => match value {
ScalarValue::Int(i) => Self::Increment(i),
ScalarValue::Uint(i) => Self::Increment(i as i64),
_ => panic!("non numeric value for integer action"),
},
6 => Self::Make(ObjType::Table),
7 => match mark_name {
Some(name) => Self::MarkBegin(MarkData {
name,
value,
expand,
}),
None => Self::MarkEnd(expand),
},
other => panic!("unknown action type {}", other),
}
}
pub(crate) fn action_index(&self) -> u64 {
match self {
Self::Make(ObjType::Map) => 0,
Self::Put(_) => 1,
Self::Make(ObjType::List) => 2,
Self::Delete => 3,
Self::Make(ObjType::Text) => 4,
Self::Increment(_) => 5,
Self::Make(ObjType::Table) => 6,
Self::MarkBegin(_) | Self::MarkEnd(_) => 7,
}
}
pub(crate) fn expand(&self) -> bool {
matches!(
self,
Self::MarkBegin(MarkData { expand: true, .. }) | Self::MarkEnd(true)
)
}
}
#[derive(PartialEq, Debug, Clone)]
pub struct MarkData {
pub name: smol_str::SmolStr,
pub value: ScalarValue,
pub expand: bool,
}
#[derive(PartialEq, Debug, Clone)]
pub struct Op {
pub action: OpType,
@ -217,6 +307,7 @@ impl Op {
pub fn primitive_value(&self) -> Option<ScalarValue> {
match &self.action {
OpType::Put(v) => Some(v.clone()),
OpType::MarkBegin(MarkData { value, .. }) => Some(value.clone()),
OpType::Increment(i) => Some(ScalarValue::Int(*i)),
_ => None,
}
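
`from_parts` and `action_index` are intended to round-trip through the numeric action tag; index 7 covers both mark ops, with `mark_name` deciding between `MarkBegin` and `MarkEnd`. An in-crate sketch of that round trip (these items are `pub(crate)`):

let begin = OpType::from_parts(OpTypeParts {
    action: 7,
    value: ScalarValue::Str("bold".into()),
    expand: true,
    mark_name: Some(smol_str::SmolStr::new("style")),
});
assert!(matches!(
    &begin,
    OpType::MarkBegin(MarkData { name, expand: true, .. }) if name.as_str() == "style"
));
assert_eq!(begin.action_index(), 7);

let end = OpType::from_parts(OpTypeParts {
    action: 7,
    value: ScalarValue::Null,
    expand: false,
    mark_name: None,
});
assert_eq!(end, OpType::MarkEnd(false));
assert_eq!(end.action_index(), 7);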

View file

@ -5,7 +5,9 @@ use serde::{
};
use super::read_field;
use crate::legacy::{DataType, Key, ObjType, ObjectId, Op, OpId, OpType, ScalarValue, SortedVec};
use crate::legacy::{
DataType, Key, MarkData, ObjType, ObjectId, Op, OpId, OpType, ScalarValue, SortedVec,
};
impl Serialize for Op {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@ -50,6 +52,16 @@ impl Serialize for Op {
OpType::Increment(n) => op.serialize_field("value", &n)?,
OpType::Put(ScalarValue::Counter(c)) => op.serialize_field("value", &c.start)?,
OpType::Put(value) => op.serialize_field("value", &value)?,
OpType::MarkBegin(MarkData {
name,
value,
expand,
}) => {
op.serialize_field("name", &name)?;
op.serialize_field("value", &value)?;
op.serialize_field("expand", &expand)?
}
OpType::MarkEnd(expand) => op.serialize_field("expand", &expand)?,
_ => {}
}
op.serialize_field("pred", &self.pred)?;
@ -71,6 +83,8 @@ pub(crate) enum RawOpType {
Del,
Inc,
Set,
MarkBegin,
MarkEnd,
}
impl Serialize for RawOpType {
@ -86,6 +100,8 @@ impl Serialize for RawOpType {
RawOpType::Del => "del",
RawOpType::Inc => "inc",
RawOpType::Set => "set",
RawOpType::MarkBegin => "markBegin",
RawOpType::MarkEnd => "markEnd",
};
serializer.serialize_str(s)
}
@ -104,8 +120,8 @@ impl<'de> Deserialize<'de> for RawOpType {
"del",
"inc",
"set",
"mark",
"unmark",
"markBegin",
"markEnd",
];
// TODO: Probably more efficient to deserialize to a `&str`
let raw_type = String::deserialize(deserializer)?;
@ -117,6 +133,8 @@ impl<'de> Deserialize<'de> for RawOpType {
"del" => Ok(RawOpType::Del),
"inc" => Ok(RawOpType::Inc),
"set" => Ok(RawOpType::Set),
"markBegin" => Ok(RawOpType::MarkBegin),
"markEnd" => Ok(RawOpType::MarkEnd),
other => Err(Error::unknown_variant(other, VARIANTS)),
}
}
@ -189,24 +207,7 @@ impl<'de> Deserialize<'de> for Op {
RawOpType::MakeList => OpType::Make(ObjType::List),
RawOpType::MakeText => OpType::Make(ObjType::Text),
RawOpType::Del => OpType::Delete,
RawOpType::Set => {
let value = if let Some(datatype) = datatype {
let raw_value = value
.ok_or_else(|| Error::missing_field("value"))?
.unwrap_or(ScalarValue::Null);
raw_value.as_datatype(datatype).map_err(|e| {
Error::invalid_value(
Unexpected::Other(e.unexpected.as_str()),
&e.expected.as_str(),
)
})?
} else {
value
.ok_or_else(|| Error::missing_field("value"))?
.unwrap_or(ScalarValue::Null)
};
OpType::Put(value)
}
RawOpType::Set => OpType::Put(unwrap_value(value, datatype)?),
RawOpType::Inc => match value.flatten() {
Some(ScalarValue::Int(n)) => Ok(OpType::Increment(n)),
Some(ScalarValue::Uint(n)) => Ok(OpType::Increment(n as i64)),
@ -230,6 +231,18 @@ impl<'de> Deserialize<'de> for Op {
}
None => Err(Error::missing_field("value")),
}?,
RawOpType::MarkBegin => {
let name = name.ok_or_else(|| Error::missing_field("name"))?;
let name = smol_str::SmolStr::new(name);
let expand = expand.unwrap_or(false);
let value = unwrap_value(value, datatype)?;
OpType::MarkBegin(MarkData {
name,
value,
expand,
})
}
RawOpType::MarkEnd => OpType::MarkEnd(expand.unwrap_or(false)),
};
Ok(Op {
action,
@ -244,6 +257,27 @@ impl<'de> Deserialize<'de> for Op {
}
}
fn unwrap_value<E: Error>(
value: Option<Option<ScalarValue>>,
datatype: Option<DataType>,
) -> Result<ScalarValue, E> {
if let Some(datatype) = datatype {
let raw_value = value
.ok_or_else(|| Error::missing_field("value"))?
.unwrap_or(ScalarValue::Null);
raw_value.as_datatype(datatype).map_err(|e| {
Error::invalid_value(
Unexpected::Other(e.unexpected.as_str()),
&e.expected.as_str(),
)
})
} else {
Ok(value
.ok_or_else(|| Error::missing_field("value"))?
.unwrap_or(ScalarValue::Null))
}
}
#[cfg(test)]
mod tests {
use std::str::FromStr;

View file

@ -1,7 +1,7 @@
use serde::{Serialize, Serializer};
use super::op::RawOpType;
use crate::{ObjType, OpType};
use crate::{legacy::OpType, ObjType};
impl Serialize for OpType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@ -18,6 +18,8 @@ impl Serialize for OpType {
OpType::Delete => RawOpType::Del,
OpType::Increment(_) => RawOpType::Inc,
OpType::Put(_) => RawOpType::Set,
OpType::MarkBegin(_) => RawOpType::MarkBegin,
OpType::MarkEnd(_) => RawOpType::MarkEnd,
};
raw_type.serialize(serializer)
}

View file

@ -252,18 +252,17 @@ mod error;
mod exid;
mod indexed_cache;
mod keys;
mod keys_at;
mod legacy;
mod list_range;
mod list_range_at;
mod map_range;
mod map_range_at;
pub mod marks;
pub mod op_observer;
mod op_set;
mod op_tree;
mod parents;
mod query;
mod read;
mod sequence_tree;
mod storage;
pub mod sync;
pub mod transaction;
@ -282,15 +281,12 @@ pub use error::InvalidActorId;
pub use error::InvalidChangeHashSlice;
pub use exid::{ExId as ObjId, ObjIdFromBytesError};
pub use keys::Keys;
pub use keys_at::KeysAt;
pub use legacy::Change as ExpandedChange;
pub use list_range::ListRange;
pub use list_range_at::ListRangeAt;
pub use map_range::MapRange;
pub use map_range_at::MapRangeAt;
pub use op_observer::OpObserver;
pub use op_observer::Patch;
pub use op_observer::VecOpObserver;
pub use op_observer::{
OpObserver, Patch, PatchAction, ToggleObserver, VecOpObserver, VecOpObserver16,
};
pub use parents::{Parent, Parents};
pub use read::ReadDoc;
pub use types::{ActorId, ChangeHash, ObjType, OpType, ParseChangeHashError, Prop, TextEncoding};

View file

@ -8,13 +8,39 @@ use std::ops::RangeBounds;
/// This is returned by the [`crate::ReadDoc::list_range`] method
#[derive(Debug)]
pub struct ListRange<'a, R: RangeBounds<usize>> {
range: Option<query::ListRange<'a, R>>,
range: ListRangeQuery<'a, R>,
doc: &'a Automerge,
}
#[derive(Debug)]
enum ListRangeQuery<'a, R: RangeBounds<usize>> {
ListRange(query::ListRange<'a, R>),
ListRangeAt(query::ListRangeAt<'a, R>),
None,
}
impl<'a, R: RangeBounds<usize>> ListRange<'a, R> {
pub(crate) fn new(doc: &'a Automerge, range: Option<query::ListRange<'a, R>>) -> Self {
Self { range, doc }
pub(crate) fn new(doc: &'a Automerge) -> Self {
Self {
range: ListRangeQuery::None,
doc,
}
}
pub(crate) fn with_list_range(self, query: query::ListRange<'a, R>) -> Self {
let range = ListRangeQuery::ListRange(query);
Self {
range,
doc: self.doc,
}
}
pub(crate) fn with_list_range_at(self, query: query::ListRangeAt<'a, R>) -> Self {
let range = ListRangeQuery::ListRangeAt(query);
Self {
range,
doc: self.doc,
}
}
}
@ -22,9 +48,14 @@ impl<'a, R: RangeBounds<usize>> Iterator for ListRange<'a, R> {
type Item = (usize, Value<'a>, ExId);
fn next(&mut self) -> Option<Self::Item> {
self.range
.as_mut()?
.next()
.map(|(idx, value, id)| (idx, value, self.doc.id_to_exid(id)))
match &mut self.range {
ListRangeQuery::ListRange(query) => query
.next()
.map(|(idx, value, id)| (idx, value, self.doc.id_to_exid(id))),
ListRangeQuery::ListRangeAt(query) => query
.next()
.map(|(idx, value, id)| (idx, value, self.doc.id_to_exid(id))),
ListRangeQuery::None => None,
}
}
}

View file

@ -1,30 +0,0 @@
use crate::{exid::ExId, Value};
use std::ops::RangeBounds;
use crate::{query, Automerge};
/// An iterator over the elements of a list object at a particular set of heads
///
/// This is returned by the [`crate::ReadDoc::list_range_at`] method
#[derive(Debug)]
pub struct ListRangeAt<'a, R: RangeBounds<usize>> {
range: Option<query::ListRangeAt<'a, R>>,
doc: &'a Automerge,
}
impl<'a, R: RangeBounds<usize>> ListRangeAt<'a, R> {
pub(crate) fn new(doc: &'a Automerge, range: Option<query::ListRangeAt<'a, R>>) -> Self {
Self { range, doc }
}
}
impl<'a, R: RangeBounds<usize>> Iterator for ListRangeAt<'a, R> {
type Item = (usize, Value<'a>, ExId);
fn next(&mut self) -> Option<Self::Item> {
self.range
.as_mut()?
.next()
.map(|(key, value, id)| (key, value, self.doc.id_to_exid(id)))
}
}

View file

@ -8,13 +8,37 @@ use crate::{query, Automerge};
/// This is returned by the [`crate::ReadDoc::map_range`] method
#[derive(Debug)]
pub struct MapRange<'a, R: RangeBounds<String>> {
range: Option<query::MapRange<'a, R>>,
range: MapRangeQuery<'a, R>,
doc: &'a Automerge,
}
#[derive(Debug)]
enum MapRangeQuery<'a, R: RangeBounds<String>> {
MapRange(query::MapRange<'a, R>),
MapRangeAt(query::MapRangeAt<'a, R>),
None,
}
impl<'a, R: RangeBounds<String>> MapRange<'a, R> {
pub(crate) fn new(doc: &'a Automerge, range: Option<query::MapRange<'a, R>>) -> Self {
Self { range, doc }
pub(crate) fn new(doc: &'a Automerge) -> Self {
Self {
range: MapRangeQuery::None,
doc,
}
}
pub(crate) fn with_map_range(self, query: query::MapRange<'a, R>) -> Self {
let range = MapRangeQuery::MapRange(query);
Self {
range,
doc: self.doc,
}
}
pub(crate) fn with_map_range_at(self, query: query::MapRangeAt<'a, R>) -> Self {
let range = MapRangeQuery::MapRangeAt(query);
Self {
range,
doc: self.doc,
}
}
}
@ -22,18 +46,28 @@ impl<'a, R: RangeBounds<String>> Iterator for MapRange<'a, R> {
type Item = (&'a str, Value<'a>, ExId);
fn next(&mut self) -> Option<Self::Item> {
self.range
.as_mut()?
.next()
.map(|(key, value, id)| (key, value, self.doc.id_to_exid(id)))
match &mut self.range {
MapRangeQuery::MapRange(query) => query
.next()
.map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))),
MapRangeQuery::MapRangeAt(query) => query
.next()
.map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))),
MapRangeQuery::None => None,
}
}
}
impl<'a, R: RangeBounds<String>> DoubleEndedIterator for MapRange<'a, R> {
fn next_back(&mut self) -> Option<Self::Item> {
self.range
.as_mut()?
.next_back()
.map(|(key, value, id)| (key, value, self.doc.id_to_exid(id)))
match &mut self.range {
MapRangeQuery::MapRange(query) => query
.next_back()
.map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))),
MapRangeQuery::MapRangeAt(query) => query
.next_back()
.map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))),
MapRangeQuery::None => None,
}
}
}

View file

@ -1,39 +0,0 @@
use crate::{exid::ExId, Value};
use std::ops::RangeBounds;
use crate::{query, Automerge};
/// An iterator over the keys and values of a map object as at a particular set of heads
///
/// This is returned by the [`crate::ReadDoc::map_range_at`] method
#[derive(Debug)]
pub struct MapRangeAt<'a, R: RangeBounds<String>> {
range: Option<query::MapRangeAt<'a, R>>,
doc: &'a Automerge,
}
impl<'a, R: RangeBounds<String>> MapRangeAt<'a, R> {
pub(crate) fn new(doc: &'a Automerge, range: Option<query::MapRangeAt<'a, R>>) -> Self {
Self { range, doc }
}
}
impl<'a, R: RangeBounds<String>> Iterator for MapRangeAt<'a, R> {
type Item = (&'a str, Value<'a>, ExId);
fn next(&mut self) -> Option<Self::Item> {
self.range
.as_mut()?
.next()
.map(|(key, value, id)| (key, value, self.doc.id_to_exid(id)))
}
}
impl<'a, R: RangeBounds<String>> DoubleEndedIterator for MapRangeAt<'a, R> {
fn next_back(&mut self) -> Option<Self::Item> {
self.range
.as_mut()?
.next_back()
.map(|(key, value, id)| (key, value, self.doc.id_to_exid(id)))
}
}

236 rust/automerge/src/marks.rs Normal file
View file

@ -0,0 +1,236 @@
use smol_str::SmolStr;
use std::fmt;
use std::fmt::Display;
use crate::types::{Op, OpId, OpType};
use crate::value::ScalarValue;
use crate::Automerge;
use std::borrow::Cow;
#[derive(Debug, Clone, PartialEq)]
pub struct Mark<'a> {
pub start: usize,
pub end: usize,
pub(crate) data: Cow<'a, MarkData>,
}
impl<'a> Mark<'a> {
pub fn new<V: Into<ScalarValue>>(
name: String,
value: V,
start: usize,
end: usize,
) -> Mark<'static> {
Mark {
data: Cow::Owned(MarkData {
name: name.into(),
value: value.into(),
}),
start,
end,
}
}
pub(crate) fn is_null(&self) -> bool {
self.data.value.is_null()
}
pub(crate) fn from_data(start: usize, end: usize, data: &MarkData) -> Mark<'_> {
Mark {
data: Cow::Borrowed(data),
start,
end,
}
}
pub fn into_owned(self) -> Mark<'static> {
Mark {
data: Cow::Owned(self.data.into_owned()),
start: self.start,
end: self.end,
}
}
pub fn name(&self) -> &str {
self.data.name.as_str()
}
pub fn value(&self) -> &ScalarValue {
&self.data.value
}
}
#[derive(Debug, Clone, PartialEq, Default)]
pub(crate) struct MarkStateMachine<'a> {
state: Vec<(OpId, Mark<'a>)>,
}
impl<'a> MarkStateMachine<'a> {
pub(crate) fn mark_begin(
&mut self,
id: OpId,
pos: usize,
data: &'a MarkData,
doc: &'a Automerge,
) -> Option<Mark<'a>> {
self.mark_or_unmark_begin(id, pos, data, doc).and_then(|m| {
if m.is_null() {
None
} else {
Some(m)
}
})
}
pub(crate) fn mark_or_unmark(
&mut self,
op: &'a Op,
pos: usize,
doc: &'a Automerge,
) -> Option<Mark<'a>> {
match &op.action {
OpType::MarkBegin(_, m) => self.mark_or_unmark_begin(op.id, pos, m, &doc),
OpType::MarkEnd(_) => self.mark_or_unmark_end(op.id, pos, &doc),
_ => None,
}
}
pub(crate) fn mark_or_unmark_begin(
&mut self,
id: OpId,
pos: usize,
data: &'a MarkData,
doc: &'a Automerge,
) -> Option<Mark<'a>> {
let mut result = None;
let index = self.find(id, doc).err()?;
let mut mark = Mark::from_data(pos, pos, data);
if let Some(above) = Self::mark_above(&self.state, index, &mark) {
if above.value() == mark.value() {
mark.start = above.start;
}
} else if let Some(below) = Self::mark_below(&mut self.state, index, &mark) {
if below.value() == mark.value() {
mark.start = below.start;
} else {
let mut m = below.clone();
m.end = pos;
result = Some(m);
}
}
self.state.insert(index, (id, mark));
result
}
pub(crate) fn mark_end(
&mut self,
id: OpId,
pos: usize,
doc: &'a Automerge,
) -> Option<Mark<'a>> {
self.mark_or_unmark_end(id, pos, doc)
.and_then(|m| if m.is_null() { None } else { Some(m) })
}
pub(crate) fn mark_or_unmark_end(
&mut self,
id: OpId,
pos: usize,
doc: &'a Automerge,
) -> Option<Mark<'a>> {
let mut result = None;
let index = self.find(id.prev(), doc).ok()?;
let mut mark = self.state.remove(index).1;
mark.end = pos;
if Self::mark_above(&self.state, index, &mark).is_none() {
match Self::mark_below(&mut self.state, index, &mark) {
Some(below) if below.value() == mark.value() => {}
Some(below) => {
below.start = pos;
result = Some(mark.clone());
}
None => {
result = Some(mark.clone());
}
}
}
result
}
fn find(&self, target: OpId, doc: &Automerge) -> Result<usize, usize> {
let metadata = &doc.ops().m;
self.state
.binary_search_by(|probe| metadata.lamport_cmp(probe.0, target))
}
fn mark_above<'b>(
state: &'b [(OpId, Mark<'a>)],
index: usize,
mark: &Mark<'a>,
) -> Option<&'b Mark<'a>> {
Some(
&state[index..]
.iter()
.find(|(_, m)| m.name() == mark.name())?
.1,
)
}
fn mark_below<'b>(
state: &'b mut [(OpId, Mark<'a>)],
index: usize,
mark: &Mark<'a>,
) -> Option<&'b mut Mark<'a>> {
Some(
&mut state[0..index]
.iter_mut()
.filter(|(_, m)| m.data.name == mark.data.name)
.last()?
.1,
)
}
}
#[derive(PartialEq, Debug, Clone)]
pub struct MarkData {
pub name: SmolStr,
pub value: ScalarValue,
}
impl Display for MarkData {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "name={} value={}", self.name, self.value)
}
}
#[derive(PartialEq, Debug, Clone, Copy)]
pub enum ExpandMark {
Left,
Right,
Both,
None,
}
impl ExpandMark {
pub fn from(left: bool, right: bool) -> Self {
match (left, right) {
(true, true) => Self::Both,
(false, true) => Self::Right,
(true, false) => Self::Left,
(false, false) => Self::None,
}
}
pub fn left(&self) -> bool {
matches!(self, Self::Left | Self::Both)
}
pub fn right(&self) -> bool {
matches!(self, Self::Right | Self::Both)
}
}
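
`Mark` pairs a name/value with a `start..end` range, and `ExpandMark` records whether the mark grows when text is inserted at its left or right edge. A short sketch of the public surface shown above (the `bool`-to-`ScalarValue` conversion is assumed to exist, as it does elsewhere in the crate):

use automerge::marks::{ExpandMark, Mark};
use automerge::ScalarValue;

// An owned mark named "bold" with value true, covering positions 0..5.
let mark = Mark::new("bold".to_string(), true, 0, 5);
assert_eq!(mark.name(), "bold");
assert_eq!(mark.value(), &ScalarValue::Boolean(true));
assert_eq!((mark.start, mark.end), (0, 5));

// Edge behaviour: expand on the left only, on both sides, and so on.
assert_eq!(ExpandMark::from(true, false), ExpandMark::Left);
assert!(ExpandMark::Both.left() && ExpandMark::Both.right());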

View file

@ -1,10 +1,17 @@
use crate::exid::ExId;
use crate::marks::Mark;
use crate::Prop;
use crate::ReadDoc;
use crate::Value;
mod compose;
mod patch;
mod toggle_observer;
mod vec_observer;
pub use compose::compose;
pub use patch::{Patch, PatchAction};
pub use toggle_observer::ToggleObserver;
pub use vec_observer::{HasPatches, TextRepresentation, VecOpObserver, VecOpObserver16};
/// An observer of operations applied to the document.
pub trait OpObserver {
@ -21,6 +28,7 @@ pub trait OpObserver {
objid: ExId,
index: usize,
tagged_value: (Value<'_>, ExId),
conflict: bool,
);
/// Some text has been spliced into a text object
@ -111,6 +119,15 @@ pub trait OpObserver {
/// - `num`: the number of sequential elements deleted
fn delete_seq<R: ReadDoc>(&mut self, doc: &R, objid: ExId, index: usize, num: usize);
fn mark<'a, R: ReadDoc, M: Iterator<Item = Mark<'a>>>(
&mut self,
doc: &'a R,
objid: ExId,
mark: M,
);
fn unmark<R: ReadDoc>(&mut self, doc: &R, objid: ExId, name: &str, start: usize, end: usize);
/// Whether to call sequence methods or `splice_text` when encountering changes in text
///
/// Returns `false` by default
@ -146,6 +163,7 @@ impl OpObserver for () {
_objid: ExId,
_index: usize,
_tagged_value: (Value<'_>, ExId),
_conflict: bool,
) {
}
@ -180,6 +198,24 @@ impl OpObserver for () {
) {
}
fn mark<'a, R: ReadDoc, M: Iterator<Item = Mark<'a>>>(
&mut self,
_doc: &'a R,
_objid: ExId,
_mark: M,
) {
}
fn unmark<R: ReadDoc>(
&mut self,
_doc: &R,
_objid: ExId,
_name: &str,
_start: usize,
_end: usize,
) {
}
fn delete_map<R: ReadDoc>(&mut self, _doc: &R, _objid: ExId, _key: &str) {}
fn delete_seq<R: ReadDoc>(&mut self, _doc: &R, _objid: ExId, _index: usize, _num: usize) {}
@ -189,204 +225,3 @@ impl BranchableObserver for () {
fn merge(&mut self, _other: &Self) {}
fn branch(&self) -> Self {}
}
/// Capture operations into a [`Vec`] and store them as patches.
#[derive(Default, Debug, Clone)]
pub struct VecOpObserver {
patches: Vec<Patch>,
}
impl VecOpObserver {
/// Take the current list of patches, leaving the internal list empty and ready for new
/// patches.
pub fn take_patches(&mut self) -> Vec<Patch> {
std::mem::take(&mut self.patches)
}
}
impl OpObserver for VecOpObserver {
fn insert<R: ReadDoc>(
&mut self,
doc: &R,
obj: ExId,
index: usize,
(value, id): (Value<'_>, ExId),
) {
if let Ok(p) = doc.parents(&obj) {
self.patches.push(Patch::Insert {
obj,
path: p.path(),
index,
value: (value.into_owned(), id),
});
}
}
fn splice_text<R: ReadDoc>(&mut self, doc: &R, obj: ExId, index: usize, value: &str) {
if let Ok(p) = doc.parents(&obj) {
self.patches.push(Patch::Splice {
obj,
path: p.path(),
index,
value: value.to_string(),
})
}
}
fn put<R: ReadDoc>(
&mut self,
doc: &R,
obj: ExId,
prop: Prop,
(value, id): (Value<'_>, ExId),
conflict: bool,
) {
if let Ok(p) = doc.parents(&obj) {
self.patches.push(Patch::Put {
obj,
path: p.path(),
prop,
value: (value.into_owned(), id),
conflict,
});
}
}
fn expose<R: ReadDoc>(
&mut self,
doc: &R,
obj: ExId,
prop: Prop,
(value, id): (Value<'_>, ExId),
conflict: bool,
) {
if let Ok(p) = doc.parents(&obj) {
self.patches.push(Patch::Expose {
obj,
path: p.path(),
prop,
value: (value.into_owned(), id),
conflict,
});
}
}
fn increment<R: ReadDoc>(&mut self, doc: &R, obj: ExId, prop: Prop, tagged_value: (i64, ExId)) {
if let Ok(p) = doc.parents(&obj) {
self.patches.push(Patch::Increment {
obj,
path: p.path(),
prop,
value: tagged_value,
});
}
}
fn delete_map<R: ReadDoc>(&mut self, doc: &R, obj: ExId, key: &str) {
if let Ok(p) = doc.parents(&obj) {
self.patches.push(Patch::Delete {
obj,
path: p.path(),
prop: Prop::Map(key.to_owned()),
num: 1,
})
}
}
fn delete_seq<R: ReadDoc>(&mut self, doc: &R, obj: ExId, index: usize, num: usize) {
if let Ok(p) = doc.parents(&obj) {
self.patches.push(Patch::Delete {
obj,
path: p.path(),
prop: Prop::Seq(index),
num,
})
}
}
}
impl BranchableObserver for VecOpObserver {
fn merge(&mut self, other: &Self) {
self.patches.extend_from_slice(other.patches.as_slice())
}
fn branch(&self) -> Self {
Self::default()
}
}
/// A notification to the application that something has changed in a document.
#[derive(Debug, Clone, PartialEq)]
pub enum Patch {
/// Associating a new value with a prop in a map, or an existing list element
Put {
/// path to the object
path: Vec<(ExId, Prop)>,
/// The object that was put into.
obj: ExId,
/// The prop that the new value was put at.
prop: Prop,
/// The value that was put, and the id of the operation that put it there.
value: (Value<'static>, ExId),
/// Whether this put conflicts with another.
conflict: bool,
},
/// Exposing (via delete) an old but conflicted value with a prop in a map, or a list element
Expose {
/// path to the object
path: Vec<(ExId, Prop)>,
/// The object that was put into.
obj: ExId,
/// The prop that the new value was put at.
prop: Prop,
/// The value that was put, and the id of the operation that put it there.
value: (Value<'static>, ExId),
/// Whether this put conflicts with another.
conflict: bool,
},
/// Inserting a new element into a list
Insert {
/// path to the object
path: Vec<(ExId, Prop)>,
/// The object that was inserted into.
obj: ExId,
/// The index that the new value was inserted at.
index: usize,
/// The value that was inserted, and the id of the operation that inserted it there.
value: (Value<'static>, ExId),
},
/// Splicing a text object
Splice {
/// path to the object
path: Vec<(ExId, Prop)>,
/// The object that was inserted into.
obj: ExId,
/// The index that the new value was inserted at.
index: usize,
/// The value that was spliced
value: String,
},
/// Incrementing a counter.
Increment {
/// path to the object
path: Vec<(ExId, Prop)>,
/// The object that was incremented in.
obj: ExId,
/// The prop that was incremented.
prop: Prop,
/// The amount that the counter was incremented by, and the id of the operation that
/// did the increment.
value: (i64, ExId),
},
/// Deleting an element from a list/text
Delete {
/// path to the object
path: Vec<(ExId, Prop)>,
/// The object that was deleted from.
obj: ExId,
/// The prop that was deleted.
prop: Prop,
/// number of items deleted (for seq)
num: usize,
},
}

View file

@ -19,10 +19,11 @@ impl<'a, O1: OpObserver, O2: OpObserver> OpObserver for ComposeObservers<'a, O1,
objid: crate::ObjId,
index: usize,
tagged_value: (crate::Value<'_>, crate::ObjId),
conflict: bool,
) {
self.obs1
.insert(doc, objid.clone(), index, tagged_value.clone());
self.obs2.insert(doc, objid, index, tagged_value);
.insert(doc, objid.clone(), index, tagged_value.clone(), conflict);
self.obs2.insert(doc, objid, index, tagged_value, conflict);
}
fn splice_text<R: crate::ReadDoc>(
@ -84,6 +85,30 @@ impl<'a, O1: OpObserver, O2: OpObserver> OpObserver for ComposeObservers<'a, O1,
self.obs2.increment(doc, objid, prop, tagged_value);
}
fn mark<'b, R: crate::ReadDoc, M: Iterator<Item = crate::marks::Mark<'b>>>(
&mut self,
doc: &'b R,
objid: crate::ObjId,
mark: M,
) {
let marks: Vec<_> = mark.collect();
self.obs1
.mark(doc, objid.clone(), marks.clone().into_iter());
self.obs2.mark(doc, objid, marks.into_iter());
}
fn unmark<R: crate::ReadDoc>(
&mut self,
doc: &R,
objid: crate::ObjId,
name: &str,
start: usize,
end: usize,
) {
self.obs1.unmark(doc, objid.clone(), name, start, end);
self.obs2.unmark(doc, objid, name, start, end);
}
fn delete_map<R: crate::ReadDoc>(&mut self, doc: &R, objid: crate::ObjId, key: &str) {
self.obs1.delete_map(doc, objid.clone(), key);
self.obs2.delete_map(doc, objid, key);

View file

@ -0,0 +1,57 @@
#![allow(dead_code)]
use crate::{marks::Mark, ObjId, Prop, Value};
use core::fmt::Debug;
use crate::sequence_tree::SequenceTree;
#[derive(Debug, Clone, PartialEq)]
pub struct Patch<T: PartialEq + Clone + Debug> {
pub obj: ObjId,
pub path: Vec<(ObjId, Prop)>,
pub action: PatchAction<T>,
}
#[derive(Debug, Clone, PartialEq)]
pub enum PatchAction<T: PartialEq + Clone + Debug> {
PutMap {
key: String,
value: (Value<'static>, ObjId),
expose: bool,
conflict: bool,
},
PutSeq {
index: usize,
value: (Value<'static>, ObjId),
expose: bool,
conflict: bool,
},
Insert {
index: usize,
values: SequenceTree<(Value<'static>, ObjId)>,
conflict: bool,
},
SpliceText {
index: usize,
value: SequenceTree<T>,
},
Increment {
prop: Prop,
value: i64,
},
DeleteMap {
key: String,
},
DeleteSeq {
index: usize,
length: usize,
},
Mark {
marks: Vec<Mark<'static>>,
},
Unmark {
name: String,
start: usize,
end: usize,
},
}
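
The earlier test changes show observers emitting `Patch { obj, path, action }`, so downstream code is presumably expected to branch on `PatchAction` rather than on a flat patch enum. A hedged sketch of such a consumer, using the `char`-indexed patches that `VecOpObserver` produces (only a few variants handled, the rest printed via `Debug`):

use automerge::{Patch, PatchAction};

fn describe(patches: &[Patch<char>]) -> Vec<String> {
    patches
        .iter()
        .map(|p| match &p.action {
            PatchAction::PutMap { key, value, conflict, .. } => {
                format!("put {:?} at key {:?} (conflict: {})", value.0, key, conflict)
            }
            PatchAction::Insert { index, .. } => format!("insert at index {}", index),
            PatchAction::SpliceText { index, .. } => format!("splice text at {}", index),
            PatchAction::DeleteMap { key } => format!("delete key {:?}", key),
            PatchAction::Mark { marks } => format!("{} mark(s) applied", marks.len()),
            other => format!("{:?}", other),
        })
        .collect()
}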

View file

@ -0,0 +1,178 @@
#![allow(dead_code)]
use crate::ChangeHash;
use core::fmt::Debug;
use crate::{marks::Mark, ObjId, OpObserver, Prop, ReadDoc, Value};
use crate::op_observer::BranchableObserver;
use crate::op_observer::{HasPatches, TextRepresentation};
#[derive(Debug, Clone)]
pub struct ToggleObserver<T> {
enabled: bool,
last_heads: Option<Vec<ChangeHash>>,
observer: T,
}
impl<T: Default> Default for ToggleObserver<T> {
fn default() -> Self {
Self {
enabled: false,
last_heads: None,
observer: T::default(),
}
}
}
impl<T: HasPatches> ToggleObserver<T> {
pub fn new(observer: T) -> Self {
ToggleObserver {
enabled: false,
last_heads: None,
observer,
}
}
pub fn take_patches(&mut self, heads: Vec<ChangeHash>) -> (T::Patches, Vec<ChangeHash>) {
let old_heads = self.last_heads.replace(heads).unwrap_or_default();
let patches = self.observer.take_patches();
(patches, old_heads)
}
pub fn with_text_rep(mut self, text_rep: TextRepresentation) -> Self {
self.observer = self.observer.with_text_rep(text_rep);
self
}
pub fn set_text_rep(&mut self, text_rep: TextRepresentation) {
self.observer.set_text_rep(text_rep)
}
pub fn enable(&mut self, enable: bool, heads: Vec<ChangeHash>) -> bool {
if self.enabled && !enable {
self.observer.take_patches();
self.last_heads = Some(heads);
}
let old_enabled = self.enabled;
self.enabled = enable;
old_enabled
}
fn get_path<R: ReadDoc>(&mut self, doc: &R, obj: &ObjId) -> Option<Vec<(ObjId, Prop)>> {
match doc.parents(obj) {
Ok(parents) => parents.visible_path(),
Err(e) => {
log!("error generating patch : {:?}", e);
None
}
}
}
}
impl<T: OpObserver + HasPatches> OpObserver for ToggleObserver<T> {
fn insert<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
index: usize,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
if self.enabled {
self.observer
.insert(doc, obj, index, tagged_value, conflict)
}
}
fn splice_text<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, value: &str) {
if self.enabled {
self.observer.splice_text(doc, obj, index, value)
}
}
fn delete_seq<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, length: usize) {
if self.enabled {
self.observer.delete_seq(doc, obj, index, length)
}
}
fn delete_map<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, key: &str) {
if self.enabled {
self.observer.delete_map(doc, obj, key)
}
}
fn put<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
if self.enabled {
self.observer.put(doc, obj, prop, tagged_value, conflict)
}
}
fn expose<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
if self.enabled {
self.observer.expose(doc, obj, prop, tagged_value, conflict)
}
}
fn increment<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (i64, ObjId),
) {
if self.enabled {
self.observer.increment(doc, obj, prop, tagged_value)
}
}
fn mark<'a, R: ReadDoc, M: Iterator<Item = Mark<'a>>>(
&mut self,
doc: &'a R,
obj: ObjId,
mark: M,
) {
if self.enabled {
self.observer.mark(doc, obj, mark)
}
}
fn unmark<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, name: &str, start: usize, end: usize) {
if self.enabled {
self.observer.unmark(doc, obj, name, start, end)
}
}
fn text_as_seq(&self) -> bool {
self.observer.get_text_rep() == TextRepresentation::Array
}
}
impl<T: BranchableObserver> BranchableObserver for ToggleObserver<T> {
fn merge(&mut self, other: &Self) {
self.observer.merge(&other.observer)
}
fn branch(&self) -> Self {
ToggleObserver {
observer: self.observer.branch(),
last_heads: None,
enabled: self.enabled,
}
}
}
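
`ToggleObserver` wraps another observer and only forwards events while enabled, pairing each drain of patches with the heads at which observation last started. A minimal sketch of that lifecycle (attaching the observer to a document is omitted, since that wiring is not shown in this diff):

use automerge::{ChangeHash, ToggleObserver, VecOpObserver};

let mut obs = ToggleObserver::new(VecOpObserver::default());

// Initially disabled; `enable` returns the previous state.
let was_enabled = obs.enable(true, Vec::<ChangeHash>::new());
assert!(!was_enabled);

// ... document changes would be forwarded to `obs` here ...

// Draining returns the collected patches plus the heads recorded last time.
let (patches, since) = obs.take_patches(Vec::new());
assert!(patches.is_empty() && since.is_empty());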

View file

@ -0,0 +1,561 @@
#![allow(dead_code)]
use core::fmt::Debug;
use crate::{marks::Mark, ObjId, OpObserver, Prop, ReadDoc, ScalarValue, Value};
use crate::sequence_tree::SequenceTree;
use crate::op_observer::BranchableObserver;
use crate::op_observer::{Patch, PatchAction};
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum TextRepresentation {
Array,
String,
}
impl TextRepresentation {
pub fn is_array(&self) -> bool {
matches!(self, TextRepresentation::Array)
}
pub fn is_string(&self) -> bool {
matches!(self, TextRepresentation::String)
}
}
impl std::default::Default for TextRepresentation {
fn default() -> Self {
TextRepresentation::Array
}
}
pub(crate) trait TextIndex {
type Item: Debug + PartialEq + Clone;
type Iter<'a>: Iterator<Item = Self::Item>;
fn chars(text: &str) -> Self::Iter<'_>;
}
#[derive(Debug, Clone, Default)]
struct VecOpObserverInner<T: TextIndex> {
pub(crate) patches: Vec<Patch<T::Item>>,
pub(crate) text_rep: TextRepresentation,
}
#[derive(Debug, Clone, Default)]
pub struct VecOpObserver(VecOpObserverInner<Utf8TextIndex>);
#[derive(Debug, Clone, Default)]
pub struct VecOpObserver16(VecOpObserverInner<Utf16TextIndex>);
#[derive(Debug, Clone, Default)]
pub(crate) struct Utf16TextIndex;
#[derive(Debug, Clone, Default)]
pub(crate) struct Utf8TextIndex;
impl TextIndex for Utf8TextIndex {
type Item = char;
type Iter<'a> = std::str::Chars<'a>;
fn chars(text: &str) -> Self::Iter<'_> {
text.chars()
}
}
impl TextIndex for Utf16TextIndex {
type Item = u16;
type Iter<'a> = std::str::EncodeUtf16<'a>;
fn chars(text: &str) -> Self::Iter<'_> {
text.encode_utf16()
}
}
pub trait HasPatches {
type Patches;
fn take_patches(&mut self) -> Self::Patches;
fn with_text_rep(self, text_rep: TextRepresentation) -> Self;
fn set_text_rep(&mut self, text_rep: TextRepresentation);
fn get_text_rep(&self) -> TextRepresentation;
}
impl HasPatches for VecOpObserver {
type Patches = Vec<Patch<char>>;
fn take_patches(&mut self) -> Self::Patches {
std::mem::take(&mut self.0.patches)
}
fn with_text_rep(mut self, text_rep: TextRepresentation) -> Self {
self.0.text_rep = text_rep;
self
}
fn set_text_rep(&mut self, text_rep: TextRepresentation) {
self.0.text_rep = text_rep;
}
fn get_text_rep(&self) -> TextRepresentation {
self.0.text_rep
}
}
impl HasPatches for VecOpObserver16 {
type Patches = Vec<Patch<u16>>;
fn take_patches(&mut self) -> Self::Patches {
std::mem::take(&mut self.0.patches)
}
fn with_text_rep(mut self, text_rep: TextRepresentation) -> Self {
self.0.text_rep = text_rep;
self
}
fn set_text_rep(&mut self, text_rep: TextRepresentation) {
self.0.text_rep = text_rep;
}
fn get_text_rep(&self) -> TextRepresentation {
self.0.text_rep
}
}
impl<T: TextIndex> VecOpObserverInner<T> {
fn get_path<R: ReadDoc>(&mut self, doc: &R, obj: &ObjId) -> Option<Vec<(ObjId, Prop)>> {
match doc.parents(obj) {
Ok(parents) => parents.visible_path(),
Err(e) => {
log!("error generating patch : {:?}", e);
None
}
}
}
fn maybe_append(&mut self, obj: &ObjId) -> Option<&mut PatchAction<T::Item>> {
match self.patches.last_mut() {
Some(Patch {
obj: tail_obj,
action,
..
}) if obj == tail_obj => Some(action),
_ => None,
}
}
}
impl<T: TextIndex> OpObserver for VecOpObserverInner<T> {
fn insert<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
index: usize,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
let value = (tagged_value.0.to_owned(), tagged_value.1);
if let Some(PatchAction::Insert {
index: tail_index,
values,
..
}) = self.maybe_append(&obj)
{
let range = *tail_index..=*tail_index + values.len();
if range.contains(&index) {
values.insert(index - *tail_index, value);
return;
}
}
if let Some(path) = self.get_path(doc, &obj) {
let mut values = SequenceTree::new();
values.push(value);
let action = PatchAction::Insert {
index,
values,
conflict,
};
self.patches.push(Patch { obj, path, action });
}
}
fn splice_text<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, value: &str) {
if self.text_rep == TextRepresentation::Array {
for (offset, c) in value.chars().map(ScalarValue::from).enumerate() {
let value = (c.into(), ObjId::Root);
self.insert(doc, obj.clone(), index + offset, value, false);
}
return;
}
if let Some(PatchAction::SpliceText {
index: tail_index,
value: prev_value,
..
}) = self.maybe_append(&obj)
{
let range = *tail_index..=*tail_index + prev_value.len();
if range.contains(&index) {
let i = index - *tail_index;
for (n, ch) in T::chars(value).enumerate() {
prev_value.insert(i + n, ch)
}
return;
}
}
if let Some(path) = self.get_path(doc, &obj) {
let mut v = SequenceTree::new();
for ch in T::chars(value) {
v.push(ch)
}
let action = PatchAction::SpliceText { index, value: v };
self.patches.push(Patch { obj, path, action });
}
}
fn delete_seq<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, length: usize) {
match self.maybe_append(&obj) {
Some(PatchAction::SpliceText {
index: tail_index,
value,
..
}) => {
let range = *tail_index..*tail_index + value.len();
if range.contains(&index) && range.contains(&(index + length - 1)) {
for _ in 0..length {
value.remove(index - *tail_index);
}
return;
}
}
Some(PatchAction::Insert {
index: tail_index,
values,
..
}) => {
let range = *tail_index..*tail_index + values.len();
if range.contains(&index) && range.contains(&(index + length - 1)) {
for _ in 0..length {
values.remove(index - *tail_index);
}
return;
}
}
Some(PatchAction::DeleteSeq {
index: tail_index,
length: tail_length,
..
}) => {
if index == *tail_index {
*tail_length += length;
return;
}
}
_ => {}
}
if let Some(path) = self.get_path(doc, &obj) {
let action = PatchAction::DeleteSeq { index, length };
self.patches.push(Patch { obj, path, action })
}
}
fn delete_map<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, key: &str) {
if let Some(path) = self.get_path(doc, &obj) {
let action = PatchAction::DeleteMap {
key: key.to_owned(),
};
self.patches.push(Patch { obj, path, action })
}
}
fn put<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
let expose = false;
if let Some(path) = self.get_path(doc, &obj) {
let value = (tagged_value.0.to_owned(), tagged_value.1);
let action = match prop {
Prop::Map(key) => PatchAction::PutMap {
key,
value,
expose,
conflict,
},
Prop::Seq(index) => PatchAction::PutSeq {
index,
value,
expose,
conflict,
},
};
self.patches.push(Patch { obj, path, action })
}
}
fn expose<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
let expose = true;
if let Some(path) = self.get_path(doc, &obj) {
let value = (tagged_value.0.to_owned(), tagged_value.1);
let action = match prop {
Prop::Map(key) => PatchAction::PutMap {
key,
value,
expose,
conflict,
},
Prop::Seq(index) => PatchAction::PutSeq {
index,
value,
expose,
conflict,
},
};
self.patches.push(Patch { obj, path, action })
}
}
fn increment<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (i64, ObjId),
) {
if let Some(path) = self.get_path(doc, &obj) {
let value = tagged_value.0;
let action = PatchAction::Increment { prop, value };
self.patches.push(Patch { obj, path, action })
}
}
fn mark<'a, R: ReadDoc, M: Iterator<Item = Mark<'a>>>(
&mut self,
doc: &'a R,
obj: ObjId,
mark: M,
) {
if let Some(PatchAction::Mark { marks, .. }) = self.maybe_append(&obj) {
for m in mark {
marks.push(m.into_owned())
}
return;
}
if let Some(path) = self.get_path(doc, &obj) {
let marks: Vec<_> = mark.map(|m| m.into_owned()).collect();
if !marks.is_empty() {
let action = PatchAction::Mark { marks };
self.patches.push(Patch { obj, path, action });
}
}
}
fn unmark<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, name: &str, start: usize, end: usize) {
if let Some(path) = self.get_path(doc, &obj) {
let action = PatchAction::Unmark {
name: name.to_string(),
start,
end,
};
self.patches.push(Patch { obj, path, action });
}
}
fn text_as_seq(&self) -> bool {
self.text_rep == TextRepresentation::Array
}
}
impl<T: TextIndex> BranchableObserver for VecOpObserverInner<T> {
fn merge(&mut self, other: &Self) {
self.patches.extend_from_slice(other.patches.as_slice())
}
fn branch(&self) -> Self {
VecOpObserverInner {
patches: vec![],
text_rep: self.text_rep,
}
}
}
impl OpObserver for VecOpObserver {
fn insert<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
index: usize,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
self.0.insert(doc, obj, index, tagged_value, conflict)
}
fn splice_text<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, value: &str) {
self.0.splice_text(doc, obj, index, value)
}
fn delete_seq<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, length: usize) {
self.0.delete_seq(doc, obj, index, length)
}
fn delete_map<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, key: &str) {
self.0.delete_map(doc, obj, key)
}
fn put<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
self.0.put(doc, obj, prop, tagged_value, conflict)
}
fn expose<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
self.0.expose(doc, obj, prop, tagged_value, conflict)
}
fn increment<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (i64, ObjId),
) {
self.0.increment(doc, obj, prop, tagged_value)
}
fn mark<'a, R: ReadDoc, M: Iterator<Item = Mark<'a>>>(
&mut self,
doc: &'a R,
obj: ObjId,
mark: M,
) {
self.0.mark(doc, obj, mark)
}
fn unmark<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, name: &str, start: usize, end: usize) {
self.0.unmark(doc, obj, name, start, end)
}
fn text_as_seq(&self) -> bool {
self.0.text_as_seq()
}
}
impl OpObserver for VecOpObserver16 {
fn insert<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
index: usize,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
self.0.insert(doc, obj, index, tagged_value, conflict)
}
fn splice_text<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, value: &str) {
self.0.splice_text(doc, obj, index, value)
}
fn delete_seq<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, index: usize, length: usize) {
self.0.delete_seq(doc, obj, index, length)
}
fn delete_map<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, key: &str) {
self.0.delete_map(doc, obj, key)
}
fn put<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
self.0.put(doc, obj, prop, tagged_value, conflict)
}
fn expose<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (Value<'_>, ObjId),
conflict: bool,
) {
self.0.expose(doc, obj, prop, tagged_value, conflict)
}
fn increment<R: ReadDoc>(
&mut self,
doc: &R,
obj: ObjId,
prop: Prop,
tagged_value: (i64, ObjId),
) {
self.0.increment(doc, obj, prop, tagged_value)
}
fn mark<'a, R: ReadDoc, M: Iterator<Item = Mark<'a>>>(
&mut self,
doc: &'a R,
obj: ObjId,
mark: M,
) {
self.0.mark(doc, obj, mark)
}
fn unmark<R: ReadDoc>(&mut self, doc: &R, obj: ObjId, name: &str, start: usize, end: usize) {
self.0.unmark(doc, obj, name, start, end)
}
fn text_as_seq(&self) -> bool {
self.0.text_as_seq()
}
}
impl BranchableObserver for VecOpObserver {
fn merge(&mut self, other: &Self) {
self.0.merge(&other.0)
}
fn branch(&self) -> Self {
VecOpObserver(self.0.branch())
}
}
impl BranchableObserver for VecOpObserver16 {
fn merge(&mut self, other: &Self) {
self.0.merge(&other.0)
}
fn branch(&self) -> Self {
VecOpObserver16(self.0.branch())
}
}
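
`VecOpObserver` and `VecOpObserver16` are thin wrappers around one generic implementation, differing only in whether spliced text is indexed by `char` or by UTF-16 code unit, and `TextRepresentation` chooses between `SpliceText` patches and per-element inserts. A small configuration sketch (document wiring again omitted):

use automerge::op_observer::{HasPatches, TextRepresentation};
use automerge::VecOpObserver;

// Report text edits as SpliceText patches rather than element inserts.
let mut obs = VecOpObserver::default().with_text_rep(TextRepresentation::String);
assert!(obs.get_text_rep().is_string());

// Nothing observed yet, so draining yields no patches.
assert!(obs.take_patches().is_empty());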

View file

@ -78,6 +78,10 @@ impl OpSetInternal {
}
}
pub(crate) fn iter_ops(&self, obj: &ObjId) -> impl Iterator<Item = &Op> {
self.trees.get(obj).map(|o| o.iter()).into_iter().flatten()
}
pub(crate) fn parents(&self, obj: ObjId) -> Parents<'_> {
Parents { obj, ops: self }
}

View file

@ -319,8 +319,7 @@ struct CounterData {
#[cfg(test)]
mod tests {
use crate::legacy as amp;
use crate::types::{Op, OpId};
use crate::types::{Op, OpId, OpType};
use super::*;
@ -328,7 +327,7 @@ mod tests {
let zero = OpId::new(0, 0);
Op {
id: zero,
action: amp::OpType::Put(0.into()),
action: OpType::Put(0.into()),
key: zero.into(),
succ: Default::default(),
pred: Default::default(),

View file

@ -25,6 +25,7 @@ mod opid;
mod opid_vis;
mod prop;
mod prop_at;
mod seek_mark;
mod seek_op;
mod seek_op_with_patch;
@ -46,6 +47,7 @@ pub(crate) use opid::OpIdSearch;
pub(crate) use opid_vis::OpIdVisSearch;
pub(crate) use prop::Prop;
pub(crate) use prop_at::PropAt;
pub(crate) use seek_mark::SeekMark;
pub(crate) use seek_op::SeekOp;
pub(crate) use seek_op_with_patch::SeekOpWithPatch;
@ -287,7 +289,7 @@ pub(crate) struct VisWindow {
}
impl VisWindow {
fn visible_at(&mut self, op: &Op, pos: usize, clock: &Clock) -> bool {
pub(crate) fn visible_at(&mut self, op: &Op, pos: usize, clock: &Clock) -> bool {
if !clock.covers(&op.id) {
return false;
}

View file

@ -110,6 +110,12 @@ impl<'a> TreeQuery<'a> for InsertNth {
self.last_seen = None;
self.last_insert = element.elemid();
}
/*-------------------*/
if self.valid.is_some() && element.valid_mark_anchor() {
self.last_valid_insert = Some(element.elemid_or_key());
self.valid = None;
}
/*-------------------*/
if self.last_seen.is_none() && element.visible() {
if self.seen >= self.target {
return QueryResult::Finish;

View file

@ -0,0 +1,124 @@
use crate::marks::Mark;
use crate::op_tree::OpSetMetadata;
use crate::query::{QueryResult, TreeQuery};
use crate::types::{Key, ListEncoding, Op, OpId, OpType};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct SeekMark<'a> {
/// the mark we are looking for
id: OpId,
end: usize,
encoding: ListEncoding,
found: bool,
mark_name: smol_str::SmolStr,
next_mark: Option<Mark<'a>>,
pos: usize,
seen: usize,
last_seen: Option<Key>,
super_marks: HashMap<OpId, smol_str::SmolStr>,
pub(crate) marks: Vec<Mark<'a>>,
}
impl<'a> SeekMark<'a> {
pub(crate) fn new(id: OpId, end: usize, encoding: ListEncoding) -> Self {
SeekMark {
id,
encoding,
end,
found: false,
next_mark: None,
mark_name: "".into(),
pos: 0,
seen: 0,
last_seen: None,
super_marks: Default::default(),
marks: Default::default(),
}
}
fn count_visible(&mut self, e: &Op) {
if e.insert {
self.last_seen = None
}
if e.visible() && self.last_seen.is_none() {
self.seen += e.width(self.encoding);
self.last_seen = Some(e.elemid_or_key())
}
}
}
impl<'a> TreeQuery<'a> for SeekMark<'a> {
fn query_element_with_metadata(&mut self, op: &'a Op, m: &OpSetMetadata) -> QueryResult {
match &op.action {
OpType::MarkBegin(_, data) if op.id == self.id => {
if !op.succ.is_empty() {
return QueryResult::Finish;
}
self.found = true;
self.mark_name = data.name.clone();
// retain the name and the value
self.next_mark = Some(Mark::from_data(self.seen, self.seen, data));
// change id to the end id
self.id = self.id.next();
// remove all marks that don't match
self.super_marks.retain(|_, v| v == &data.name);
}
OpType::MarkBegin(_, mark) => {
if m.lamport_cmp(op.id, self.id) == Ordering::Greater {
if let Some(next_mark) = &mut self.next_mark {
// gather marks of the same type that supersede us
if mark.name == self.mark_name {
self.super_marks.insert(op.id.next(), mark.name.clone());
if self.super_marks.len() == 1 {
// complete a mark
next_mark.end = self.seen;
self.marks.push(next_mark.clone());
}
}
} else {
// gather all marks until we know what our mark's name is
self.super_marks.insert(op.id.next(), mark.name.clone());
}
}
}
OpType::MarkEnd(_) if self.end == self.pos => {
if self.super_marks.is_empty() {
// complete a mark
if let Some(next_mark) = &mut self.next_mark {
next_mark.end = self.seen;
self.marks.push(next_mark.clone());
}
}
return QueryResult::Finish;
}
OpType::MarkEnd(_) if self.super_marks.contains_key(&op.id) => {
self.super_marks.remove(&op.id);
if let Some(next_mark) = &mut self.next_mark {
if self.super_marks.is_empty() {
// begin a new mark
next_mark.start = self.seen;
}
}
}
_ => {}
}
// the end op hasn't been inserted yet so we need to work off the position
if self.end == self.pos {
if self.super_marks.is_empty() {
// complete a mark
if let Some(next_mark) = &mut self.next_mark {
next_mark.end = self.seen;
self.marks.push(next_mark.clone());
}
}
return QueryResult::Finish;
}
self.pos += 1;
self.count_visible(op);
QueryResult::Next
}
}
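The query above leans on a convention that also shows up in the `OpId::next`/`prev` helpers added further down: a mark is written as two ops in the same transaction, so the `MarkEnd` op's id is the `MarkBegin` op's id with its counter bumped by one for the same actor. That is why `SeekMark` can switch its target from the begin op to the end op with `self.id.next()`. A standalone sketch of the invariant, using a hypothetical `OpId` rather than the crate's type:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct OpId {
    counter: u64,
    actor: usize,
}

impl OpId {
    fn next(self) -> OpId {
        OpId { counter: self.counter + 1, ..self }
    }
}

fn main() {
    // The begin op of a mark gets some id; the matching end op, created by the
    // same transaction immediately afterwards, gets the next counter value.
    let begin = OpId { counter: 10, actor: 0 };
    let end = begin.next();
    assert_eq!(end, OpId { counter: 11, actor: 0 });
}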

View file

@ -1,7 +1,6 @@
use crate::{
error::AutomergeError, exid::ExId, keys::Keys, keys_at::KeysAt, list_range::ListRange,
list_range_at::ListRangeAt, map_range::MapRange, map_range_at::MapRangeAt, parents::Parents,
values::Values, Change, ChangeHash, ObjType, Prop, Value,
error::AutomergeError, exid::ExId, keys::Keys, list_range::ListRange, map_range::MapRange,
marks::Mark, parents::Parents, values::Values, Change, ChangeHash, ObjType, Prop, Value,
};
use std::ops::RangeBounds;
@ -42,12 +41,7 @@ pub trait ReadDoc {
///
/// For a map this returns the keys of the map.
/// For a list this returns the element ids (opids) encoded as strings.
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_, '_>;
/// Get the keys of the object `obj` as at `heads`
///
/// See [`Self::keys`]
fn keys_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_>;
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_>;
/// Iterate over the keys and values of the map `obj` in the given range.
///
@ -61,22 +55,6 @@ pub trait ReadDoc {
range: R,
) -> MapRange<'_, R>;
/// Iterate over the keys and values of the map `obj` in the given range as
/// at `heads`
///
/// If the object corresponding to `obj` is a list then this will return an empty iterator
///
/// The returned iterator yields `(key, value, exid)` tuples, where the
/// third element is the ID of the operation which created the value.
///
/// See [`Self::map_range`]
fn map_range_at<O: AsRef<ExId>, R: RangeBounds<String>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> MapRangeAt<'_, R>;
/// Iterate over the indexes and values of the list or text `obj` in the given range.
///
/// The returned iterator yields `(index, value, exid)` tuples, where the third
@ -87,59 +65,28 @@ pub trait ReadDoc {
range: R,
) -> ListRange<'_, R>;
/// Iterate over the indexes and values of the list or text `obj` in the given range as at `heads`
///
/// The returned iterator yields `(index, value, exid)` tuples, where the third
/// element is the ID of the operation which created the value.
///
/// See [`Self::list_range`]
fn list_range_at<O: AsRef<ExId>, R: RangeBounds<usize>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> ListRangeAt<'_, R>;
/// Iterate over the values in a map, list, or text object
///
/// The returned iterator yields `(value, exid)` tuples, where the second element
/// is the ID of the operation which created the value.
fn values<O: AsRef<ExId>>(&self, obj: O) -> Values<'_>;
/// Iterate over the values in a map, list, or text object as at `heads`
///
/// The returned iterator yields `(value, exid)` tuples, where the second element
/// is the ID of the operation which created the value.
///
/// See [`Self::values`]
fn values_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_>;
fn values<O: AsRef<ExId>>(&self, obj: O) -> Values<'_> {
todo!()
}
/// Get the length of the given object.
///
/// If the given object is not in this document this method will return `0`
fn length<O: AsRef<ExId>>(&self, obj: O) -> usize;
/// Get the length of the given object as at `heads`
///
/// If the given object is not in this document this method will return `0`
///
/// See [`Self::length`]
fn length_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> usize;
/// Get the type of this object, if it is an object.
fn object_type<O: AsRef<ExId>>(&self, obj: O) -> Result<ObjType, AutomergeError>;
/// Get all marks currently present on the sequence object `obj`
fn marks<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<Mark<'_>>, AutomergeError>;
/// Get the string represented by the given text object.
fn text<O: AsRef<ExId>>(&self, obj: O) -> Result<String, AutomergeError>;
/// Get the string represented by the given text object as at `heads`, see
/// [`Self::text`]
fn text_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<String, AutomergeError>;
/// Get a value out of the document.
///
/// This returns a tuple of `(value, object ID)`. This is for two reasons:
@ -161,14 +108,6 @@ pub trait ReadDoc {
prop: P,
) -> Result<Option<(Value<'_>, ExId)>, AutomergeError>;
/// Get the value of the given key as at `heads`, see `[Self::get]`
fn get_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Option<(Value<'_>, ExId)>, AutomergeError>;
/// Get all conflicting values out of the document at this prop.
///
/// If there are multiple conflicting values for a given key this method
@ -180,16 +119,6 @@ pub trait ReadDoc {
prop: P,
) -> Result<Vec<(Value<'_>, ExId)>, AutomergeError>;
/// Get all possibly conflicting values for a key as at `heads`
///
/// See `[Self::get_all]`
fn get_all_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Vec<(Value<'_>, ExId)>, AutomergeError>;
/// Get the hashes of the changes in this document that aren't transitive dependencies of the
/// given `heads`.
fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec<ChangeHash>;
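For orientation, here is a minimal sketch of how the new `marks` read method fits together with the `mark`/`unmark` transaction methods introduced later in this diff. The `AutoCommit` front end, the `automerge::marks` module path and the exact shape of `Mark::new` are assumptions based on the rest of the crate rather than something this hunk pins down:

use automerge::marks::{ExpandMark, Mark};
use automerge::transaction::Transactable;
use automerge::{AutoCommit, AutomergeError, ObjType, ReadDoc, ScalarValue, ROOT};

fn main() -> Result<(), AutomergeError> {
    let mut doc = AutoCommit::new();
    let text = doc.put_object(ROOT, "note", ObjType::Text)?;
    doc.splice_text(&text, 0, 0, "hello marks")?;

    // Mark characters 0..5 as bold; ExpandMark controls whether edits at the
    // boundaries of the range inherit the mark.
    doc.mark(
        &text,
        Mark::new("bold".to_string(), ScalarValue::Boolean(true), 0, 5),
        ExpandMark::None,
    )?;

    // Read the marks back through ReadDoc::marks.
    let marks = doc.marks(&text)?;
    println!("{} mark(s) on the text object", marks.len());

    // Removing the formatting again is expressed internally as a null-valued
    // mark over the same range.
    doc.unmark(&text, "bold", 0, 5)?;
    Ok(())
}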

View file

@ -0,0 +1,617 @@
use std::{
cmp::{min, Ordering},
fmt::Debug,
mem,
};
pub(crate) const B: usize = 16;
pub(crate) type SequenceTree<T> = SequenceTreeInternal<T>;
#[derive(Clone, Debug)]
pub struct SequenceTreeInternal<T> {
root_node: Option<SequenceTreeNode<T>>,
}
#[derive(Clone, Debug, PartialEq)]
struct SequenceTreeNode<T> {
elements: Vec<T>,
children: Vec<SequenceTreeNode<T>>,
length: usize,
}
impl<T> SequenceTreeInternal<T>
where
T: Clone + Debug,
{
/// Construct a new, empty, sequence.
pub fn new() -> Self {
Self { root_node: None }
}
/// Get the length of the sequence.
pub fn len(&self) -> usize {
self.root_node.as_ref().map_or(0, |n| n.len())
}
/// Create an iterator through the sequence.
pub fn iter(&self) -> Iter<'_, T> {
Iter {
inner: self,
index: 0,
}
}
/// Insert the `element` into the sequence at `index`.
///
/// # Panics
///
/// Panics if `index > len`.
pub fn insert(&mut self, index: usize, element: T) {
let old_len = self.len();
if let Some(root) = self.root_node.as_mut() {
#[cfg(debug_assertions)]
root.check();
if root.is_full() {
let original_len = root.len();
let new_root = SequenceTreeNode::new();
// move new_root to root position
let old_root = mem::replace(root, new_root);
root.length += old_root.len();
root.children.push(old_root);
root.split_child(0);
assert_eq!(original_len, root.len());
// after splitting the root has one element and two children, find which child the
// index is in
let first_child_len = root.children[0].len();
let (child, insertion_index) = if first_child_len < index {
(&mut root.children[1], index - (first_child_len + 1))
} else {
(&mut root.children[0], index)
};
root.length += 1;
child.insert_into_non_full_node(insertion_index, element)
} else {
root.insert_into_non_full_node(index, element)
}
} else {
self.root_node = Some(SequenceTreeNode {
elements: vec![element],
children: Vec::new(),
length: 1,
})
}
assert_eq!(self.len(), old_len + 1, "{:#?}", self);
}
/// Push the `element` onto the back of the sequence.
pub fn push(&mut self, element: T) {
let l = self.len();
self.insert(l, element)
}
/// Get the `element` at `index` in the sequence.
pub fn get(&self, index: usize) -> Option<&T> {
self.root_node.as_ref().and_then(|n| n.get(index))
}
/// Removes the element at `index` from the sequence.
///
/// # Panics
///
/// Panics if `index` is out of bounds.
pub fn remove(&mut self, index: usize) -> T {
if let Some(root) = self.root_node.as_mut() {
#[cfg(debug_assertions)]
let len = root.check();
let old = root.remove(index);
if root.elements.is_empty() {
if root.is_leaf() {
self.root_node = None;
} else {
self.root_node = Some(root.children.remove(0));
}
}
#[cfg(debug_assertions)]
debug_assert_eq!(len, self.root_node.as_ref().map_or(0, |r| r.check()) + 1);
old
} else {
panic!("remove from empty tree")
}
}
}
impl<T> SequenceTreeNode<T>
where
T: Clone + Debug,
{
fn new() -> Self {
Self {
elements: Vec::new(),
children: Vec::new(),
length: 0,
}
}
fn len(&self) -> usize {
self.length
}
fn is_leaf(&self) -> bool {
self.children.is_empty()
}
fn is_full(&self) -> bool {
self.elements.len() >= 2 * B - 1
}
/// Returns the child index and the given index adjusted for the cumulative index before that
/// child.
fn find_child_index(&self, index: usize) -> (usize, usize) {
let mut cumulative_len = 0;
for (child_index, child) in self.children.iter().enumerate() {
if cumulative_len + child.len() >= index {
return (child_index, index - cumulative_len);
} else {
cumulative_len += child.len() + 1;
}
}
panic!("index not found in node")
}
fn insert_into_non_full_node(&mut self, index: usize, element: T) {
assert!(!self.is_full());
if self.is_leaf() {
self.length += 1;
self.elements.insert(index, element);
} else {
let (child_index, sub_index) = self.find_child_index(index);
let child = &mut self.children[child_index];
if child.is_full() {
self.split_child(child_index);
// child structure has changed so we need to find the index again
let (child_index, sub_index) = self.find_child_index(index);
let child = &mut self.children[child_index];
child.insert_into_non_full_node(sub_index, element);
} else {
child.insert_into_non_full_node(sub_index, element);
}
self.length += 1;
}
}
// A utility function to split the child `full_child_index` of this node
// Note that `full_child_index` must be full when this function is called.
fn split_child(&mut self, full_child_index: usize) {
let original_len_self = self.len();
// Create a new node which is going to store (B-1) keys
// of the full child.
let mut successor_sibling = SequenceTreeNode::new();
let full_child = &mut self.children[full_child_index];
let original_len = full_child.len();
assert!(full_child.is_full());
successor_sibling.elements = full_child.elements.split_off(B);
if !full_child.is_leaf() {
successor_sibling.children = full_child.children.split_off(B);
}
let middle = full_child.elements.pop().unwrap();
full_child.length =
full_child.elements.len() + full_child.children.iter().map(|c| c.len()).sum::<usize>();
successor_sibling.length = successor_sibling.elements.len()
+ successor_sibling
.children
.iter()
.map(|c| c.len())
.sum::<usize>();
let z_len = successor_sibling.len();
let full_child_len = full_child.len();
self.children
.insert(full_child_index + 1, successor_sibling);
self.elements.insert(full_child_index, middle);
assert_eq!(full_child_len + z_len + 1, original_len, "{:#?}", self);
assert_eq!(original_len_self, self.len());
}
fn remove_from_leaf(&mut self, index: usize) -> T {
self.length -= 1;
self.elements.remove(index)
}
fn remove_element_from_non_leaf(&mut self, index: usize, element_index: usize) -> T {
self.length -= 1;
if self.children[element_index].elements.len() >= B {
let total_index = self.cumulative_index(element_index);
// recursively delete index - 1 in predecessor_node
let predecessor = self.children[element_index].remove(index - 1 - total_index);
// replace element with that one
mem::replace(&mut self.elements[element_index], predecessor)
} else if self.children[element_index + 1].elements.len() >= B {
// recursively delete index + 1 in successor_node
let total_index = self.cumulative_index(element_index + 1);
let successor = self.children[element_index + 1].remove(index + 1 - total_index);
// replace element with that one
mem::replace(&mut self.elements[element_index], successor)
} else {
let middle_element = self.elements.remove(element_index);
let successor_child = self.children.remove(element_index + 1);
self.children[element_index].merge(middle_element, successor_child);
let total_index = self.cumulative_index(element_index);
self.children[element_index].remove(index - total_index)
}
}
fn cumulative_index(&self, child_index: usize) -> usize {
self.children[0..child_index]
.iter()
.map(|c| c.len() + 1)
.sum()
}
fn remove_from_internal_child(&mut self, index: usize, mut child_index: usize) -> T {
if self.children[child_index].elements.len() < B
&& if child_index > 0 {
self.children[child_index - 1].elements.len() < B
} else {
true
}
&& if child_index + 1 < self.children.len() {
self.children[child_index + 1].elements.len() < B
} else {
true
}
{
// if the child and its immediate siblings have B-1 elements, merge the child
// with one sibling, moving an element from this node into the new merged node
// to be the median
if child_index > 0 {
let middle = self.elements.remove(child_index - 1);
// use the predecessor sibling
let successor = self.children.remove(child_index);
child_index -= 1;
self.children[child_index].merge(middle, successor);
} else {
let middle = self.elements.remove(child_index);
// use the successor sibling
let successor = self.children.remove(child_index + 1);
self.children[child_index].merge(middle, successor);
}
} else if self.children[child_index].elements.len() < B {
if child_index > 0
&& self
.children
.get(child_index - 1)
.map_or(false, |c| c.elements.len() >= B)
{
let last_element = self.children[child_index - 1].elements.pop().unwrap();
assert!(!self.children[child_index - 1].elements.is_empty());
self.children[child_index - 1].length -= 1;
let parent_element =
mem::replace(&mut self.elements[child_index - 1], last_element);
self.children[child_index]
.elements
.insert(0, parent_element);
self.children[child_index].length += 1;
if let Some(last_child) = self.children[child_index - 1].children.pop() {
self.children[child_index - 1].length -= last_child.len();
self.children[child_index].length += last_child.len();
self.children[child_index].children.insert(0, last_child);
}
} else if self
.children
.get(child_index + 1)
.map_or(false, |c| c.elements.len() >= B)
{
let first_element = self.children[child_index + 1].elements.remove(0);
self.children[child_index + 1].length -= 1;
assert!(!self.children[child_index + 1].elements.is_empty());
let parent_element = mem::replace(&mut self.elements[child_index], first_element);
self.children[child_index].length += 1;
self.children[child_index].elements.push(parent_element);
if !self.children[child_index + 1].is_leaf() {
let first_child = self.children[child_index + 1].children.remove(0);
self.children[child_index + 1].length -= first_child.len();
self.children[child_index].length += first_child.len();
self.children[child_index].children.push(first_child);
}
}
}
self.length -= 1;
let total_index = self.cumulative_index(child_index);
self.children[child_index].remove(index - total_index)
}
fn check(&self) -> usize {
let l = self.elements.len() + self.children.iter().map(|c| c.check()).sum::<usize>();
assert_eq!(self.len(), l, "{:#?}", self);
l
}
fn remove(&mut self, index: usize) -> T {
let original_len = self.len();
if self.is_leaf() {
let v = self.remove_from_leaf(index);
assert_eq!(original_len, self.len() + 1);
debug_assert_eq!(self.check(), self.len());
v
} else {
let mut total_index = 0;
for (child_index, child) in self.children.iter().enumerate() {
match (total_index + child.len()).cmp(&index) {
Ordering::Less => {
// should be later on in the loop
total_index += child.len() + 1;
continue;
}
Ordering::Equal => {
let v = self.remove_element_from_non_leaf(
index,
min(child_index, self.elements.len() - 1),
);
assert_eq!(original_len, self.len() + 1);
debug_assert_eq!(self.check(), self.len());
return v;
}
Ordering::Greater => {
let v = self.remove_from_internal_child(index, child_index);
assert_eq!(original_len, self.len() + 1);
debug_assert_eq!(self.check(), self.len());
return v;
}
}
}
panic!(
"index not found to remove {} {} {} {}",
index,
total_index,
self.len(),
self.check()
);
}
}
fn merge(&mut self, middle: T, successor_sibling: SequenceTreeNode<T>) {
self.elements.push(middle);
self.elements.extend(successor_sibling.elements);
self.children.extend(successor_sibling.children);
self.length += successor_sibling.length + 1;
assert!(self.is_full());
}
fn get(&self, index: usize) -> Option<&T> {
if self.is_leaf() {
return self.elements.get(index);
} else {
let mut cumulative_len = 0;
for (child_index, child) in self.children.iter().enumerate() {
match (cumulative_len + child.len()).cmp(&index) {
Ordering::Less => {
cumulative_len += child.len() + 1;
}
Ordering::Equal => return self.elements.get(child_index),
Ordering::Greater => {
return child.get(index - cumulative_len);
}
}
}
}
None
}
}
impl<T> Default for SequenceTreeInternal<T>
where
T: Clone + Debug,
{
fn default() -> Self {
Self::new()
}
}
impl<T> PartialEq for SequenceTreeInternal<T>
where
T: Clone + Debug + PartialEq,
{
fn eq(&self, other: &Self) -> bool {
self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b)
}
}
impl<'a, T> IntoIterator for &'a SequenceTreeInternal<T>
where
T: Clone + Debug,
{
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Self::IntoIter {
Iter {
inner: self,
index: 0,
}
}
}
#[derive(Debug)]
pub struct Iter<'a, T> {
inner: &'a SequenceTreeInternal<T>,
index: usize,
}
impl<'a, T> Iterator for Iter<'a, T>
where
T: Clone + Debug,
{
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
self.index += 1;
self.inner.get(self.index - 1)
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.index += n + 1;
self.inner.get(self.index - 1)
}
}
#[cfg(test)]
mod tests {
use proptest::prelude::*;
use super::*;
#[test]
fn push_back() {
let mut t = SequenceTree::new();
t.push(1);
t.push(2);
t.push(3);
t.push(4);
t.push(5);
t.push(6);
t.push(8);
t.push(100);
}
#[test]
fn insert() {
let mut t = SequenceTree::new();
t.insert(0, 1);
t.insert(1, 1);
t.insert(0, 1);
t.insert(0, 1);
t.insert(0, 1);
t.insert(3, 1);
t.insert(4, 1);
}
#[test]
fn insert_book() {
let mut t = SequenceTree::new();
for i in 0..100 {
t.insert(i % 2, ());
}
}
#[test]
fn insert_book_vec() {
let mut t = SequenceTree::new();
let mut v = Vec::new();
for i in 0..100 {
t.insert(i % 3, ());
v.insert(i % 3, ());
assert_eq!(v, t.iter().copied().collect::<Vec<_>>())
}
}
fn arb_indices() -> impl Strategy<Value = Vec<usize>> {
proptest::collection::vec(any::<usize>(), 0..1000).prop_map(|v| {
let mut len = 0;
v.into_iter()
.map(|i| {
len += 1;
i % len
})
.collect::<Vec<_>>()
})
}
proptest! {
#[test]
fn proptest_insert(indices in arb_indices()) {
let mut t = SequenceTreeInternal::<usize>::new();
let mut v = Vec::new();
for i in indices{
if i <= v.len() {
t.insert(i % 3, i);
v.insert(i % 3, i);
} else {
return Err(proptest::test_runner::TestCaseError::reject("index out of bounds"))
}
assert_eq!(v, t.iter().copied().collect::<Vec<_>>())
}
}
}
proptest! {
// This is a really slow test due to all the copying of the Vecs (i.e. not due to the
// sequencetree) so we only do a few runs
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn proptest_remove(inserts in arb_indices(), removes in arb_indices()) {
let mut t = SequenceTreeInternal::<usize>::new();
let mut v = Vec::new();
for i in inserts {
if i <= v.len() {
t.insert(i , i);
v.insert(i , i);
} else {
return Err(proptest::test_runner::TestCaseError::reject("index out of bounds"))
}
assert_eq!(v, t.iter().copied().collect::<Vec<_>>())
}
for i in removes {
if i < v.len() {
let tr = t.remove(i);
let vr = v.remove(i);
assert_eq!(tr, vr);
} else {
return Err(proptest::test_runner::TestCaseError::reject("index out of bounds"))
}
assert_eq!(v, t.iter().copied().collect::<Vec<_>>())
}
}
}
}
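A short usage sketch of the `SequenceTree` API shown above; the type is crate-internal, so this is illustrative rather than something an external crate can compile, and the proptests above exercise it far more thoroughly:

fn demo() {
    let mut t = SequenceTree::new();
    t.push("a");
    t.push("c");
    // Insert in the middle; later elements shift one position to the right.
    t.insert(1, "b");
    assert_eq!(t.len(), 3);
    assert_eq!(t.get(1), Some(&"b"));
    // remove returns the element that was stored at the index.
    assert_eq!(t.remove(0), "a");
    assert_eq!(t.iter().copied().collect::<Vec<_>>(), vec!["b", "c"]);
}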

View file

@ -437,6 +437,8 @@ pub(crate) trait AsChangeOp<'a> {
fn action(&self) -> u64;
fn val(&self) -> Cow<'a, ScalarValue>;
fn pred(&self) -> Self::PredIter;
fn expand(&self) -> bool;
fn mark_name(&self) -> Option<Cow<'a, smol_str::SmolStr>>;
}
impl ChangeBuilder<Set<NonZeroU64>, Set<ActorId>, Set<u64>, Set<i64>> {

View file

@ -1,4 +1,7 @@
use std::collections::{BTreeMap, BTreeSet};
use std::{
borrow::Cow,
collections::{BTreeMap, BTreeSet},
};
use crate::convert;
@ -244,6 +247,14 @@ where
fn val(&self) -> std::borrow::Cow<'aschangeop, crate::ScalarValue> {
self.op.val()
}
fn expand(&self) -> bool {
self.op.expand()
}
fn mark_name(&self) -> Option<Cow<'aschangeop, smol_str::SmolStr>> {
self.op.mark_name()
}
}
pub(crate) struct WithChangeActorsPredIter<'actors, 'aschangeop, A, I, O, C, P> {

View file

@ -1,16 +1,16 @@
use std::{convert::TryFrom, ops::Range};
use std::{borrow::Cow, convert::TryFrom, ops::Range};
use crate::{
columnar::{
column_range::{
generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange},
BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, ObjIdEncoder, ObjIdIter,
ObjIdRange, OpIdListEncoder, OpIdListIter, OpIdListRange, RleRange, ValueEncoder,
ValueIter, ValueRange,
BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, MaybeBooleanRange,
ObjIdEncoder, ObjIdIter, ObjIdRange, OpIdListEncoder, OpIdListIter, OpIdListRange,
RleRange, ValueEncoder, ValueIter, ValueRange,
},
encoding::{
BooleanDecoder, BooleanEncoder, ColumnDecoder, DecodeColumnError, RleDecoder,
RleEncoder,
BooleanDecoder, BooleanEncoder, ColumnDecoder, DecodeColumnError, MaybeBooleanDecoder,
MaybeBooleanEncoder, RleDecoder, RleEncoder,
},
},
convert,
@ -32,6 +32,8 @@ const INSERT_COL_ID: ColumnId = ColumnId::new(3);
const ACTION_COL_ID: ColumnId = ColumnId::new(4);
const VAL_COL_ID: ColumnId = ColumnId::new(5);
const PRED_COL_ID: ColumnId = ColumnId::new(7);
const EXPAND_COL_ID: ColumnId = ColumnId::new(8);
const MARK_NAME_COL_ID: ColumnId = ColumnId::new(9);
#[derive(Clone, Debug, PartialEq)]
pub(crate) struct ChangeOp {
@ -41,6 +43,8 @@ pub(crate) struct ChangeOp {
pub(crate) pred: Vec<OpId>,
pub(crate) action: u64,
pub(crate) obj: ObjId,
pub(crate) expand: bool,
pub(crate) mark_name: Option<smol_str::SmolStr>,
}
impl<'a, A: AsChangeOp<'a, ActorId = usize, OpId = OpId>> From<A> for ChangeOp {
@ -59,6 +63,8 @@ impl<'a, A: AsChangeOp<'a, ActorId = usize, OpId = OpId>> From<A> for ChangeOp {
pred: a.pred().collect(),
insert: a.insert(),
action: a.action(),
expand: a.expand(),
mark_name: a.mark_name().map(|n| n.into_owned()),
}
}
}
@ -99,6 +105,14 @@ impl<'a> AsChangeOp<'a> for &'a ChangeOp {
fn action(&self) -> u64 {
self.action
}
fn expand(&self) -> bool {
self.expand
}
fn mark_name(&self) -> Option<Cow<'a, smol_str::SmolStr>> {
self.mark_name.as_ref().map(Cow::Borrowed)
}
}
#[derive(Clone, Debug, PartialEq)]
@ -109,6 +123,8 @@ pub(crate) struct ChangeOpsColumns {
action: RleRange<u64>,
val: ValueRange,
pred: OpIdListRange,
expand: MaybeBooleanRange,
mark_name: RleRange<smol_str::SmolStr>,
}
impl ChangeOpsColumns {
@ -121,6 +137,8 @@ impl ChangeOpsColumns {
action: self.action.decoder(data),
val: self.val.iter(data),
pred: self.pred.iter(data),
expand: self.expand.decoder(data),
mark_name: self.mark_name.decoder(data),
}
}
@ -147,12 +165,16 @@ impl ChangeOpsColumns {
Op: convert::OpId<usize> + 'a,
C: AsChangeOp<'c, OpId = Op> + 'a,
{
tracing::trace!(expands = ?ops.clone().map(|op| op.expand()).collect::<Vec<_>>(), "encoding change ops");
let obj = ObjIdRange::encode(ops.clone().map(|o| o.obj()), out);
let key = KeyRange::encode(ops.clone().map(|o| o.key()), out);
let insert = BooleanRange::encode(ops.clone().map(|o| o.insert()), out);
let action = RleRange::encode(ops.clone().map(|o| Some(o.action())), out);
let val = ValueRange::encode(ops.clone().map(|o| o.val()), out);
let pred = OpIdListRange::encode(ops.map(|o| o.pred()), out);
let pred = OpIdListRange::encode(ops.clone().map(|o| o.pred()), out);
let expand = MaybeBooleanRange::encode(ops.clone().map(|o| o.expand()), out);
let mark_name =
RleRange::encode::<Cow<'_, smol_str::SmolStr>, _>(ops.map(|o| o.mark_name()), out);
Self {
obj,
key,
@ -160,6 +182,8 @@ impl ChangeOpsColumns {
action,
val,
pred,
expand,
mark_name,
}
}
@ -175,13 +199,18 @@ impl ChangeOpsColumns {
let mut action = RleEncoder::<_, u64>::from(Vec::new());
let mut val = ValueEncoder::new();
let mut pred = OpIdListEncoder::new();
let mut expand = MaybeBooleanEncoder::new();
let mut mark_name = RleEncoder::<_, smol_str::SmolStr>::new(Vec::new());
for op in ops {
tracing::trace!(expand=?op.expand(), "expand");
obj.append(op.obj());
key.append(op.key());
insert.append(op.insert());
action.append_value(op.action());
val.append(&op.val());
pred.append(op.pred());
expand.append(op.expand());
mark_name.append(op.mark_name());
}
let obj = obj.finish(out);
let key = key.finish(out);
@ -199,6 +228,16 @@ impl ChangeOpsColumns {
let val = val.finish(out);
let pred = pred.finish(out);
let expand_start = out.len();
let (expand, _) = expand.finish();
out.extend(expand);
let expand = MaybeBooleanRange::from(expand_start..out.len());
let mark_name_start = out.len();
let (mark_name, _) = mark_name.finish();
out.extend(mark_name);
let mark_name = RleRange::from(mark_name_start..out.len());
Self {
obj,
key,
@ -206,6 +245,8 @@ impl ChangeOpsColumns {
action,
val,
pred,
expand,
mark_name,
}
}
@ -272,6 +313,18 @@ impl ChangeOpsColumns {
),
]);
}
if !self.expand.is_empty() {
cols.push(RawColumn::new(
ColumnSpec::new(EXPAND_COL_ID, ColumnType::Boolean, false),
self.expand.clone().into(),
));
}
if !self.mark_name.is_empty() {
cols.push(RawColumn::new(
ColumnSpec::new(MARK_NAME_COL_ID, ColumnType::String, false),
self.mark_name.clone().into(),
));
}
cols.into_iter().collect()
}
}
@ -296,6 +349,8 @@ pub(crate) struct ChangeOpsIter<'a> {
action: RleDecoder<'a, u64>,
val: ValueIter<'a>,
pred: OpIdListIter<'a>,
expand: MaybeBooleanDecoder<'a>,
mark_name: RleDecoder<'a, smol_str::SmolStr>,
}
impl<'a> ChangeOpsIter<'a> {
@ -317,6 +372,8 @@ impl<'a> ChangeOpsIter<'a> {
let action = self.action.next_in_col("action")?;
let val = self.val.next_in_col("value")?;
let pred = self.pred.next_in_col("pred")?;
let expand = self.expand.maybe_next_in_col("expand")?.unwrap_or(false);
let mark_name = self.mark_name.maybe_next_in_col("mark_name")?;
// This check is necessary to ensure that OpType::from_action_and_value
// cannot panic later in the process.
@ -329,6 +386,8 @@ impl<'a> ChangeOpsIter<'a> {
action,
val,
pred,
expand,
mark_name,
}))
}
}
@ -375,6 +434,8 @@ impl TryFrom<Columns> for ChangeOpsColumns {
let mut pred_group: Option<RleRange<u64>> = None;
let mut pred_actor: Option<RleRange<u64>> = None;
let mut pred_ctr: Option<DeltaRange> = None;
let mut expand: Option<MaybeBooleanRange> = None;
let mut mark_name: Option<RleRange<smol_str::SmolStr>> = None;
let mut other = Columns::empty();
for (index, col) in columns.into_iter().enumerate() {
@ -429,6 +490,8 @@ impl TryFrom<Columns> for ChangeOpsColumns {
}
_ => return Err(ParseChangeColumnsError::MismatchingColumn { index }),
},
(EXPAND_COL_ID, ColumnType::Boolean) => expand = Some(col.range().into()),
(MARK_NAME_COL_ID, ColumnType::String) => mark_name = Some(col.range().into()),
(other_type, other_col) => {
tracing::warn!(typ=?other_type, id=?other_col, "unknown column");
other.append(col);
@ -454,6 +517,8 @@ impl TryFrom<Columns> for ChangeOpsColumns {
action: action.unwrap_or(0..0).into(),
val: val.unwrap_or_else(|| ValueRange::new((0..0).into(), (0..0).into())),
pred,
expand: expand.unwrap_or_else(|| (0..0).into()),
mark_name: mark_name.unwrap_or_else(|| (0..0).into()),
})
}
}
@ -471,6 +536,8 @@ mod tests {
pred in proptest::collection::vec(opid(), 0..20),
action in 0_u64..6,
obj in opid(),
mark_name in proptest::option::of(any::<String>().prop_map(|s| s.into())),
expand in any::<bool>(),
insert in any::<bool>()) -> ChangeOp {
let val = if action == 5 && !(value.is_int() || value.is_uint()) {
@ -483,6 +550,8 @@ mod tests {
pred,
action,
insert,
expand,
mark_name,
}
}
}
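The raw-column logic above only emits the two new columns when they actually contain data, so changes that never touch marks serialize exactly as before. A standalone sketch of that rule with hypothetical types (the real `RawColumn`/`ColumnSpec` plumbing is more involved):

struct RawColumn {
    id: u32,
    bytes: Vec<u8>,
}

fn mark_columns(expand: Vec<u8>, mark_name: Vec<u8>) -> Vec<RawColumn> {
    let mut cols = Vec::new();
    // ...the pre-existing op columns are pushed here unconditionally...
    if !expand.is_empty() {
        cols.push(RawColumn { id: 8, bytes: expand }); // EXPAND_COL_ID
    }
    if !mark_name.is_empty() {
        cols.push(RawColumn { id: 9, bytes: mark_name }); // MARK_NAME_COL_ID
    }
    cols
}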

View file

@ -5,7 +5,7 @@ use crate::{
convert,
op_set::OpSetMetadata,
storage::AsChangeOp,
types::{ActorId, Key, ObjId, Op, OpId, OpType, ScalarValue},
types::{ActorId, Key, MarkData, ObjId, Op, OpId, OpType, ScalarValue},
};
/// Wrap an op in an implementation of `AsChangeOp` which represents actor IDs using a reference to
@ -93,9 +93,12 @@ impl<'a> AsChangeOp<'a> for OpWithMetadata<'a> {
fn val(&self) -> Cow<'a, ScalarValue> {
match &self.op.action {
OpType::Make(..) | OpType::Delete => Cow::Owned(ScalarValue::Null),
OpType::Make(..) | OpType::Delete | OpType::MarkEnd(..) => {
Cow::Owned(ScalarValue::Null)
}
OpType::Increment(i) => Cow::Owned(ScalarValue::Int(*i)),
OpType::Put(s) => Cow::Borrowed(s),
OpType::MarkBegin(_, MarkData { value, .. }) => Cow::Borrowed(value),
}
}
@ -125,4 +128,19 @@ impl<'a> AsChangeOp<'a> for OpWithMetadata<'a> {
Key::Seq(e) => convert::Key::Elem(convert::ElemId::Op(self.wrap(&e.0))),
}
}
fn expand(&self) -> bool {
matches!(
self.op.action,
OpType::MarkBegin(true, _) | OpType::MarkEnd(true)
)
}
fn mark_name(&self) -> Option<Cow<'a, smol_str::SmolStr>> {
if let OpType::MarkBegin(_, MarkData { name, .. }) = &self.op.action {
Some(Cow::Owned(name.clone()))
} else {
None
}
}
}

View file

@ -4,7 +4,7 @@ use crate::{
convert,
indexed_cache::IndexedCache,
storage::AsDocOp,
types::{ElemId, Key, ObjId, Op, OpId, OpType, ScalarValue},
types::{ElemId, Key, MarkData, ObjId, Op, OpId, OpType, ScalarValue},
};
/// Create an [`AsDocOp`] implementation for a [`crate::types::Op`]
@ -90,6 +90,7 @@ impl<'a> AsDocOp<'a> for OpAsDocOp<'a> {
match &self.op.action {
OpType::Put(v) => Cow::Borrowed(v),
OpType::Increment(i) => Cow::Owned(ScalarValue::Int(*i)),
OpType::MarkBegin(_, MarkData { value, .. }) => Cow::Borrowed(value),
_ => Cow::Owned(ScalarValue::Null),
}
}
@ -109,6 +110,22 @@ impl<'a> AsDocOp<'a> for OpAsDocOp<'a> {
fn action(&self) -> u64 {
self.op.action.action_index()
}
fn expand(&self) -> bool {
if let OpType::MarkBegin(expand, _) | OpType::MarkEnd(expand) = &self.op.action {
*expand
} else {
false
}
}
fn mark_name(&self) -> Option<Cow<'a, smol_str::SmolStr>> {
if let OpType::MarkBegin(_, MarkData { name, .. }) = &self.op.action {
Some(Cow::Owned(name.clone()))
} else {
None
}
}
}
pub(crate) struct OpAsDocOpSuccIter<'a> {

View file

@ -4,13 +4,13 @@ use crate::{
columnar::{
column_range::{
generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange},
BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, ObjIdEncoder, ObjIdIter,
ObjIdRange, OpIdEncoder, OpIdIter, OpIdListEncoder, OpIdListIter, OpIdListRange,
OpIdRange, RleRange, ValueEncoder, ValueIter, ValueRange,
BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, MaybeBooleanRange,
ObjIdEncoder, ObjIdIter, ObjIdRange, OpIdEncoder, OpIdIter, OpIdListEncoder,
OpIdListIter, OpIdListRange, OpIdRange, RleRange, ValueEncoder, ValueIter, ValueRange,
},
encoding::{
BooleanDecoder, BooleanEncoder, ColumnDecoder, DecodeColumnError, RleDecoder,
RleEncoder,
BooleanDecoder, BooleanEncoder, ColumnDecoder, DecodeColumnError, MaybeBooleanDecoder,
MaybeBooleanEncoder, RleDecoder, RleEncoder,
},
},
convert,
@ -28,6 +28,8 @@ const INSERT_COL_ID: ColumnId = ColumnId::new(3);
const ACTION_COL_ID: ColumnId = ColumnId::new(4);
const VAL_COL_ID: ColumnId = ColumnId::new(5);
const SUCC_COL_ID: ColumnId = ColumnId::new(8);
const EXPAND_COL_ID: ColumnId = ColumnId::new(9);
const MARK_NAME_COL_ID: ColumnId = ColumnId::new(10);
/// The form operations take in the compressed document format.
#[derive(Debug)]
@ -36,9 +38,11 @@ pub(crate) struct DocOp {
pub(crate) object: ObjId,
pub(crate) key: Key,
pub(crate) insert: bool,
pub(crate) action: usize,
pub(crate) action: u64,
pub(crate) value: ScalarValue,
pub(crate) succ: Vec<OpId>,
pub(crate) expand: bool,
pub(crate) mark_name: Option<smol_str::SmolStr>,
}
#[derive(Debug, Clone)]
@ -52,6 +56,8 @@ pub(crate) struct DocOpColumns {
succ: OpIdListRange,
#[allow(dead_code)]
other: Columns,
expand: MaybeBooleanRange,
mark_name: RleRange<smol_str::SmolStr>,
}
struct DocId {
@ -90,6 +96,8 @@ pub(crate) trait AsDocOp<'a> {
fn action(&self) -> u64;
fn val(&self) -> Cow<'a, ScalarValue>;
fn succ(&self) -> Self::SuccIter;
fn expand(&self) -> bool;
fn mark_name(&self) -> Option<Cow<'a, smol_str::SmolStr>>;
}
impl DocOpColumns {
@ -118,7 +126,9 @@ impl DocOpColumns {
let insert = BooleanRange::encode(ops.clone().map(|o| o.insert()), out);
let action = RleRange::encode(ops.clone().map(|o| Some(o.action())), out);
let val = ValueRange::encode(ops.clone().map(|o| o.val()), out);
let succ = OpIdListRange::encode(ops.map(|o| o.succ()), out);
let succ = OpIdListRange::encode(ops.clone().map(|o| o.succ()), out);
let expand = MaybeBooleanRange::encode(ops.clone().map(|o| o.expand()), out);
let mark_name = RleRange::encode(ops.map(|o| o.mark_name()), out);
Self {
obj,
key,
@ -127,6 +137,8 @@ impl DocOpColumns {
action,
val,
succ,
expand,
mark_name,
other: Columns::empty(),
}
}
@ -144,6 +156,8 @@ impl DocOpColumns {
let mut action = RleEncoder::<_, u64>::from(Vec::new());
let mut val = ValueEncoder::new();
let mut succ = OpIdListEncoder::new();
let mut expand = MaybeBooleanEncoder::new();
let mut mark_name = RleEncoder::<_, smol_str::SmolStr>::new(Vec::new());
for op in ops {
obj.append(op.obj());
key.append(op.key());
@ -152,6 +166,8 @@ impl DocOpColumns {
action.append(Some(op.action()));
val.append(&op.val());
succ.append(op.succ());
expand.append(op.expand());
mark_name.append(op.mark_name());
}
let obj = obj.finish(out);
let key = key.finish(out);
@ -169,6 +185,17 @@ impl DocOpColumns {
let val = val.finish(out);
let succ = succ.finish(out);
let expand_start = out.len();
let (expand_out, _) = expand.finish();
out.extend(expand_out);
let expand = MaybeBooleanRange::from(expand_start..out.len());
let mark_name_start = out.len();
let (mark_name_out, _) = mark_name.finish();
out.extend(mark_name_out);
let mark_name = RleRange::from(mark_name_start..out.len());
DocOpColumns {
obj,
key,
@ -177,6 +204,8 @@ impl DocOpColumns {
action,
val,
succ,
expand,
mark_name,
other: Columns::empty(),
}
}
@ -190,6 +219,8 @@ impl DocOpColumns {
insert: self.insert.decoder(data),
value: self.val.iter(data),
succ: self.succ.iter(data),
expand: self.expand.decoder(data),
mark_name: self.mark_name.decoder(data),
}
}
@ -264,6 +295,18 @@ impl DocOpColumns {
),
]);
}
if !self.expand.is_empty() {
cols.push(RawColumn::new(
ColumnSpec::new(EXPAND_COL_ID, ColumnType::Boolean, false),
self.expand.clone().into(),
));
}
if !self.mark_name.is_empty() {
cols.push(RawColumn::new(
ColumnSpec::new(MARK_NAME_COL_ID, ColumnType::String, false),
self.mark_name.clone().into(),
));
}
cols.into_iter().collect()
}
}
@ -277,6 +320,8 @@ pub(crate) struct DocOpColumnIter<'a> {
insert: BooleanDecoder<'a>,
value: ValueIter<'a>,
succ: OpIdListIter<'a>,
expand: MaybeBooleanDecoder<'a>,
mark_name: RleDecoder<'a, smol_str::SmolStr>,
}
impl<'a> DocOpColumnIter<'a> {
@ -321,14 +366,18 @@ impl<'a> DocOpColumnIter<'a> {
let value = self.value.next_in_col("value")?;
let succ = self.succ.next_in_col("succ")?;
let insert = self.insert.next_in_col("insert")?;
let expand = self.expand.maybe_next_in_col("expand")?.unwrap_or(false);
let mark_name = self.mark_name.maybe_next_in_col("mark_name")?;
Ok(Some(DocOp {
id,
value,
action: action as usize,
action,
object: obj,
key,
succ,
insert,
expand,
mark_name,
}))
}
}
@ -363,6 +412,8 @@ impl TryFrom<Columns> for DocOpColumns {
let mut succ_group: Option<RleRange<u64>> = None;
let mut succ_actor: Option<RleRange<u64>> = None;
let mut succ_ctr: Option<DeltaRange> = None;
let mut expand: Option<MaybeBooleanRange> = None;
let mut mark_name: Option<RleRange<smol_str::SmolStr>> = None;
let mut other = Columns::empty();
for (index, col) in columns.into_iter().enumerate() {
@ -416,6 +467,8 @@ impl TryFrom<Columns> for DocOpColumns {
}
_ => return Err(Error::MismatchingColumn { index }),
},
(EXPAND_COL_ID, ColumnType::Boolean) => expand = Some(col.range().into()),
(MARK_NAME_COL_ID, ColumnType::String) => mark_name = Some(col.range().into()),
(other_col, other_type) => {
tracing::warn!(id=?other_col, typ=?other_type, "unknown column type");
other.append(col)
@ -444,6 +497,8 @@ impl TryFrom<Columns> for DocOpColumns {
succ_actor.unwrap_or_else(|| (0..0).into()),
succ_ctr.unwrap_or_else(|| (0..0).into()),
),
expand: expand.unwrap_or_else(|| (0..0).into()),
mark_name: mark_name.unwrap_or_else(|| (0..0).into()),
other,
})
}

View file

@ -17,8 +17,6 @@ pub(crate) enum Error {
OpsOutOfOrder,
#[error("error reading operation: {0:?}")]
ReadOp(Box<dyn std::error::Error + Send + Sync + 'static>),
#[error("an operation contained an invalid action")]
InvalidAction,
#[error("an operation referenced a missing actor id")]
MissingActor,
#[error("invalid changes: {0}")]
@ -29,6 +27,8 @@ pub(crate) enum Error {
MissingOps,
#[error("succ out of order")]
SuccOutOfOrder,
#[error(transparent)]
InvalidOp(#[from] crate::error::InvalidOpType),
}
pub(crate) struct MismatchedHeads {
@ -300,8 +300,9 @@ impl LoadingObject {
op.pred = meta.sorted_opids(preds.into_iter());
}
if let OpType::Put(ScalarValue::Counter(c)) = &mut op.action {
let inc_ops = op.succ.iter().filter_map(|s| self.inc_ops.get(s).copied());
c.increment(inc_ops);
for inc in op.succ.iter().filter_map(|s| self.inc_ops.get(s)) {
c.increment(*inc);
}
}
collector.collect(self.id, op.clone())?;
ops.push(op)
@ -345,9 +346,10 @@ fn import_op(m: &mut OpSetMetadata, op: DocOp) -> Result<Op, Error> {
return Err(Error::MissingActor);
}
}
let action = OpType::from_action_and_value(op.action, op.value, op.mark_name, op.expand);
Ok(Op {
id: check_opid(m, op.id)?,
action: parse_optype(op.action, op.value)?,
action,
key,
succ: m.try_sorted_opids(op.succ).ok_or(Error::SuccOutOfOrder)?,
pred: OpIds::empty(),
@ -367,25 +369,3 @@ fn check_opid(m: &OpSetMetadata, opid: OpId) -> Result<OpId, Error> {
}
}
}
fn parse_optype(action_index: usize, value: ScalarValue) -> Result<OpType, Error> {
match action_index {
0 => Ok(OpType::Make(ObjType::Map)),
1 => Ok(OpType::Put(value)),
2 => Ok(OpType::Make(ObjType::List)),
3 => Ok(OpType::Delete),
4 => Ok(OpType::Make(ObjType::Text)),
5 => match value {
ScalarValue::Int(i) => Ok(OpType::Increment(i)),
_ => {
tracing::error!(?value, "invalid value for counter op");
Err(Error::InvalidAction)
}
},
6 => Ok(OpType::Make(ObjType::Table)),
other => {
tracing::error!(action = other, "unknown action type");
Err(Error::InvalidAction)
}
}
}

View file

@ -1,6 +1,7 @@
use std::num::NonZeroU64;
use crate::exid::ExId;
use crate::marks::{ExpandMark, Mark};
use crate::query::{self, OpIdSearch};
use crate::storage::Change as StoredChange;
use crate::types::{Key, ListEncoding, ObjId, OpId, OpIds, TextEncoding};
@ -638,7 +639,7 @@ impl TransactionInner {
for (offset, v) in values.iter().enumerate() {
let op = &self.operations[start + offset].1;
let value = (v.clone().into(), doc.ops().id_to_exid(op.id));
obs.insert(doc, ex_obj.clone(), index + offset, value)
obs.insert(doc, ex_obj.clone(), index + offset, value, false)
}
}
}
@ -648,6 +649,51 @@ impl TransactionInner {
Ok(())
}
pub(crate) fn mark<Obs: OpObserver>(
&mut self,
doc: &mut Automerge,
op_observer: Option<&mut Obs>,
ex_obj: &ExId,
mark: Mark<'_>,
expand: ExpandMark,
) -> Result<(), AutomergeError> {
let (obj, _obj_type) = doc.exid_to_obj(ex_obj)?;
if let Some(obs) = op_observer {
let action = OpType::MarkBegin(expand.left(), mark.data.clone().into_owned());
self.do_insert(doc, Some(obs), obj, mark.start, action)?;
self.do_insert(
doc,
Some(obs),
obj,
mark.end,
OpType::MarkEnd(expand.right()),
)?;
if mark.value().is_null() {
obs.unmark(doc, ex_obj.clone(), mark.name(), mark.start, mark.end);
} else {
obs.mark(doc, ex_obj.clone(), Some(mark).into_iter())
}
} else {
let action = OpType::MarkBegin(expand.left(), mark.data.into_owned());
self.do_insert::<Obs>(doc, None, obj, mark.start, action)?;
self.do_insert::<Obs>(doc, None, obj, mark.end, OpType::MarkEnd(expand.right()))?;
}
Ok(())
}
pub(crate) fn unmark<Obs: OpObserver>(
&mut self,
doc: &mut Automerge,
op_observer: Option<&mut Obs>,
ex_obj: &ExId,
name: &str,
start: usize,
end: usize,
) -> Result<(), AutomergeError> {
let mark = Mark::new(name.to_string(), ScalarValue::Null, start, end);
self.mark(doc, op_observer, ex_obj, mark, ExpandMark::None)
}
fn finalize_op<Obs: OpObserver>(
&mut self,
doc: &mut Automerge,
@ -660,23 +706,24 @@ impl TransactionInner {
if let Some(op_observer) = op_observer {
let ex_obj = doc.ops().id_to_exid(obj.0);
if op.insert {
let obj_type = doc.ops().object_type(&obj);
assert!(obj_type.unwrap().is_sequence());
match (obj_type, prop) {
(Some(ObjType::List), Prop::Seq(index)) => {
let value = (op.value(), doc.ops().id_to_exid(op.id));
op_observer.insert(doc, ex_obj, index, value)
}
(Some(ObjType::Text), Prop::Seq(index)) => {
// FIXME
if op_observer.text_as_seq() {
if !op.is_mark() {
let obj_type = doc.ops().object_type(&obj);
assert!(obj_type.unwrap().is_sequence());
match (obj_type, prop) {
(Some(ObjType::List), Prop::Seq(index)) => {
let value = (op.value(), doc.ops().id_to_exid(op.id));
op_observer.insert(doc, ex_obj, index, value)
} else {
op_observer.splice_text(doc, ex_obj, index, op.to_str())
op_observer.insert(doc, ex_obj, index, value, false)
}
(Some(ObjType::Text), Prop::Seq(index)) => {
if op_observer.text_as_seq() {
let value = (op.value(), doc.ops().id_to_exid(op.id));
op_observer.insert(doc, ex_obj, index, value, false)
} else {
op_observer.splice_text(doc, ex_obj, index, op.to_str())
}
}
_ => {}
}
_ => {}
}
} else if op.is_delete() {
op_observer.delete(doc, ex_obj, prop);

View file

@ -1,12 +1,13 @@
use std::ops::RangeBounds;
use crate::exid::ExId;
use crate::marks::{ExpandMark, Mark};
use crate::op_observer::BranchableObserver;
use crate::{
Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ReadDoc, ScalarValue, Value, Values,
Automerge, ChangeHash, ObjType, OpObserver, Prop, ReadDoc, ScalarValue, Value, Values,
};
use crate::{AutomergeError, Keys};
use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt};
use crate::{ListRange, MapRange};
use super::{observation, CommitOptions, Transactable, TransactionArgs, TransactionInner};
@ -116,14 +117,10 @@ impl<'a, Obs: observation::Observation> Transaction<'a, Obs> {
}
impl<'a, Obs: observation::Observation> ReadDoc for Transaction<'a, Obs> {
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_, '_> {
fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys<'_> {
self.doc.keys(obj)
}
fn keys_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> {
self.doc.keys_at(obj, heads)
}
fn map_range<O: AsRef<ExId>, R: RangeBounds<String>>(
&self,
obj: O,
@ -132,15 +129,6 @@ impl<'a, Obs: observation::Observation> ReadDoc for Transaction<'a, Obs> {
self.doc.map_range(obj, range)
}
fn map_range_at<O: AsRef<ExId>, R: RangeBounds<String>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> MapRangeAt<'_, R> {
self.doc.map_range_at(obj, range, heads)
}
fn list_range<O: AsRef<ExId>, R: RangeBounds<usize>>(
&self,
obj: O,
@ -149,31 +137,14 @@ impl<'a, Obs: observation::Observation> ReadDoc for Transaction<'a, Obs> {
self.doc.list_range(obj, range)
}
fn list_range_at<O: AsRef<ExId>, R: RangeBounds<usize>>(
&self,
obj: O,
range: R,
heads: &[ChangeHash],
) -> ListRangeAt<'_, R> {
self.doc.list_range_at(obj, range, heads)
}
fn values<O: AsRef<ExId>>(&self, obj: O) -> Values<'_> {
self.doc.values(obj)
}
fn values_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> {
self.doc.values_at(obj, heads)
}
fn length<O: AsRef<ExId>>(&self, obj: O) -> usize {
self.doc.length(obj)
}
fn length_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> usize {
self.doc.length_at(obj, heads)
}
fn object_type<O: AsRef<ExId>>(&self, obj: O) -> Result<ObjType, AutomergeError> {
self.doc.object_type(obj)
}
@ -182,12 +153,8 @@ impl<'a, Obs: observation::Observation> ReadDoc for Transaction<'a, Obs> {
self.doc.text(obj)
}
fn text_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<String, AutomergeError> {
self.doc.text_at(obj, heads)
fn marks<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<Mark<'_>>, AutomergeError> {
self.doc.marks(obj)
}
fn get<O: AsRef<ExId>, P: Into<Prop>>(
@ -198,15 +165,6 @@ impl<'a, Obs: observation::Observation> ReadDoc for Transaction<'a, Obs> {
self.doc.get(obj, prop)
}
fn get_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Option<(Value<'_>, ExId)>, AutomergeError> {
self.doc.get_at(obj, prop, heads)
}
fn get_all<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
@ -215,15 +173,6 @@ impl<'a, Obs: observation::Observation> ReadDoc for Transaction<'a, Obs> {
self.doc.get_all(obj, prop)
}
fn get_all_at<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,
prop: P,
heads: &[ChangeHash],
) -> Result<Vec<(Value<'_>, ExId)>, AutomergeError> {
self.doc.get_all_at(obj, prop, heads)
}
fn parents<O: AsRef<ExId>>(&self, obj: O) -> Result<crate::Parents<'_>, AutomergeError> {
self.doc.parents(obj)
}
@ -330,6 +279,25 @@ impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> {
self.do_tx(|tx, doc, obs| tx.splice_text(doc, obs, obj.as_ref(), pos, del, text))
}
fn mark<O: AsRef<ExId>>(
&mut self,
obj: O,
mark: Mark<'_>,
expand: ExpandMark,
) -> Result<(), AutomergeError> {
self.do_tx(|tx, doc, obs| tx.mark(doc, obs, obj.as_ref(), mark, expand))
}
fn unmark<O: AsRef<ExId>>(
&mut self,
obj: O,
name: &str,
start: usize,
end: usize,
) -> Result<(), AutomergeError> {
self.do_tx(|tx, doc, obs| tx.unmark(doc, obs, obj.as_ref(), name, start, end))
}
fn base_heads(&self) -> Vec<ChangeHash> {
self.doc.get_heads()
}

View file

@ -1,4 +1,5 @@
use crate::exid::ExId;
use crate::marks::{ExpandMark, Mark};
use crate::{AutomergeError, ChangeHash, ObjType, Prop, ReadDoc, ScalarValue};
/// A way of mutating a document within a single change.
@ -88,6 +89,21 @@ pub trait Transactable: ReadDoc {
text: &str,
) -> Result<(), AutomergeError>;
fn mark<O: AsRef<ExId>>(
&mut self,
obj: O,
mark: Mark<'_>,
expand: ExpandMark,
) -> Result<(), AutomergeError>;
fn unmark<O: AsRef<ExId>>(
&mut self,
obj: O,
key: &str,
start: usize,
end: usize,
) -> Result<(), AutomergeError>;
/// The heads this transaction will be based on
fn base_heads(&self) -> Vec<ChangeHash>;
}

View file

@ -14,6 +14,7 @@ mod opids;
pub(crate) use opids::OpIds;
pub(crate) use crate::clock::Clock;
pub(crate) use crate::marks::MarkData;
pub(crate) use crate::value::{Counter, ScalarValue, Value};
pub(crate) const HEAD: ElemId = ElemId(OpId(0, 0));
@ -198,6 +199,8 @@ pub enum OpType {
Delete,
Increment(i64),
Put(ScalarValue),
MarkBegin(bool, MarkData),
MarkEnd(bool),
}
impl OpType {
@ -213,6 +216,7 @@ impl OpType {
Self::Make(ObjType::Text) => 4,
Self::Increment(_) => 5,
Self::Make(ObjType::Table) => 6,
Self::MarkBegin(_, _) | Self::MarkEnd(_) => 7,
}
}
@ -227,11 +231,17 @@ impl OpType {
_ => Err(error::InvalidOpType::NonNumericInc),
},
6 => Ok(()),
7 => Ok(()),
_ => Err(error::InvalidOpType::UnknownAction(action)),
}
}
pub(crate) fn from_action_and_value(action: u64, value: ScalarValue) -> OpType {
pub(crate) fn from_action_and_value(
action: u64,
value: ScalarValue,
mark_name: Option<smol_str::SmolStr>,
expand: bool,
) -> OpType {
match action {
0 => Self::Make(ObjType::Map),
1 => Self::Put(value),
@ -244,9 +254,27 @@ impl OpType {
_ => unreachable!("validate_action_and_value returned NonNumericInc"),
},
6 => Self::Make(ObjType::Table),
7 => match mark_name {
Some(name) => Self::MarkBegin(expand, MarkData { name, value }),
None => Self::MarkEnd(expand),
},
_ => unreachable!("validate_action_and_value returned UnknownAction"),
}
}
pub(crate) fn to_str(&self) -> &str {
if let OpType::Put(ScalarValue::Str(s)) = &self {
s
} else if self.is_mark() {
""
} else {
"\u{fffc}"
}
}
pub(crate) fn is_mark(&self) -> bool {
matches!(&self, OpType::MarkBegin(_, _) | OpType::MarkEnd(_))
}
}
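The new action index 7 covers both mark ops; whether a decoded op becomes `MarkBegin` or `MarkEnd` is decided purely by whether the `mark_name` column had a value for it. A standalone sketch of that rule with simplified types (the real code also carries the mark's value into `MarkData`):

#[derive(Debug, PartialEq)]
enum MarkOp {
    Begin { expand: bool, name: String },
    End { expand: bool },
}

fn decode_action_7(expand: bool, mark_name: Option<String>) -> MarkOp {
    match mark_name {
        Some(name) => MarkOp::Begin { expand, name },
        None => MarkOp::End { expand },
    }
}

fn main() {
    assert_eq!(
        decode_action_7(true, Some("bold".into())),
        MarkOp::Begin { expand: true, name: "bold".into() }
    );
    assert_eq!(decode_action_7(false, None), MarkOp::End { expand: false });
}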
impl From<ObjType> for OpType {
@ -426,6 +454,13 @@ impl Display for Prop {
}
impl Key {
pub(crate) fn prop_index(&self) -> Option<usize> {
match self {
Key::Map(n) => Some(*n),
Key::Seq(_) => None,
}
}
pub(crate) fn elemid(&self) -> Option<ElemId> {
match self {
Key::Map(_) => None,
@ -458,6 +493,16 @@ impl OpId {
.cmp(&other.0)
.then_with(|| actors[self.1 as usize].cmp(&actors[other.1 as usize]))
}
#[inline]
pub(crate) fn prev(&self) -> OpId {
OpId(self.0 - 1, self.1)
}
#[inline]
pub(crate) fn next(&self) -> OpId {
OpId(self.0 + 1, self.1)
}
}
#[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)]
@ -545,16 +590,15 @@ pub(crate) struct Op {
impl Op {
pub(crate) fn add_succ<F: Fn(&OpId, &OpId) -> std::cmp::Ordering>(&mut self, op: &Op, cmp: F) {
self.succ.add(op.id, cmp);
if let OpType::Put(ScalarValue::Counter(Counter {
current,
increments,
..
})) = &mut self.action
{
if let OpType::Increment(n) = &op.action {
*current += *n;
*increments += 1;
}
if let OpType::Increment(n) = &op.action {
self.increment(*n);
}
}
pub(crate) fn increment(&mut self, n: i64) {
if let OpType::Put(ScalarValue::Counter(c)) = &mut self.action {
c.current += n;
c.increments += 1;
}
}
@ -582,14 +626,20 @@ impl Op {
}
pub(crate) fn to_str(&self) -> &str {
if let OpType::Put(ScalarValue::Str(s)) = &self.action {
s
} else {
"\u{fffc}"
}
self.action.to_str()
}
pub(crate) fn visible(&self) -> bool {
if self.is_inc() || self.is_mark() {
false
} else if self.is_counter() {
self.succ.len() <= self.incs()
} else {
self.succ.is_empty()
}
}
pub(crate) fn visible_or_mark(&self) -> bool {
if self.is_inc() {
false
} else if self.is_counter() {
@ -619,6 +669,18 @@ impl Op {
matches!(&self.action, OpType::Put(ScalarValue::Counter(_)))
}
pub(crate) fn is_mark(&self) -> bool {
self.action.is_mark()
}
pub(crate) fn valid_mark_anchor(&self) -> bool {
self.succ.is_empty()
&& matches!(
&self.action,
OpType::MarkBegin(true, _) | OpType::MarkEnd(false)
)
}
pub(crate) fn is_noop(&self, action: &OpType) -> bool {
matches!((&self.action, action), (OpType::Put(n), OpType::Put(m)) if n == m)
}
@ -655,6 +717,10 @@ impl Op {
match &self.action {
OpType::Make(obj_type) => Value::Object(*obj_type),
OpType::Put(scalar) => Value::Scalar(Cow::Borrowed(scalar)),
OpType::MarkBegin(_, mark) => {
Value::Scalar(Cow::Owned(format!("markBegin={}", mark.value).into()))
}
OpType::MarkEnd(_) => Value::Scalar(Cow::Owned("markEnd".into())),
_ => panic!("can't convert op into a value - {:?}", self),
}
}
@ -675,6 +741,8 @@ impl Op {
OpType::Make(obj) => format!("make{}", obj),
OpType::Increment(val) => format!("inc:{}", val),
OpType::Delete => "del".to_string(),
OpType::MarkBegin(_, _) => "markBegin".to_string(),
OpType::MarkEnd(_) => "markEnd".to_string(),
}
}
}

View file

@ -73,6 +73,13 @@ impl OpIds {
}
}
pub(crate) fn remove(&mut self, id: &OpId) -> Option<OpId> {
self.0
.iter()
.position(|i| i == id)
.map(|index| self.0.remove(index))
}
pub(crate) fn retain<F: Fn(&OpId) -> bool>(&mut self, f: F) {
self.0.retain(f)
}

View file

@ -245,13 +245,13 @@ impl<'a> Value<'a> {
impl<'a> fmt::Display for Value<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Value::Object(o) => write!(f, "Object: {}", o),
Value::Scalar(s) => write!(f, "Scalar: {}", s),
Value::Object(o) => write!(f, "{}", o),
Value::Scalar(s) => write!(f, "{}", s),
}
}
}
impl<'a> From<&str> for Value<'a> {
impl From<&str> for Value<'static> {
fn from(s: &str) -> Self {
Value::Scalar(Cow::Owned(ScalarValue::Str(s.into())))
}
@ -341,6 +341,12 @@ impl<'a> From<ScalarValue> for Value<'a> {
}
}
impl<'a> From<&'a ScalarValue> for Value<'a> {
fn from(v: &'a ScalarValue) -> Self {
Value::Scalar(Cow::Borrowed(v))
}
}
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone, Copy)]
pub(crate) enum DataType {
#[serde(rename = "counter")]
@ -367,11 +373,9 @@ pub struct Counter {
}
impl Counter {
pub(crate) fn increment<I: Iterator<Item = i64>>(&mut self, increments: I) {
for inc in increments {
self.current += inc;
self.increments += 1;
}
pub(crate) fn increment(&mut self, inc: i64) {
self.current += inc;
self.increments += 1;
}
}

View file

@ -249,6 +249,8 @@ impl OpTableRow {
crate::OpType::Put(v) => format!("set {}", v),
crate::OpType::Make(obj) => format!("make {}", obj),
crate::OpType::Increment(v) => format!("inc {}", v),
crate::OpType::MarkBegin(_, m) => format!("markBegin {}", m),
crate::OpType::MarkEnd(m) => format!("markEnd {}", m),
};
let prop = match op.key {
crate::types::Key::Map(k) => metadata.props[k].clone(),

View file

@ -6,7 +6,7 @@ use automerge::{
use std::fs;
// set up logging for all the tests
//use test_log::test;
use test_log::test;
#[allow(unused_imports)]
use automerge_test::{