Compare commits

...

4 commits

Author SHA1 Message Date
Alex Good
6e82954ded
Use cached hash for Opid 2021-12-31 19:38:59 +00:00
Alex Good
26773f6870
delete tests 2021-12-30 19:16:51 +00:00
Alex Good
6b23637c75
fix bench 2021-12-30 19:16:31 +00:00
Orion Henry
5324569019 convert opid to use Rc<> 2021-12-30 00:32:01 -05:00
25 changed files with 695 additions and 1669 deletions

View file

@ -10,7 +10,7 @@
"mocha": "^9.1.1" "mocha": "^9.1.1"
}, },
"dependencies": { "dependencies": {
"automerge-wasm": "file:../automerge-wasm", "automerge-wasm": "file:../automerge-wasm/dev",
"fast-sha256": "^1.3.0", "fast-sha256": "^1.3.0",
"pako": "^2.0.4", "pako": "^2.0.4",
"uuid": "^8.3" "uuid": "^8.3"

View file

@ -1,6 +1,6 @@
extern crate web_sys; extern crate web_sys;
use automerge as am; use automerge as am;
use automerge::{Change, ChangeHash, Prop, Value}; use automerge::{Change, ChangeHash, ObjId, Prop, Value};
use js_sys::{Array, Object, Reflect, Uint8Array}; use js_sys::{Array, Object, Reflect, Uint8Array};
use serde::de::DeserializeOwned; use serde::de::DeserializeOwned;
use serde::Serialize; use serde::Serialize;
@ -151,9 +151,9 @@ impl Automerge {
pub fn keys(&mut self, obj: JsValue, heads: JsValue) -> Result<Array, JsValue> { pub fn keys(&mut self, obj: JsValue, heads: JsValue) -> Result<Array, JsValue> {
let obj = self.import(obj)?; let obj = self.import(obj)?;
let result = if let Some(heads) = get_heads(heads) { let result = if let Some(heads) = get_heads(heads) {
self.0.keys_at(obj, &heads) self.0.keys_at(&obj, &heads)
} else { } else {
self.0.keys(obj) self.0.keys(&obj)
} }
.iter() .iter()
.map(|s| JsValue::from_str(s)) .map(|s| JsValue::from_str(s))
@ -164,9 +164,9 @@ impl Automerge {
pub fn text(&mut self, obj: JsValue, heads: JsValue) -> Result<JsValue, JsValue> { pub fn text(&mut self, obj: JsValue, heads: JsValue) -> Result<JsValue, JsValue> {
let obj = self.import(obj)?; let obj = self.import(obj)?;
if let Some(heads) = get_heads(heads) { if let Some(heads) = get_heads(heads) {
self.0.text_at(obj, &heads) self.0.text_at(&obj, &heads)
} else { } else {
self.0.text(obj) self.0.text(&obj)
} }
.map_err(to_js_err) .map_err(to_js_err)
.map(|t| t.into()) .map(|t| t.into())
@ -185,7 +185,7 @@ impl Automerge {
let mut vals = vec![]; let mut vals = vec![];
if let Some(t) = text.as_string() { if let Some(t) = text.as_string() {
self.0 self.0
.splice_text(obj, start, delete_count, &t) .splice_text(&obj, start, delete_count, &t)
.map_err(to_js_err)?; .map_err(to_js_err)?;
} else { } else {
if let Ok(array) = text.dyn_into::<Array>() { if let Ok(array) = text.dyn_into::<Array>() {
@ -201,7 +201,7 @@ impl Automerge {
} }
} }
self.0 self.0
.splice(obj, start, delete_count, vals) .splice(&obj, start, delete_count, vals)
.map_err(to_js_err)?; .map_err(to_js_err)?;
} }
Ok(()) Ok(())
@ -223,7 +223,7 @@ impl Automerge {
let value = self.import_value(value, datatype)?; let value = self.import_value(value, datatype)?;
let opid = self let opid = self
.0 .0
.insert(obj, index as usize, value) .insert(&obj, index as usize, value)
.map_err(to_js_err)?; .map_err(to_js_err)?;
Ok(self.export(opid)) Ok(self.export(opid))
} }
@ -238,7 +238,7 @@ impl Automerge {
let obj = self.import(obj)?; let obj = self.import(obj)?;
let prop = self.import_prop(prop)?; let prop = self.import_prop(prop)?;
let value = self.import_value(value, datatype)?; let value = self.import_value(value, datatype)?;
let opid = self.0.set(obj, prop, value).map_err(to_js_err)?; let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?;
match opid { match opid {
Some(opid) => Ok(self.export(opid)), Some(opid) => Ok(self.export(opid)),
None => Ok(JsValue::null()), None => Ok(JsValue::null()),
@ -252,7 +252,7 @@ impl Automerge {
.as_f64() .as_f64()
.ok_or("inc needs a numberic value") .ok_or("inc needs a numberic value")
.map_err(to_js_err)?; .map_err(to_js_err)?;
self.0.inc(obj, prop, value as i64).map_err(to_js_err)?; self.0.inc(&obj, prop, value as i64).map_err(to_js_err)?;
Ok(()) Ok(())
} }
@ -263,9 +263,9 @@ impl Automerge {
let heads = get_heads(heads); let heads = get_heads(heads);
if let Ok(prop) = prop { if let Ok(prop) = prop {
let value = if let Some(h) = heads { let value = if let Some(h) = heads {
self.0.value_at(obj, prop, &h) self.0.value_at(&obj, prop, &h)
} else { } else {
self.0.value(obj, prop) self.0.value(&obj, prop)
} }
.map_err(to_js_err)?; .map_err(to_js_err)?;
match value { match value {
@ -289,9 +289,9 @@ impl Automerge {
let prop = to_prop(arg); let prop = to_prop(arg);
if let Ok(prop) = prop { if let Ok(prop) = prop {
let values = if let Some(heads) = get_heads(heads) { let values = if let Some(heads) = get_heads(heads) {
self.0.values_at(obj, prop, &heads) self.0.values_at(&obj, prop, &heads)
} else { } else {
self.0.values(obj, prop) self.0.values(&obj, prop)
} }
.map_err(to_js_err)?; .map_err(to_js_err)?;
for value in values { for value in values {
@ -318,16 +318,16 @@ impl Automerge {
pub fn length(&mut self, obj: JsValue, heads: JsValue) -> Result<JsValue, JsValue> { pub fn length(&mut self, obj: JsValue, heads: JsValue) -> Result<JsValue, JsValue> {
let obj = self.import(obj)?; let obj = self.import(obj)?;
if let Some(heads) = get_heads(heads) { if let Some(heads) = get_heads(heads) {
Ok((self.0.length_at(obj, &heads) as f64).into()) Ok((self.0.length_at(&obj, &heads) as f64).into())
} else { } else {
Ok((self.0.length(obj) as f64).into()) Ok((self.0.length(&obj) as f64).into())
} }
} }
pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> {
let obj = self.import(obj)?; let obj = self.import(obj)?;
let prop = to_prop(prop)?; let prop = to_prop(prop)?;
self.0.del(obj, prop).map_err(to_js_err)?; self.0.del(&obj, prop).map_err(to_js_err)?;
Ok(()) Ok(())
} }
@ -442,15 +442,12 @@ impl Automerge {
} }
} }
fn export<E: automerge::Exportable>(&self, val: E) -> JsValue { fn export(&self, id: ObjId) -> JsValue {
self.0.export(val).into() id.to_string().into()
} }
fn import<I: automerge::Importable>(&self, id: JsValue) -> Result<I, JsValue> { fn import(&self, id: JsValue) -> Result<ObjId, JsValue> {
let id_str = id let id_str = id.as_string().ok_or("invalid opid").map_err(to_js_err)?;
.as_string()
.ok_or("invalid opid/objid/elemid")
.map_err(to_js_err)?;
self.0.import(&id_str).map_err(to_js_err) self.0.import(&id_str).map_err(to_js_err)
} }

View file

@ -6,10 +6,7 @@ use crate::decoding;
use crate::decoding::{Decodable, InvalidChangeError}; use crate::decoding::{Decodable, InvalidChangeError};
use crate::encoding::{Encodable, DEFLATE_MIN_SIZE}; use crate::encoding::{Encodable, DEFLATE_MIN_SIZE};
use crate::legacy as amp; use crate::legacy as amp;
use crate::{ use crate::{ActorId, AutomergeError, Op, OpType, Transaction};
ActorId, AutomergeError, ElemId, IndexedCache, Key, ObjId, Op, OpId, OpType, Transaction, HEAD,
ROOT,
};
use core::ops::Range; use core::ops::Range;
use flate2::{ use flate2::{
bufread::{DeflateDecoder, DeflateEncoder}, bufread::{DeflateDecoder, DeflateEncoder},
@ -50,33 +47,15 @@ fn get_heads(changes: &[amp::Change]) -> HashSet<amp::ChangeHash> {
pub(crate) fn encode_document( pub(crate) fn encode_document(
changes: &[amp::Change], changes: &[amp::Change],
doc_ops: &[Op], doc_ops: &[Op],
actors_index: &IndexedCache<ActorId>, actors: &[ActorId],
props: &[String],
) -> Result<Vec<u8>, AutomergeError> { ) -> Result<Vec<u8>, AutomergeError> {
let mut bytes: Vec<u8> = Vec::new(); let mut bytes: Vec<u8> = Vec::new();
let heads = get_heads(changes); let heads = get_heads(changes);
let actors_map = actors_index.encode_index(); let (change_bytes, change_info) = ChangeEncoder::encode_changes(changes, actors);
let actors = actors_index.sorted();
/* let (ops_bytes, ops_info) = DocOpEncoder::encode_doc_ops(doc_ops, actors);
// this assumes that all actor_ids referenced are seen in changes.actor_id which is true
// so long as we have a full history
let mut actors: Vec<_> = changes
.iter()
.map(|c| &c.actor)
.unique()
.sorted()
.cloned()
.collect();
*/
let (change_bytes, change_info) = ChangeEncoder::encode_changes(changes, &actors);
//let doc_ops = group_doc_ops(changes, &actors);
let (ops_bytes, ops_info) = DocOpEncoder::encode_doc_ops(doc_ops, &actors_map, props);
bytes.extend(&MAGIC_BYTES); bytes.extend(&MAGIC_BYTES);
bytes.extend(vec![0, 0, 0, 0]); // we dont know the hash yet so fill in a fake bytes.extend(vec![0, 0, 0, 0]); // we dont know the hash yet so fill in a fake
@ -86,7 +65,7 @@ pub(crate) fn encode_document(
actors.len().encode(&mut chunk)?; actors.len().encode(&mut chunk)?;
for a in actors.into_iter() { for a in actors.iter() {
a.to_bytes().encode(&mut chunk)?; a.to_bytes().encode(&mut chunk)?;
} }
@ -200,7 +179,8 @@ fn encode_chunk(change: &amp::Change, deps: &[amp::ChangeHash]) -> ChunkIntermed
} }
// encode first actor // encode first actor
let mut actors = vec![change.actor_id.clone()]; //let mut actors = vec![change.actor_id.clone()];
let mut actors = change.actors();
change.actor_id.to_bytes().encode(&mut bytes).unwrap(); change.actor_id.to_bytes().encode(&mut bytes).unwrap();
// encode seq, start_op, time, message // encode seq, start_op, time, message
@ -416,61 +396,16 @@ fn increment_range_map(ranges: &mut HashMap<u32, Range<usize>>, len: usize) {
} }
} }
fn export_objid(id: &ObjId, actors: &IndexedCache<ActorId>) -> amp::ObjectId { pub(crate) fn export_change(change: &Transaction) -> Change {
if id.0 == ROOT {
amp::ObjectId::Root
} else {
export_opid(&id.0, actors).into()
}
}
fn export_elemid(id: &ElemId, actors: &IndexedCache<ActorId>) -> amp::ElementId {
if id == &HEAD {
amp::ElementId::Head
} else {
export_opid(&id.0, actors).into()
}
}
fn export_opid(id: &OpId, actors: &IndexedCache<ActorId>) -> amp::OpId {
amp::OpId(id.0, actors.get(id.1).clone())
}
fn export_op(op: &Op, actors: &IndexedCache<ActorId>, props: &IndexedCache<String>) -> amp::Op {
let action = op.action.clone();
let key = match &op.key {
Key::Map(n) => amp::Key::Map(props.get(*n).clone().into()),
Key::Seq(id) => amp::Key::Seq(export_elemid(id, actors)),
};
let obj = export_objid(&op.obj, actors);
let pred = op.pred.iter().map(|id| export_opid(id, actors)).collect();
amp::Op {
action,
obj,
insert: op.insert,
pred,
key,
}
}
pub(crate) fn export_change(
change: &Transaction,
actors: &IndexedCache<ActorId>,
props: &IndexedCache<String>,
) -> Change {
amp::Change { amp::Change {
actor_id: actors.get(change.actor).clone(), actor_id: change.actor.as_ref().clone(),
seq: change.seq, seq: change.seq,
start_op: change.start_op, start_op: change.start_op,
time: change.time, time: change.time,
deps: change.deps.clone(), deps: change.deps.clone(),
message: change.message.clone(), message: change.message.clone(),
hash: change.hash, hash: change.hash,
operations: change operations: change.operations.iter().map(|op| op.into()).collect(),
.operations
.iter()
.map(|op| export_op(op, actors, props))
.collect(),
extra_bytes: change.extra_bytes.clone(), extra_bytes: change.extra_bytes.clone(),
} }
.into() .into()

View file

@ -1,26 +1,25 @@
use crate::OpId; use crate::{ActorId, OpId};
use fxhash::FxBuildHasher;
use std::cmp; use std::cmp;
use std::collections::HashMap; use std::collections::HashMap;
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub(crate) struct Clock(HashMap<usize, u64, FxBuildHasher>); pub(crate) struct Clock(HashMap<ActorId, u64>);
impl Clock { impl Clock {
pub fn new() -> Self { pub fn new() -> Self {
Clock(Default::default()) Clock(HashMap::new())
} }
pub fn include(&mut self, key: usize, n: u64) { pub fn include(&mut self, key: &ActorId, n: u64) {
self.0 self.0
.entry(key) .entry(key.clone())
.and_modify(|m| *m = cmp::max(n, *m)) .and_modify(|m| *m = cmp::max(n, *m))
.or_insert(n); .or_insert(n);
} }
pub fn covers(&self, id: &OpId) -> bool { pub fn covers(&self, id: &OpId) -> bool {
if let Some(val) = self.0.get(&id.1) { if let Some(val) = self.0.get(&id.actor) {
val >= &id.0 val >= &id.counter
} else { } else {
false false
} }
@ -34,19 +33,22 @@ mod tests {
#[test] #[test]
fn covers() { fn covers() {
let mut clock = Clock::new(); let mut clock = Clock::new();
let a1 = ActorId::random();
let a2 = ActorId::random();
let a3 = ActorId::random();
clock.include(1, 20); clock.include(&a1, 20);
clock.include(2, 10); clock.include(&a2, 10);
assert!(clock.covers(&OpId(10, 1))); assert!(clock.covers(&OpId::at(10, &a1)));
assert!(clock.covers(&OpId(20, 1))); assert!(clock.covers(&OpId::at(20, &a1)));
assert!(!clock.covers(&OpId(30, 1))); assert!(!clock.covers(&OpId::at(30, &a1)));
assert!(clock.covers(&OpId(5, 2))); assert!(clock.covers(&OpId::at(5, &a2)));
assert!(clock.covers(&OpId(10, 2))); assert!(clock.covers(&OpId::at(10, &a2)));
assert!(!clock.covers(&OpId(15, 2))); assert!(!clock.covers(&OpId::at(15, &a2)));
assert!(!clock.covers(&OpId(1, 3))); assert!(!clock.covers(&OpId::at(1, &a3)));
assert!(!clock.covers(&OpId(100, 3))); assert!(!clock.covers(&OpId::at(100, &a3)));
} }
} }

View file

@ -11,7 +11,6 @@ use std::{
str, str,
}; };
use crate::ROOT;
use crate::{ActorId, ElemId, Key, ObjId, ObjType, OpId, OpType, ScalarValue}; use crate::{ActorId, ElemId, Key, ObjId, ObjType, OpId, OpType, ScalarValue};
use crate::legacy as amp; use crate::legacy as amp;
@ -23,7 +22,7 @@ use tracing::instrument;
use crate::{ use crate::{
decoding::{BooleanDecoder, Decodable, Decoder, DeltaDecoder, RleDecoder}, decoding::{BooleanDecoder, Decodable, Decoder, DeltaDecoder, RleDecoder},
encoding::{BooleanEncoder, ColData, DeltaEncoder, Encodable, RleEncoder}, encoding::{BooleanEncoder, ColData, DeltaEncoder, Encodable, RleEncoder},
IndexedCache, Op, Op,
}; };
impl Encodable for Action { impl Encodable for Action {
@ -42,12 +41,14 @@ impl Encodable for [ActorId] {
} }
} }
fn map_actor(actor: &ActorId, actors: &mut Vec<ActorId>) -> usize { fn map_actor(actor: &ActorId, actors: &[ActorId]) -> usize {
if let Some(pos) = actors.iter().position(|a| a == actor) { if let Some(pos) = actors.iter().position(|a| a == actor) {
pos pos
} else { } else {
actors.push(actor.clone()); panic!(
actors.len() - 1 "map_actor cant find actor! seeking={} actors={:?}",
actor, actors
);
} }
} }
@ -558,7 +559,7 @@ impl ValEncoder {
} }
} }
fn append_value(&mut self, val: &ScalarValue, actors: &[usize]) { fn append_value(&mut self, val: &ScalarValue, actors: &[ActorId]) {
// It may seem weird to have two consecutive matches on the same value. The reason is so // It may seem weird to have two consecutive matches on the same value. The reason is so
// that we don't have to repeat the `append_null` calls on ref_actor and ref_counter in // that we don't have to repeat the `append_null` calls on ref_actor and ref_counter in
// every arm of the next match // every arm of the next match
@ -679,22 +680,21 @@ impl KeyEncoder {
} }
} }
fn append(&mut self, key: Key, actors: &[usize], props: &[String]) { fn append(&mut self, key: Key, actors: &[ActorId]) {
match key { match key {
Key::Map(i) => { Key::Map(i) => {
self.actor.append_null(); self.actor.append_null();
self.ctr.append_null(); self.ctr.append_null();
self.str.append_value(props[i].clone()); self.str.append_value(i);
} }
Key::Seq(ElemId(OpId(0, 0))) => { Key::Seq(ElemId::Head) => {
// HEAD
self.actor.append_null(); self.actor.append_null();
self.ctr.append_value(0); self.ctr.append_value(0);
self.str.append_null(); self.str.append_null();
} }
Key::Seq(ElemId(OpId(ctr, actor))) => { Key::Seq(ElemId::Id(OpId{ counter, actor, .. })) => {
self.actor.append_value(actors[actor]); self.actor.append_value(map_actor(&actor, actors));
self.ctr.append_value(ctr); self.ctr.append_value(counter);
self.str.append_null(); self.str.append_null();
} }
} }
@ -770,11 +770,11 @@ impl SuccEncoder {
} }
} }
fn append(&mut self, succ: &[OpId], actors: &[usize]) { fn append(&mut self, succ: &[OpId], actors: &[ActorId]) {
self.num.append_value(succ.len()); self.num.append_value(succ.len());
for s in succ.iter() { for s in succ.iter() {
self.ctr.append_value(s.0); self.ctr.append_value(s.counter);
self.actor.append_value(actors[s.1]); self.actor.append_value(map_actor(&s.actor, actors));
} }
} }
@ -844,15 +844,15 @@ impl ObjEncoder {
} }
} }
fn append(&mut self, obj: &ObjId, actors: &[usize]) { fn append(&mut self, obj: &ObjId, actors: &[ActorId]) {
match obj.0 { match obj {
ROOT => { ObjId::Root => {
self.actor.append_null(); self.actor.append_null();
self.ctr.append_null(); self.ctr.append_null();
} }
OpId(ctr, actor) => { ObjId::Id(id) => {
self.actor.append_value(actors[actor]); self.actor.append_value(map_actor(&id.actor, actors));
self.ctr.append_value(ctr); self.ctr.append_value(id.counter);
} }
} }
} }
@ -915,10 +915,7 @@ pub(crate) struct ChangeEncoder {
impl ChangeEncoder { impl ChangeEncoder {
#[instrument(level = "debug", skip(changes, actors))] #[instrument(level = "debug", skip(changes, actors))]
pub fn encode_changes<'a, 'b, I>( pub fn encode_changes<'a, 'b, I>(changes: I, actors: &'a [ActorId]) -> (Vec<u8>, Vec<u8>)
changes: I,
actors: &'a IndexedCache<ActorId>,
) -> (Vec<u8>, Vec<u8>)
where where
I: IntoIterator<Item = &'b amp::Change>, I: IntoIterator<Item = &'b amp::Change>,
{ {
@ -941,7 +938,7 @@ impl ChangeEncoder {
} }
} }
fn encode<'a, 'b, 'c, I>(&'a mut self, changes: I, actors: &'b IndexedCache<ActorId>) fn encode<'a, 'b, 'c, I>(&'a mut self, changes: I, actors: &'b [ActorId])
where where
I: IntoIterator<Item = &'c amp::Change>, I: IntoIterator<Item = &'c amp::Change>,
{ {
@ -950,8 +947,7 @@ impl ChangeEncoder {
if let Some(hash) = change.hash { if let Some(hash) = change.hash {
index_by_hash.insert(hash, index); index_by_hash.insert(hash, index);
} }
self.actor self.actor.append_value(map_actor(&change.actor_id, actors));
.append_value(actors.lookup(change.actor_id.clone()).unwrap()); //actors.iter().position(|a| a == &change.actor_id).unwrap());
self.seq.append_value(change.seq); self.seq.append_value(change.seq);
// FIXME iterops.count is crazy slow // FIXME iterops.count is crazy slow
self.max_op self.max_op
@ -1024,16 +1020,12 @@ pub(crate) struct DocOpEncoder {
impl DocOpEncoder { impl DocOpEncoder {
#[instrument(level = "debug", skip(ops, actors))] #[instrument(level = "debug", skip(ops, actors))]
pub(crate) fn encode_doc_ops<'a, I>( pub(crate) fn encode_doc_ops<'a, I>(ops: I, actors: &'a [ActorId]) -> (Vec<u8>, Vec<u8>)
ops: I,
actors: &'a [usize],
props: &'a [String],
) -> (Vec<u8>, Vec<u8>)
where where
I: IntoIterator<Item = &'a Op>, I: IntoIterator<Item = &'a Op>,
{ {
let mut e = Self::new(); let mut e = Self::new();
e.encode(ops, actors, props); e.encode(ops, actors);
e.finish() e.finish()
} }
@ -1050,15 +1042,15 @@ impl DocOpEncoder {
} }
} }
fn encode<'a, I>(&mut self, ops: I, actors: &[usize], props: &[String]) fn encode<'a, I>(&mut self, ops: I, actors: &[ActorId])
where where
I: IntoIterator<Item = &'a Op>, I: IntoIterator<Item = &'a Op>,
{ {
for op in ops { for op in ops {
self.actor.append_value(actors[op.id.actor()]); self.actor.append_value(map_actor(&op.id.actor, actors));
self.ctr.append_value(op.id.counter()); self.ctr.append_value(op.id.counter);
self.obj.append(&op.obj, actors); self.obj.append(&op.obj, actors);
self.key.append(op.key, actors, props); self.key.append(op.key.clone(), actors);
self.insert.append(op.insert); self.insert.append(op.insert);
self.succ.append(&op.succ, actors); self.succ.append(&op.succ, actors);
let action = match &op.action { let action = match &op.action {

View file

@ -5,8 +5,10 @@ use std::iter::FromIterator;
pub(crate) use crate::value::DataType; pub(crate) use crate::value::DataType;
pub(crate) use crate::{ActorId, ChangeHash, ObjType, OpType, ScalarValue}; pub(crate) use crate::{ActorId, ChangeHash, ObjType, OpType, ScalarValue};
use itertools::Itertools;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use smol_str::SmolStr; use smol_str::SmolStr;
use std::collections::HashSet;
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Copy, Hash)] #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Copy, Hash)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))] #[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
@ -251,6 +253,29 @@ pub struct Change {
pub extra_bytes: Vec<u8>, pub extra_bytes: Vec<u8>,
} }
impl Change {
pub(crate) fn actors(&self) -> Vec<ActorId> {
let first = self.actor_id.clone();
let mut set = HashSet::new();
for o in &self.operations {
if let ObjectId::Id(OpId(_, a)) = &o.obj {
set.insert(a.clone());
}
if let Key::Seq(ElementId::Id(OpId(_, a))) = &o.key {
set.insert(a.clone());
}
for p in o.pred.iter() {
set.insert(p.1.clone());
}
}
set.remove(&first);
let mut result = vec![first];
let set: Vec<ActorId> = set.iter().sorted().cloned().collect();
result.extend(set);
result
}
}
impl PartialEq for Change { impl PartialEq for Change {
// everything but hash (its computed and not always present) // everything but hash (its computed and not always present)
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {

View file

@ -33,7 +33,6 @@ mod clock;
mod columnar; mod columnar;
mod decoding; mod decoding;
mod encoding; mod encoding;
mod indexed_cache;
mod legacy; mod legacy;
mod sync; mod sync;
#[cfg(feature = "optree-visualisation")] #[cfg(feature = "optree-visualisation")]
@ -46,34 +45,36 @@ mod query;
mod types; mod types;
mod value; mod value;
use crate::legacy as amp;
use change::{encode_document, export_change}; use change::{encode_document, export_change};
use clock::Clock; use clock::Clock;
use indexed_cache::IndexedCache; use itertools::Itertools;
use op_set::OpSet; use op_set::OpSet;
use std::collections::{HashMap, HashSet, VecDeque}; use std::collections::{HashMap, HashSet, VecDeque};
use types::{ElemId, Key, ObjId, Op, HEAD}; use std::rc::Rc;
use types::{ElemId, Key, Op};
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
pub const ROOT: ObjId = ObjId::Root;
pub use change::{decode_change, Change}; pub use change::{decode_change, Change};
pub use error::AutomergeError; pub use error::AutomergeError;
pub use legacy::Change as ExpandedChange; pub use legacy::Change as ExpandedChange;
pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState};
pub use types::{ pub use types::{ActorId, ChangeHash, ObjId, ObjType, OpId, OpType, Patch, Peer, Prop};
ActorId, ChangeHash, Export, Exportable, Importable, ObjType, OpId, OpType, Patch, Peer, Prop,
ROOT,
};
pub use value::{ScalarValue, Value}; pub use value::{ScalarValue, Value};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Automerge { pub struct Automerge {
queue: Vec<Change>, queue: Vec<Change>,
history: Vec<Change>, history: Vec<Change>,
actors: HashMap<ActorId, Rc<ActorId>>,
history_index: HashMap<ChangeHash, usize>, history_index: HashMap<ChangeHash, usize>,
states: HashMap<usize, Vec<usize>>, states: HashMap<Rc<ActorId>, Vec<usize>>,
deps: HashSet<ChangeHash>, deps: HashSet<ChangeHash>,
saved: Vec<ChangeHash>, saved: Vec<ChangeHash>,
ops: OpSet, ops: OpSet,
actor: Option<usize>, actor: Option<Rc<ActorId>>,
max_op: u64, max_op: u64,
transaction: Option<Transaction>, transaction: Option<Transaction>,
} }
@ -84,6 +85,7 @@ impl Automerge {
queue: vec![], queue: vec![],
history: vec![], history: vec![],
history_index: HashMap::new(), history_index: HashMap::new(),
actors: HashMap::new(),
states: HashMap::new(), states: HashMap::new(),
ops: Default::default(), ops: Default::default(),
deps: Default::default(), deps: Default::default(),
@ -94,36 +96,57 @@ impl Automerge {
} }
} }
pub fn set_actor(&mut self, actor: ActorId) { pub fn import(&self, s: &str) -> Result<ObjId, AutomergeError> {
self.ensure_transaction_closed(); if s == "_root" {
self.actor = Some(self.ops.m.actors.cache(actor)) Ok(ObjId::Root)
} else {
let n = s
.find('@')
.ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?;
let counter = s[0..n]
.parse()
.map_err(|_| AutomergeError::InvalidOpId(s.to_owned()))?;
let actor = ActorId::from(hex::decode(&s[(n + 1)..]).unwrap());
if let Some(actor) = self.actors.get(&actor).cloned() {
Ok(ObjId::Id(OpId::at(counter, actor)))
} else {
Err(AutomergeError::InvalidOpId(s.to_owned()))
}
}
} }
fn random_actor(&mut self) -> ActorId { fn import_actor(&mut self, actor: &ActorId) -> Rc<ActorId> {
if let Some(a) = self.actors.get(actor) {
a.clone()
} else {
let a = Rc::new(actor.clone());
self.actors.insert(actor.clone(), a.clone());
a
}
}
pub fn set_actor(&mut self, actor: ActorId) {
self.ensure_transaction_closed();
self.actor = Some(self.import_actor(&actor));
}
fn random_actor(&mut self) -> Rc<ActorId> {
let actor = ActorId::from(uuid::Uuid::new_v4().as_bytes().to_vec()); let actor = ActorId::from(uuid::Uuid::new_v4().as_bytes().to_vec());
self.actor = Some(self.ops.m.actors.cache(actor.clone())); let actor = self.import_actor(&actor);
self.actor = Some(actor.clone());
actor actor
} }
pub fn get_actor(&mut self) -> ActorId { pub fn get_actor(&mut self) -> Rc<ActorId> {
if let Some(actor) = self.actor { if let Some(actor) = &self.actor {
self.ops.m.actors[actor].clone() actor.clone()
} else { } else {
self.random_actor() self.random_actor()
} }
} }
pub fn maybe_get_actor(&self) -> Option<ActorId> { pub fn maybe_get_actor(&self) -> Option<Rc<ActorId>> {
self.actor.map(|i| self.ops.m.actors[i].clone()) self.actor.clone()
}
fn get_actor_index(&mut self) -> usize {
if let Some(actor) = self.actor {
actor
} else {
self.random_actor();
self.actor.unwrap() // random_actor always sets actor to is_some()
}
} }
pub fn new_with_actor_id(actor: ActorId) -> Self { pub fn new_with_actor_id(actor: ActorId) -> Self {
@ -131,6 +154,7 @@ impl Automerge {
queue: vec![], queue: vec![],
history: vec![], history: vec![],
history_index: HashMap::new(), history_index: HashMap::new(),
actors: HashMap::new(),
states: HashMap::new(), states: HashMap::new(),
ops: Default::default(), ops: Default::default(),
deps: Default::default(), deps: Default::default(),
@ -139,7 +163,7 @@ impl Automerge {
max_op: 0, max_op: 0,
transaction: None, transaction: None,
}; };
am.actor = Some(am.ops.m.actors.cache(actor)); am.set_actor(actor);
am am
} }
@ -152,12 +176,12 @@ impl Automerge {
fn tx(&mut self) -> &mut Transaction { fn tx(&mut self) -> &mut Transaction {
if self.transaction.is_none() { if self.transaction.is_none() {
let actor = self.get_actor_index(); let actor = self.get_actor();
let seq = self.states.entry(actor).or_default().len() as u64 + 1; let seq = self.states.entry(actor.clone()).or_default().len() as u64 + 1;
let mut deps = self.get_heads(); let mut deps = self.get_heads();
if seq > 1 { if seq > 1 {
let last_hash = self.get_hash(actor, seq - 1).unwrap(); let last_hash = self.get_hash(&actor, seq - 1).unwrap();
if !deps.contains(&last_hash) { if !deps.contains(&last_hash) {
deps.push(last_hash); deps.push(last_hash);
} }
@ -199,7 +223,8 @@ impl Automerge {
pub fn ensure_transaction_closed(&mut self) { pub fn ensure_transaction_closed(&mut self) {
if let Some(tx) = self.transaction.take() { if let Some(tx) = self.transaction.take() {
self.update_history(export_change(&tx, &self.ops.m.actors, &self.ops.m.props)); //self.update_history(export_change(&tx, &self.ops.m.actors, &self.ops.m.props));
self.update_history(export_change(&tx));
} }
} }
@ -211,11 +236,11 @@ impl Automerge {
// FIXME - use query to make this fast // FIXME - use query to make this fast
if let Some(p) = self.ops.iter().position(|o| o.id == *pred_id) { if let Some(p) = self.ops.iter().position(|o| o.id == *pred_id) {
self.ops self.ops
.replace(op.obj, p, |o| o.succ.retain(|i| i != pred_id)); .replace(&op.obj, p, |o| o.succ.retain(|i| i != pred_id));
} }
} }
if let Some(pos) = self.ops.iter().position(|o| o.id == op.id) { if let Some(pos) = self.ops.iter().position(|o| o.id == op.id) {
self.ops.remove(op.obj, pos); self.ops.remove(&op.obj, pos);
} }
} }
num num
@ -226,13 +251,16 @@ impl Automerge {
fn next_id(&mut self) -> OpId { fn next_id(&mut self) -> OpId {
let tx = self.tx(); let tx = self.tx();
OpId(tx.start_op + tx.operations.len() as u64, tx.actor) OpId::at(
tx.start_op + tx.operations.len() as u64,
tx.actor.clone(),
)
} }
fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) { fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) {
for succ in succ_pos { for succ in succ_pos {
self.ops.replace(op.obj, *succ, |old_op| { self.ops.replace(&op.obj, *succ, |old_op| {
old_op.succ.push(op.id); old_op.succ.push(op.id.clone());
}); });
} }
@ -244,11 +272,11 @@ impl Automerge {
} }
fn insert_op(&mut self, op: Op) -> Op { fn insert_op(&mut self, op: Op) -> Op {
let q = self.ops.search(op.obj, query::SeekOp::new(&op)); let q = self.ops.search(&op.obj, query::SeekOp::new(&op));
for i in q.succ { for i in q.succ {
self.ops self.ops
.replace(op.obj, i, |old_op| old_op.succ.push(op.id)); .replace(&op.obj, i, |old_op| old_op.succ.push(op.id.clone()));
} }
if !op.is_del() { if !op.is_del() {
@ -262,24 +290,24 @@ impl Automerge {
// PropAt::() // PropAt::()
// NthAt::() // NthAt::()
pub fn keys(&self, obj: OpId) -> Vec<String> { pub fn keys(&self, obj: &ObjId) -> Vec<String> {
let q = self.ops.search(obj.into(), query::Keys::new()); let q = self.ops.search(obj, query::Keys::new());
q.keys.iter().map(|k| self.export(*k)).collect() q.keys.iter().map(|k| k.to_string()).collect()
} }
pub fn keys_at(&self, obj: OpId, heads: &[ChangeHash]) -> Vec<String> { pub fn keys_at(&self, obj: &ObjId, heads: &[ChangeHash]) -> Vec<String> {
let clock = self.clock_at(heads); let clock = self.clock_at(heads);
let q = self.ops.search(obj.into(), query::KeysAt::new(clock)); let q = self.ops.search(obj, query::KeysAt::new(clock));
q.keys.iter().map(|k| self.export(*k)).collect() q.keys.iter().map(|k| k.to_string()).collect()
} }
pub fn length(&self, obj: OpId) -> usize { pub fn length(&self, obj: &ObjId) -> usize {
self.ops.search(obj.into(), query::Len::new(obj.into())).len self.ops.search(obj, query::Len::new()).len
} }
pub fn length_at(&self, obj: OpId, heads: &[ChangeHash]) -> usize { pub fn length_at(&self, obj: &ObjId, heads: &[ChangeHash]) -> usize {
let clock = self.clock_at(heads); let clock = self.clock_at(heads);
self.ops.search(obj.into(), query::LenAt::new(clock)).len self.ops.search(obj, query::LenAt::new(clock)).len
} }
// set(obj, prop, value) - value can be scalar or objtype // set(obj, prop, value) - value can be scalar or objtype
@ -302,21 +330,20 @@ impl Automerge {
/// - The key does not exist in the object /// - The key does not exist in the object
pub fn set<P: Into<Prop>, V: Into<Value>>( pub fn set<P: Into<Prop>, V: Into<Value>>(
&mut self, &mut self,
obj: OpId, obj: &ObjId,
prop: P, prop: P,
value: V, value: V,
) -> Result<Option<OpId>, AutomergeError> { ) -> Result<Option<ObjId>, AutomergeError> {
let value = value.into(); let value = value.into();
self.local_op(obj.into(), prop.into(), value.into()) self.local_op(obj.clone(), prop.into(), value.into())
} }
pub fn insert<V: Into<Value>>( pub fn insert<V: Into<Value>>(
&mut self, &mut self,
obj: OpId, obj: &ObjId,
index: usize, index: usize,
value: V, value: V,
) -> Result<OpId, AutomergeError> { ) -> Result<ObjId, AutomergeError> {
let obj = obj.into();
let id = self.next_id(); let id = self.next_id();
let query = self.ops.search(obj, query::InsertNth::new(index)); let query = self.ops.search(obj, query::InsertNth::new(index));
@ -326,9 +353,9 @@ impl Automerge {
let op = Op { let op = Op {
change: self.history.len(), change: self.history.len(),
id, id: id.clone(),
action: value.into(), action: value.into(),
obj, obj: obj.clone(),
key, key,
succ: Default::default(), succ: Default::default(),
pred: Default::default(), pred: Default::default(),
@ -338,16 +365,16 @@ impl Automerge {
self.ops.insert(query.pos, op.clone()); self.ops.insert(query.pos, op.clone());
self.tx().operations.push(op); self.tx().operations.push(op);
Ok(id) Ok(id.into())
} }
pub fn inc<P: Into<Prop>>( pub fn inc<P: Into<Prop>>(
&mut self, &mut self,
obj: OpId, obj: &ObjId,
prop: P, prop: P,
value: i64, value: i64,
) -> Result<OpId, AutomergeError> { ) -> Result<ObjId, AutomergeError> {
match self.local_op(obj.into(), prop.into(), OpType::Inc(value))? { match self.local_op(obj.clone(), prop.into(), OpType::Inc(value))? {
Some(opid) => Ok(opid), Some(opid) => Ok(opid),
None => { None => {
panic!("increment should always create a new op") panic!("increment should always create a new op")
@ -355,9 +382,9 @@ impl Automerge {
} }
} }
pub fn del<P: Into<Prop>>(&mut self, obj: OpId, prop: P) -> Result<OpId, AutomergeError> { pub fn del<P: Into<Prop>>(&mut self, obj: &ObjId, prop: P) -> Result<ObjId, AutomergeError> {
// TODO: Should we also no-op multiple delete operations? // TODO: Should we also no-op multiple delete operations?
match self.local_op(obj.into(), prop.into(), OpType::Del)? { match self.local_op(obj.clone(), prop.into(), OpType::Del)? {
Some(opid) => Ok(opid), Some(opid) => Ok(opid),
None => { None => {
panic!("delete should always create a new op") panic!("delete should always create a new op")
@ -369,11 +396,11 @@ impl Automerge {
/// the new elements /// the new elements
pub fn splice( pub fn splice(
&mut self, &mut self,
obj: OpId, obj: &ObjId,
mut pos: usize, mut pos: usize,
del: usize, del: usize,
vals: Vec<Value>, vals: Vec<Value>,
) -> Result<Vec<OpId>, AutomergeError> { ) -> Result<Vec<ObjId>, AutomergeError> {
for _ in 0..del { for _ in 0..del {
self.del(obj, pos)?; self.del(obj, pos)?;
} }
@ -387,11 +414,11 @@ impl Automerge {
pub fn splice_text( pub fn splice_text(
&mut self, &mut self,
obj: OpId, obj: &ObjId,
pos: usize, pos: usize,
del: usize, del: usize,
text: &str, text: &str,
) -> Result<Vec<OpId>, AutomergeError> { ) -> Result<Vec<ObjId>, AutomergeError> {
let mut vals = vec![]; let mut vals = vec![];
for c in text.to_owned().graphemes(true) { for c in text.to_owned().graphemes(true) {
vals.push(c.into()); vals.push(c.into());
@ -399,9 +426,8 @@ impl Automerge {
self.splice(obj, pos, del, vals) self.splice(obj, pos, del, vals)
} }
pub fn text(&self, obj: OpId) -> Result<String, AutomergeError> { pub fn text(&self, obj: &ObjId) -> Result<String, AutomergeError> {
let obj = obj.into(); let query = self.ops.search(obj, query::ListVals::new());
let query = self.ops.search(obj, query::ListVals::new(obj));
let mut buffer = String::new(); let mut buffer = String::new();
for q in &query.ops { for q in &query.ops {
if let OpType::Set(ScalarValue::Str(s)) = &q.action { if let OpType::Set(ScalarValue::Str(s)) = &q.action {
@ -411,9 +437,8 @@ impl Automerge {
Ok(buffer) Ok(buffer)
} }
pub fn text_at(&self, obj: OpId, heads: &[ChangeHash]) -> Result<String, AutomergeError> { pub fn text_at(&self, obj: &ObjId, heads: &[ChangeHash]) -> Result<String, AutomergeError> {
let clock = self.clock_at(heads); let clock = self.clock_at(heads);
let obj = obj.into();
let query = self.ops.search(obj, query::ListValsAt::new(clock)); let query = self.ops.search(obj, query::ListValsAt::new(clock));
let mut buffer = String::new(); let mut buffer = String::new();
for q in &query.ops { for q in &query.ops {
@ -429,41 +454,34 @@ impl Automerge {
// Something better? // Something better?
pub fn value<P: Into<Prop>>( pub fn value<P: Into<Prop>>(
&self, &self,
obj: OpId, obj: &ObjId,
prop: P, prop: P,
) -> Result<Option<(Value, OpId)>, AutomergeError> { ) -> Result<Option<(Value, ObjId)>, AutomergeError> {
Ok(self.values(obj, prop.into())?.first().cloned()) Ok(self.values(obj, prop.into())?.first().cloned())
} }
pub fn value_at<P: Into<Prop>>( pub fn value_at<P: Into<Prop>>(
&self, &self,
obj: OpId, obj: &ObjId,
prop: P, prop: P,
heads: &[ChangeHash], heads: &[ChangeHash],
) -> Result<Option<(Value, OpId)>, AutomergeError> { ) -> Result<Option<(Value, ObjId)>, AutomergeError> {
Ok(self.values_at(obj, prop, heads)?.first().cloned()) Ok(self.values_at(obj, prop, heads)?.first().cloned())
} }
pub fn values<P: Into<Prop>>( pub fn values<P: Into<Prop>>(
&self, &self,
obj: OpId, obj: &ObjId,
prop: P, prop: P,
) -> Result<Vec<(Value, OpId)>, AutomergeError> { ) -> Result<Vec<(Value, ObjId)>, AutomergeError> {
let obj = obj.into();
let result = match prop.into() { let result = match prop.into() {
Prop::Map(p) => { Prop::Map(p) => self
let prop = self.ops.m.props.lookup(p); .ops
if let Some(p) = prop { .search(obj, query::Prop::new(p))
self.ops .ops
.search(obj, query::Prop::new(obj, p)) .into_iter()
.ops .map(|o| o.into())
.into_iter() .collect(),
.map(|o| o.into())
.collect()
} else {
vec![]
}
}
Prop::Seq(n) => self Prop::Seq(n) => self
.ops .ops
.search(obj, query::Nth::new(n)) .search(obj, query::Nth::new(n))
@ -477,27 +495,20 @@ impl Automerge {
pub fn values_at<P: Into<Prop>>( pub fn values_at<P: Into<Prop>>(
&self, &self,
obj: OpId, obj: &ObjId,
prop: P, prop: P,
heads: &[ChangeHash], heads: &[ChangeHash],
) -> Result<Vec<(Value, OpId)>, AutomergeError> { ) -> Result<Vec<(Value, ObjId)>, AutomergeError> {
let prop = prop.into(); let prop = prop.into();
let obj = obj.into();
let clock = self.clock_at(heads); let clock = self.clock_at(heads);
let result = match prop { let result = match prop {
Prop::Map(p) => { Prop::Map(p) => self
let prop = self.ops.m.props.lookup(p); .ops
if let Some(p) = prop { .search(obj, query::PropAt::new(p, clock))
self.ops .ops
.search(obj, query::PropAt::new(p, clock)) .into_iter()
.ops .map(|o| o.into())
.into_iter() .collect(),
.map(|o| o.into())
.collect()
} else {
vec![]
}
}
Prop::Seq(n) => self Prop::Seq(n) => self
.ops .ops
.search(obj, query::NthAt::new(n, clock)) .search(obj, query::NthAt::new(n, clock))
@ -555,7 +566,7 @@ impl Automerge {
obj: ObjId, obj: ObjId,
prop: Prop, prop: Prop,
action: OpType, action: OpType,
) -> Result<Option<OpId>, AutomergeError> { ) -> Result<Option<ObjId>, AutomergeError> {
match prop { match prop {
Prop::Map(s) => self.local_map_op(obj, s, action), Prop::Map(s) => self.local_map_op(obj, s, action),
Prop::Seq(n) => self.local_list_op(obj, n, action), Prop::Seq(n) => self.local_list_op(obj, n, action),
@ -567,14 +578,13 @@ impl Automerge {
obj: ObjId, obj: ObjId,
prop: String, prop: String,
action: OpType, action: OpType,
) -> Result<Option<OpId>, AutomergeError> { ) -> Result<Option<ObjId>, AutomergeError> {
if prop.is_empty() { if prop.is_empty() {
return Err(AutomergeError::EmptyStringKey); return Err(AutomergeError::EmptyStringKey);
} }
let id = self.next_id(); let id = self.next_id();
let prop = self.ops.m.props.cache(prop); let query = self.ops.search(&obj, query::Prop::new(prop.clone()));
let query = self.ops.search(obj, query::Prop::new(obj, prop));
match (&query.ops[..], &action) { match (&query.ops[..], &action) {
// If there are no conflicts for this value and the old operation and the new operation are // If there are no conflicts for this value and the old operation and the new operation are
@ -591,11 +601,11 @@ impl Automerge {
_ => {} _ => {}
} }
let pred = query.ops.iter().map(|op| op.id).collect(); let pred = query.ops.iter().map(|op| op.id.clone()).collect();
let op = Op { let op = Op {
change: self.history.len(), change: self.history.len(),
id, id: id.clone(),
action, action,
obj, obj,
key: Key::Map(prop), key: Key::Map(prop),
@ -606,7 +616,7 @@ impl Automerge {
self.insert_local_op(op, query.pos, &query.ops_pos); self.insert_local_op(op, query.pos, &query.ops_pos);
Ok(Some(id)) Ok(Some(id.into()))
} }
fn local_list_op( fn local_list_op(
@ -614,11 +624,11 @@ impl Automerge {
obj: ObjId, obj: ObjId,
index: usize, index: usize,
action: OpType, action: OpType,
) -> Result<Option<OpId>, AutomergeError> { ) -> Result<Option<ObjId>, AutomergeError> {
let query = self.ops.search(obj, query::Nth::new(index)); let query = self.ops.search(&obj, query::Nth::new(index));
let id = self.next_id(); let id = self.next_id();
let pred = query.ops.iter().map(|op| op.id).collect(); let pred = query.ops.iter().map(|op| op.id.clone()).collect();
let key = query.key()?; let key = query.key()?;
match (&query.ops[..], &action) { match (&query.ops[..], &action) {
@ -638,7 +648,7 @@ impl Automerge {
let op = Op { let op = Op {
change: self.history.len(), change: self.history.len(),
id, id: id.clone(),
action, action,
obj, obj,
key, key,
@ -649,7 +659,7 @@ impl Automerge {
self.insert_local_op(op, query.pos, &query.ops_pos); self.insert_local_op(op, query.pos, &query.ops_pos);
Ok(Some(id)) Ok(Some(id.into()))
} }
fn is_causally_ready(&self, change: &Change) -> bool { fn is_causally_ready(&self, change: &Change) -> bool {
@ -675,21 +685,33 @@ impl Automerge {
.iter_ops() .iter_ops()
.enumerate() .enumerate()
.map(|(i, c)| { .map(|(i, c)| {
let actor = self.ops.m.actors.cache(change.actor_id().clone()); let id = OpId::at(
let id = OpId(change.start_op + i as u64, actor); change.start_op + i as u64,
// FIXME dont need to_string() self.import_actor(change.actor_id()),
let obj: ObjId = self.import(&c.obj.to_string()).unwrap(); );
let obj: ObjId = match &c.obj {
amp::ObjectId::Root => ObjId::Root,
amp::ObjectId::Id(amp::OpId(c, a)) => ObjId::Id(OpId::at(
*c,
self.import_actor(a),
)),
};
let pred = c let pred = c
.pred .pred
.iter() .iter()
.map(|i| self.import(&i.to_string()).unwrap()) .map(|amp::OpId(c, a)| OpId::at(
*c,
self.import_actor(a),
))
.collect(); .collect();
let key = match &c.key { let key = match &c.key {
legacy::Key::Map(n) => Key::Map(self.ops.m.props.cache(n.to_string())), amp::Key::Map(n) => Key::Map(n.to_string()),
legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(HEAD), amp::Key::Seq(amp::ElementId::Head) => Key::Seq(ElemId::Head),
// FIXME dont need to_string() amp::Key::Seq(amp::ElementId::Id(amp::OpId(c, a))) => {
legacy::Key::Seq(legacy::ElementId::Id(i)) => { Key::Seq(ElemId::Id(OpId::at(
Key::Seq(self.import(&i.to_string()).unwrap()) *c,
self.import_actor(a),
)))
} }
}; };
Op { Op {
@ -724,12 +746,8 @@ impl Automerge {
let c: Vec<_> = self.history.iter().map(|c| c.decode()).collect(); let c: Vec<_> = self.history.iter().map(|c| c.decode()).collect();
let ops: Vec<_> = self.ops.iter().cloned().collect(); let ops: Vec<_> = self.ops.iter().cloned().collect();
// TODO - can we make encode_document error free // TODO - can we make encode_document error free
let bytes = encode_document( let actors: Vec<_> = self.actors.keys().cloned().sorted().collect();
&c, let bytes = encode_document(&c, ops.as_slice(), &actors);
ops.as_slice(),
&self.ops.m.actors,
&self.ops.m.props.cache,
);
if bytes.is_ok() { if bytes.is_ok() {
self.saved = self.get_heads().iter().copied().collect(); self.saved = self.get_heads().iter().copied().collect();
} }
@ -899,8 +917,11 @@ impl Automerge {
pub fn get_last_local_change(&mut self) -> Option<&Change> { pub fn get_last_local_change(&mut self) -> Option<&Change> {
self.ensure_transaction_closed(); self.ensure_transaction_closed();
if let Some(actor) = &self.actor { if let Some(actor) = &self.actor {
let actor = &self.ops.m.actors[*actor]; return self
return self.history.iter().rev().find(|c| c.actor_id() == actor); .history
.iter()
.rev()
.find(|c| c.actor_id() == actor.as_ref());
} }
None None
} }
@ -930,8 +951,8 @@ impl Automerge {
to_see.push(*h); to_see.push(*h);
} }
} }
let actor = self.ops.m.actors.lookup(c.actor_id().clone()).unwrap(); //let actor = self.ops.m.actors.lookup(c.actor_id().clone()).unwrap();
clock.include(actor, c.max_op()); clock.include(c.actor_id(), c.max_op());
seen.insert(hash); seen.insert(hash);
} }
} }
@ -989,9 +1010,9 @@ impl Automerge {
deps deps
} }
fn get_hash(&mut self, actor: usize, seq: u64) -> Result<ChangeHash, AutomergeError> { fn get_hash(&mut self, actor: &ActorId, seq: u64) -> Result<ChangeHash, AutomergeError> {
self.states self.states
.get(&actor) .get(actor)
.and_then(|v| v.get(seq as usize - 1)) .and_then(|v| v.get(seq as usize - 1))
.and_then(|&i| self.history.get(i)) .and_then(|&i| self.history.get(i))
.map(|c| c.hash) .map(|c| c.hash)
@ -1005,10 +1026,8 @@ impl Automerge {
let history_index = self.history.len(); let history_index = self.history.len();
self.states let actor = self.import_actor(change.actor_id());
.entry(self.ops.m.actors.cache(change.actor_id().clone())) self.states.entry(actor).or_default().push(history_index);
.or_default()
.push(history_index);
self.history_index.insert(change.hash, history_index); self.history_index.insert(change.hash, history_index);
self.history.push(change); self.history.push(change);
@ -1023,35 +1042,6 @@ impl Automerge {
self.deps.insert(change.hash); self.deps.insert(change.hash);
} }
pub fn import<I: Importable>(&self, s: &str) -> Result<I, AutomergeError> {
if let Some(x) = I::from(s) {
Ok(x)
} else {
let n = s
.find('@')
.ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?;
let counter = s[0..n]
.parse()
.map_err(|_| AutomergeError::InvalidOpId(s.to_owned()))?;
let actor = ActorId::from(hex::decode(&s[(n + 1)..]).unwrap());
let actor = self
.ops
.m
.actors
.lookup(actor)
.ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?;
Ok(I::wrap(OpId(counter, actor)))
}
}
pub fn export<E: Exportable>(&self, id: E) -> String {
match id.export() {
Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.actors[id.actor()]),
Export::Prop(index) => self.ops.m.props[index].clone(),
Export::Special(s) => s,
}
}
pub fn dump(&self) { pub fn dump(&self) {
log!( log!(
" {:12} {:12} {:12} {} {} {}", " {:12} {:12} {:12} {} {} {}",
@ -1063,20 +1053,17 @@ impl Automerge {
"succ" "succ"
); );
for i in self.ops.iter() { for i in self.ops.iter() {
let id = self.export(i.id); let id = &i.id;
let obj = self.export(i.obj); let obj = &i.obj;
let key = match i.key { let key = &i.key;
Key::Map(n) => self.ops.m.props[n].clone(),
Key::Seq(n) => self.export(n),
};
let value: String = match &i.action { let value: String = match &i.action {
OpType::Set(value) => format!("{}", value), OpType::Set(value) => format!("{}", value),
OpType::Make(obj) => format!("make{}", obj), OpType::Make(obj) => format!("make{}", obj),
OpType::Inc(obj) => format!("inc{}", obj), OpType::Inc(obj) => format!("inc{}", obj),
OpType::Del => format!("del{}", 0), OpType::Del => format!("del{}", 0),
}; };
let pred: Vec<_> = i.pred.iter().map(|id| self.export(*id)).collect(); let pred = &i.pred;
let succ: Vec<_> = i.succ.iter().map(|id| self.export(*id)).collect(); let succ = &i.succ;
log!( log!(
" {:12} {:12} {:12} {} {:?} {:?}", " {:12} {:12} {:12} {} {:?} {:?}",
id, id,
@ -1097,7 +1084,7 @@ impl Automerge {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) struct Transaction { pub(crate) struct Transaction {
pub actor: usize, pub actor: Rc<ActorId>,
pub seq: u64, pub seq: u64,
pub start_op: u64, pub start_op: u64,
pub time: i64, pub time: i64,
@ -1123,9 +1110,9 @@ mod tests {
fn insert_op() -> Result<(), AutomergeError> { fn insert_op() -> Result<(), AutomergeError> {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
doc.set_actor(ActorId::random()); doc.set_actor(ActorId::random());
doc.set(ROOT, "hello", "world")?; doc.set(&ROOT, "hello", "world")?;
assert!(doc.pending_ops() == 1); assert!(doc.pending_ops() == 1);
doc.value(ROOT, "hello")?; doc.value(&ROOT, "hello")?;
Ok(()) Ok(())
} }
@ -1133,18 +1120,18 @@ mod tests {
fn test_list() -> Result<(), AutomergeError> { fn test_list() -> Result<(), AutomergeError> {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
doc.set_actor(ActorId::random()); doc.set_actor(ActorId::random());
let list_id = doc.set(ROOT, "items", Value::list())?.unwrap(); let list_id = doc.set(&ROOT, "items", Value::list())?.unwrap();
doc.set(ROOT, "zzz", "zzzval")?; doc.set(&ROOT, "zzz", "zzzval")?;
assert!(doc.value(ROOT, "items")?.unwrap().1 == list_id); assert!(doc.value(&ROOT, "items")?.unwrap().1 == list_id);
doc.insert(list_id, 0, "a")?; doc.insert(&list_id, 0, "a")?;
doc.insert(list_id, 0, "b")?; doc.insert(&list_id, 0, "b")?;
doc.insert(list_id, 2, "c")?; doc.insert(&list_id, 2, "c")?;
doc.insert(list_id, 1, "d")?; doc.insert(&list_id, 1, "d")?;
assert!(doc.value(list_id, 0)?.unwrap().0 == "b".into()); assert!(doc.value(&list_id, 0)?.unwrap().0 == "b".into());
assert!(doc.value(list_id, 1)?.unwrap().0 == "d".into()); assert!(doc.value(&list_id, 1)?.unwrap().0 == "d".into());
assert!(doc.value(list_id, 2)?.unwrap().0 == "a".into()); assert!(doc.value(&list_id, 2)?.unwrap().0 == "a".into());
assert!(doc.value(list_id, 3)?.unwrap().0 == "c".into()); assert!(doc.value(&list_id, 3)?.unwrap().0 == "c".into());
assert!(doc.length(list_id) == 4); assert!(doc.length(&list_id) == 4);
doc.save()?; doc.save()?;
Ok(()) Ok(())
} }
@ -1153,22 +1140,22 @@ mod tests {
fn test_del() -> Result<(), AutomergeError> { fn test_del() -> Result<(), AutomergeError> {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
doc.set_actor(ActorId::random()); doc.set_actor(ActorId::random());
doc.set(ROOT, "xxx", "xxx")?; doc.set(&ROOT, "xxx", "xxx")?;
assert!(!doc.values(ROOT, "xxx")?.is_empty()); assert!(!doc.values(&ROOT, "xxx")?.is_empty());
doc.del(ROOT, "xxx")?; doc.del(&ROOT, "xxx")?;
assert!(doc.values(ROOT, "xxx")?.is_empty()); assert!(doc.values(&ROOT, "xxx")?.is_empty());
Ok(()) Ok(())
} }
#[test] #[test]
fn test_inc() -> Result<(), AutomergeError> { fn test_inc() -> Result<(), AutomergeError> {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
let id = doc.set(ROOT, "counter", Value::counter(10))?.unwrap(); let id = doc.set(&ROOT, "counter", Value::counter(10))?.unwrap();
assert!(doc.value(ROOT, "counter")? == Some((Value::counter(10), id))); assert!(doc.value(&ROOT, "counter")? == Some((Value::counter(10), id.clone())));
doc.inc(ROOT, "counter", 10)?; doc.inc(&ROOT, "counter", 10)?;
assert!(doc.value(ROOT, "counter")? == Some((Value::counter(20), id))); assert!(doc.value(&ROOT, "counter")? == Some((Value::counter(20), id.clone())));
doc.inc(ROOT, "counter", -5)?; doc.inc(&ROOT, "counter", -5)?;
assert!(doc.value(ROOT, "counter")? == Some((Value::counter(15), id))); assert!(doc.value(&ROOT, "counter")? == Some((Value::counter(15), id.clone())));
Ok(()) Ok(())
} }
@ -1176,15 +1163,15 @@ mod tests {
fn test_save_incremental() -> Result<(), AutomergeError> { fn test_save_incremental() -> Result<(), AutomergeError> {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
doc.set(ROOT, "foo", 1)?; doc.set(&ROOT, "foo", 1)?;
let save1 = doc.save().unwrap(); let save1 = doc.save().unwrap();
doc.set(ROOT, "bar", 2)?; doc.set(&ROOT, "bar", 2)?;
let save2 = doc.save_incremental(); let save2 = doc.save_incremental();
doc.set(ROOT, "baz", 3)?; doc.set(&ROOT, "baz", 3)?;
let save3 = doc.save_incremental(); let save3 = doc.save_incremental();
@ -1202,7 +1189,7 @@ mod tests {
let mut doc_a = Automerge::load(&save_a)?; let mut doc_a = Automerge::load(&save_a)?;
let mut doc_b = Automerge::load(&save_b)?; let mut doc_b = Automerge::load(&save_b)?;
assert!(doc_a.values(ROOT, "baz")? == doc_b.values(ROOT, "baz")?); assert!(doc_a.values(&ROOT, "baz")? == doc_b.values(&ROOT, "baz")?);
assert!(doc_a.save().unwrap() == doc_b.save().unwrap()); assert!(doc_a.save().unwrap() == doc_b.save().unwrap());
@ -1212,17 +1199,17 @@ mod tests {
#[test] #[test]
fn test_save_text() -> Result<(), AutomergeError> { fn test_save_text() -> Result<(), AutomergeError> {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
let text = doc.set(ROOT, "text", Value::text())?.unwrap(); let text = doc.set(&ROOT, "text", Value::text())?.unwrap();
let heads1 = doc.commit(None, None); let heads1 = doc.commit(None, None);
doc.splice_text(text, 0, 0, "hello world")?; doc.splice_text(&text, 0, 0, "hello world")?;
let heads2 = doc.commit(None, None); let heads2 = doc.commit(None, None);
doc.splice_text(text, 6, 0, "big bad ")?; doc.splice_text(&text, 6, 0, "big bad ")?;
let heads3 = doc.commit(None, None); let heads3 = doc.commit(None, None);
assert!(&doc.text(text)? == "hello big bad world"); assert!(&doc.text(&text)? == "hello big bad world");
assert!(&doc.text_at(text, &heads1)?.is_empty()); assert!(&doc.text_at(&text, &heads1)?.is_empty());
assert!(&doc.text_at(text, &heads2)? == "hello world"); assert!(&doc.text_at(&text, &heads2)? == "hello world");
assert!(&doc.text_at(text, &heads3)? == "hello big bad world"); assert!(&doc.text_at(&text, &heads3)? == "hello big bad world");
Ok(()) Ok(())
} }
@ -1231,50 +1218,50 @@ mod tests {
fn test_props_vals_at() -> Result<(), AutomergeError> { fn test_props_vals_at() -> Result<(), AutomergeError> {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
doc.set_actor("aaaa".try_into().unwrap()); doc.set_actor("aaaa".try_into().unwrap());
doc.set(ROOT, "prop1", "val1")?; doc.set(&ROOT, "prop1", "val1")?;
doc.commit(None, None); doc.commit(None, None);
let heads1 = doc.get_heads(); let heads1 = doc.get_heads();
doc.set(ROOT, "prop1", "val2")?; doc.set(&ROOT, "prop1", "val2")?;
doc.commit(None, None); doc.commit(None, None);
let heads2 = doc.get_heads(); let heads2 = doc.get_heads();
doc.set(ROOT, "prop2", "val3")?; doc.set(&ROOT, "prop2", "val3")?;
doc.commit(None, None); doc.commit(None, None);
let heads3 = doc.get_heads(); let heads3 = doc.get_heads();
doc.del(ROOT, "prop1")?; doc.del(&ROOT, "prop1")?;
doc.commit(None, None); doc.commit(None, None);
let heads4 = doc.get_heads(); let heads4 = doc.get_heads();
doc.set(ROOT, "prop3", "val4")?; doc.set(&ROOT, "prop3", "val4")?;
doc.commit(None, None); doc.commit(None, None);
let heads5 = doc.get_heads(); let heads5 = doc.get_heads();
assert!(doc.keys_at(ROOT, &heads1) == vec!["prop1".to_owned()]); assert!(doc.keys_at(&ROOT, &heads1) == vec!["prop1".to_owned()]);
assert!(doc.value_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); assert!(doc.value_at(&ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1"));
assert!(doc.value_at(ROOT, "prop2", &heads1)? == None); assert!(doc.value_at(&ROOT, "prop2", &heads1)? == None);
assert!(doc.value_at(ROOT, "prop3", &heads1)? == None); assert!(doc.value_at(&ROOT, "prop3", &heads1)? == None);
assert!(doc.keys_at(ROOT, &heads2) == vec!["prop1".to_owned()]); assert!(doc.keys_at(&ROOT, &heads2) == vec!["prop1".to_owned()]);
assert!(doc.value_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); assert!(doc.value_at(&ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2"));
assert!(doc.value_at(ROOT, "prop2", &heads2)? == None); assert!(doc.value_at(&ROOT, "prop2", &heads2)? == None);
assert!(doc.value_at(ROOT, "prop3", &heads2)? == None); assert!(doc.value_at(&ROOT, "prop3", &heads2)? == None);
assert!(doc.keys_at(ROOT, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]); assert!(doc.keys_at(&ROOT, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]);
assert!(doc.value_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); assert!(doc.value_at(&ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2"));
assert!(doc.value_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3"));
assert!(doc.value_at(ROOT, "prop3", &heads3)? == None); assert!(doc.value_at(&ROOT, "prop3", &heads3)? == None);
assert!(doc.keys_at(ROOT, &heads4) == vec!["prop2".to_owned()]); assert!(doc.keys_at(&ROOT, &heads4) == vec!["prop2".to_owned()]);
assert!(doc.value_at(ROOT, "prop1", &heads4)? == None); assert!(doc.value_at(&ROOT, "prop1", &heads4)? == None);
assert!(doc.value_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3"));
assert!(doc.value_at(ROOT, "prop3", &heads4)? == None); assert!(doc.value_at(&ROOT, "prop3", &heads4)? == None);
assert!(doc.keys_at(ROOT, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]); assert!(doc.keys_at(&ROOT, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]);
assert!(doc.value_at(ROOT, "prop1", &heads5)? == None); assert!(doc.value_at(&ROOT, "prop1", &heads5)? == None);
assert!(doc.value_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); assert!(doc.value_at(&ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3"));
assert!(doc.value_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); assert!(doc.value_at(&ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4"));
assert!(doc.keys_at(ROOT, &[]).is_empty()); assert!(doc.keys_at(&ROOT, &[]).is_empty());
assert!(doc.value_at(ROOT, "prop1", &[])? == None); assert!(doc.value_at(&ROOT, "prop1", &[])? == None);
assert!(doc.value_at(ROOT, "prop2", &[])? == None); assert!(doc.value_at(&ROOT, "prop2", &[])? == None);
assert!(doc.value_at(ROOT, "prop3", &[])? == None); assert!(doc.value_at(&ROOT, "prop3", &[])? == None);
Ok(()) Ok(())
} }
@ -1283,47 +1270,47 @@ mod tests {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
doc.set_actor("aaaa".try_into().unwrap()); doc.set_actor("aaaa".try_into().unwrap());
let list = doc.set(ROOT, "list", Value::list())?.unwrap(); let list = doc.set(&ObjId::Root, "list", Value::list())?.unwrap();
let heads1 = doc.commit(None, None); let heads1 = doc.commit(None, None);
doc.insert(list, 0, Value::int(10))?; doc.insert(&list, 0, Value::int(10))?;
let heads2 = doc.commit(None, None); let heads2 = doc.commit(None, None);
doc.set(list, 0, Value::int(20))?; doc.set(&list, 0, Value::int(20))?;
doc.insert(list, 0, Value::int(30))?; doc.insert(&list, 0, Value::int(30))?;
let heads3 = doc.commit(None, None); let heads3 = doc.commit(None, None);
doc.set(list, 1, Value::int(40))?; doc.set(&list, 1, Value::int(40))?;
doc.insert(list, 1, Value::int(50))?; doc.insert(&list, 1, Value::int(50))?;
let heads4 = doc.commit(None, None); let heads4 = doc.commit(None, None);
doc.del(list, 2)?; doc.del(&list, 2)?;
let heads5 = doc.commit(None, None); let heads5 = doc.commit(None, None);
doc.del(list, 0)?; doc.del(&list, 0)?;
let heads6 = doc.commit(None, None); let heads6 = doc.commit(None, None);
assert!(doc.length_at(list, &heads1) == 0); assert!(doc.length_at(&list, &heads1) == 0);
assert!(doc.value_at(list, 0, &heads1)?.is_none()); assert!(doc.value_at(&list, 0, &heads1)?.is_none());
assert!(doc.length_at(list, &heads2) == 1); assert!(doc.length_at(&list, &heads2) == 1);
assert!(doc.value_at(list, 0, &heads2)?.unwrap().0 == Value::int(10)); assert!(doc.value_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10));
assert!(doc.length_at(list, &heads3) == 2); assert!(doc.length_at(&list, &heads3) == 2);
assert!(doc.value_at(list, 0, &heads3)?.unwrap().0 == Value::int(30)); assert!(doc.value_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30));
assert!(doc.value_at(list, 1, &heads3)?.unwrap().0 == Value::int(20)); assert!(doc.value_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20));
assert!(doc.length_at(list, &heads4) == 3); assert!(doc.length_at(&list, &heads4) == 3);
assert!(doc.value_at(list, 0, &heads4)?.unwrap().0 == Value::int(30)); assert!(doc.value_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30));
assert!(doc.value_at(list, 1, &heads4)?.unwrap().0 == Value::int(50)); assert!(doc.value_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50));
assert!(doc.value_at(list, 2, &heads4)?.unwrap().0 == Value::int(40)); assert!(doc.value_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40));
assert!(doc.length_at(list, &heads5) == 2); assert!(doc.length_at(&list, &heads5) == 2);
assert!(doc.value_at(list, 0, &heads5)?.unwrap().0 == Value::int(30)); assert!(doc.value_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30));
assert!(doc.value_at(list, 1, &heads5)?.unwrap().0 == Value::int(50)); assert!(doc.value_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50));
assert!(doc.length_at(list, &heads6) == 1); assert!(doc.length_at(&list, &heads6) == 1);
assert!(doc.value_at(list, 0, &heads6)?.unwrap().0 == Value::int(50)); assert!(doc.value_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50));
Ok(()) Ok(())
} }

View file

@ -1,8 +1,7 @@
use crate::op_tree::OpTreeInternal; use crate::op_tree::OpTreeInternal;
use crate::query::TreeQuery; use crate::query::TreeQuery;
use crate::{ActorId, IndexedCache, Key, ObjId, Op, OpId}; use crate::{ObjId, Op};
use fxhash::FxBuildHasher; use fxhash::FxBuildHasher;
use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
pub(crate) type OpSet = OpSetInternal<16>; pub(crate) type OpSet = OpSetInternal<16>;
@ -12,7 +11,6 @@ pub(crate) struct OpSetInternal<const B: usize> {
trees: HashMap<ObjId, OpTreeInternal<B>, FxBuildHasher>, trees: HashMap<ObjId, OpTreeInternal<B>, FxBuildHasher>,
objs: Vec<ObjId>, objs: Vec<ObjId>,
length: usize, length: usize,
pub m: OpSetMetadata,
} }
impl<const B: usize> OpSetInternal<B> { impl<const B: usize> OpSetInternal<B> {
@ -21,10 +19,6 @@ impl<const B: usize> OpSetInternal<B> {
trees: Default::default(), trees: Default::default(),
objs: Default::default(), objs: Default::default(),
length: 0, length: 0,
m: OpSetMetadata {
actors: IndexedCache::new(),
props: IndexedCache::new(),
},
} }
} }
@ -36,34 +30,34 @@ impl<const B: usize> OpSetInternal<B> {
} }
} }
pub fn search<Q>(&self, obj: ObjId, query: Q) -> Q pub fn search<Q>(&self, obj: &ObjId, query: Q) -> Q
where where
Q: TreeQuery<B>, Q: TreeQuery<B>,
{ {
if let Some(tree) = self.trees.get(&obj) { if let Some(tree) = self.trees.get(obj) {
tree.search(query, &self.m) tree.search(query)
} else { } else {
query query
} }
} }
pub fn replace<F>(&mut self, obj: ObjId, index: usize, f: F) -> Option<Op> pub fn replace<F>(&mut self, obj: &ObjId, index: usize, f: F) -> Option<Op>
where where
F: FnMut(&mut Op), F: FnMut(&mut Op),
{ {
if let Some(tree) = self.trees.get_mut(&obj) { if let Some(tree) = self.trees.get_mut(obj) {
tree.replace(index, f) tree.replace(index, f)
} else { } else {
None None
} }
} }
pub fn remove(&mut self, obj: ObjId, index: usize) -> Op { pub fn remove(&mut self, obj: &ObjId, index: usize) -> Op {
let tree = self.trees.get_mut(&obj).unwrap(); let tree = self.trees.get_mut(obj).unwrap();
self.length -= 1; self.length -= 1;
let op = tree.remove(index); let op = tree.remove(index);
if tree.is_empty() { if tree.is_empty() {
self.trees.remove(&obj); self.trees.remove(obj);
} }
op op
} }
@ -76,16 +70,15 @@ impl<const B: usize> OpSetInternal<B> {
let Self { let Self {
ref mut trees, ref mut trees,
ref mut objs, ref mut objs,
ref mut m,
.. ..
} = self; } = self;
trees trees
.entry(element.obj) .entry(element.obj.clone())
.or_insert_with(|| { .or_insert_with(|| {
let pos = objs let pos = objs
.binary_search_by(|probe| m.lamport_cmp(probe.0, element.obj.0)) .binary_search_by(|probe| probe.cmp(&element.obj))
.unwrap_err(); .unwrap_err();
objs.insert(pos, element.obj); objs.insert(pos, element.obj.clone());
Default::default() Default::default()
}) })
.insert(index, element); .insert(index, element);
@ -147,29 +140,3 @@ impl<'a, const B: usize> Iterator for Iter<'a, B> {
} }
} }
} }
#[derive(Clone, Debug)]
pub(crate) struct OpSetMetadata {
pub actors: IndexedCache<ActorId>,
pub props: IndexedCache<String>,
}
impl OpSetMetadata {
pub fn key_cmp(&self, left: &Key, right: &Key) -> Ordering {
match (left, right) {
(Key::Map(a), Key::Map(b)) => self.props[*a].cmp(&self.props[*b]),
_ => panic!("can only compare map keys"),
}
}
pub fn lamport_cmp(&self, left: OpId, right: OpId) -> Ordering {
match (left, right) {
(OpId(0, _), OpId(0, _)) => Ordering::Equal,
(OpId(0, _), OpId(_, _)) => Ordering::Less,
(OpId(_, _), OpId(0, _)) => Ordering::Greater,
// FIXME - this one seems backwards to me - why - is values() returning in the wrong order?
(OpId(a, x), OpId(b, y)) if a == b => self.actors[y].cmp(&self.actors[x]),
(OpId(a, _), OpId(b, _)) => a.cmp(&b),
}
}
}

View file

@ -4,7 +4,6 @@ use std::{
mem, mem,
}; };
pub(crate) use crate::op_set::OpSetMetadata;
use crate::query::{Index, QueryResult, TreeQuery}; use crate::query::{Index, QueryResult, TreeQuery};
use crate::{Op, OpId}; use crate::{Op, OpId};
use std::collections::HashSet; use std::collections::HashSet;
@ -36,14 +35,14 @@ impl<const B: usize> OpTreeInternal<B> {
self.root_node.as_ref().map_or(0, |n| n.len()) self.root_node.as_ref().map_or(0, |n| n.len())
} }
pub fn search<Q>(&self, mut query: Q, m: &OpSetMetadata) -> Q pub fn search<Q>(&self, mut query: Q) -> Q
where where
Q: TreeQuery<B>, Q: TreeQuery<B>,
{ {
self.root_node self.root_node
.as_ref() .as_ref()
.map(|root| match query.query_node_with_metadata(root, m) { .map(|root| match query.query_node(root) {
QueryResult::Decend => root.search(&mut query, m), QueryResult::Decend => root.search(&mut query),
_ => true, _ => true,
}); });
query query
@ -177,22 +176,22 @@ impl<const B: usize> OpTreeNode<B> {
} }
} }
pub fn search<Q>(&self, query: &mut Q, m: &OpSetMetadata) -> bool pub fn search<Q>(&self, query: &mut Q) -> bool
where where
Q: TreeQuery<B>, Q: TreeQuery<B>,
{ {
if self.is_leaf() { if self.is_leaf() {
for e in &self.elements { for e in &self.elements {
if query.query_element_with_metadata(e, m) == QueryResult::Finish { if query.query_element(e) == QueryResult::Finish {
return true; return true;
} }
} }
false false
} else { } else {
for (child_index, child) in self.children.iter().enumerate() { for (child_index, child) in self.children.iter().enumerate() {
match query.query_node_with_metadata(child, m) { match query.query_node(child) {
QueryResult::Decend => { QueryResult::Decend => {
if child.search(query, m) { if child.search(query) {
return true; return true;
} }
} }
@ -200,7 +199,7 @@ impl<const B: usize> OpTreeNode<B> {
QueryResult::Next => (), QueryResult::Next => (),
} }
if let Some(e) = self.elements.get(child_index) { if let Some(e) = self.elements.get(child_index) {
if query.query_element_with_metadata(e, m) == QueryResult::Finish { if query.query_element(e) == QueryResult::Finish {
return true; return true;
} }
} }
@ -627,13 +626,14 @@ struct CounterData {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
/* FIXME
use crate::legacy as amp; use crate::legacy as amp;
use crate::{Op, OpId}; use crate::{Op, OpId};
use super::*; use super::*;
fn op(n: usize) -> Op { fn op(n: usize) -> Op {
let zero = OpId(0, 0); let zero = OpId(0,0);
Op { Op {
change: n, change: n,
id: zero, id: zero,
@ -680,4 +680,5 @@ mod tests {
assert_eq!(v, t.iter().cloned().collect::<Vec<_>>()) assert_eq!(v, t.iter().cloned().collect::<Vec<_>>())
} }
} }
*/
} }

View file

@ -1,4 +1,4 @@
use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::op_tree::OpTreeNode;
use crate::{Clock, ElemId, Op, OpId, OpType, ScalarValue}; use crate::{Clock, ElemId, Op, OpId, OpType, ScalarValue};
use fxhash::FxBuildHasher; use fxhash::FxBuildHasher;
use std::cmp::Ordering; use std::cmp::Ordering;
@ -40,24 +40,10 @@ pub(crate) struct CounterData {
} }
pub(crate) trait TreeQuery<const B: usize> { pub(crate) trait TreeQuery<const B: usize> {
#[inline(always)]
fn query_node_with_metadata(
&mut self,
child: &OpTreeNode<B>,
_m: &OpSetMetadata,
) -> QueryResult {
self.query_node(child)
}
fn query_node(&mut self, _child: &OpTreeNode<B>) -> QueryResult { fn query_node(&mut self, _child: &OpTreeNode<B>) -> QueryResult {
QueryResult::Decend QueryResult::Decend
} }
#[inline(always)]
fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult {
self.query_element(element)
}
fn query_element(&mut self, _element: &Op) -> QueryResult { fn query_element(&mut self, _element: &Op) -> QueryResult {
panic!("invalid element query") panic!("invalid element query")
} }
@ -97,7 +83,7 @@ impl Index {
pub fn replace(&mut self, old: &Op, new: &Op) { pub fn replace(&mut self, old: &Op, new: &Op) {
if old.id != new.id { if old.id != new.id {
self.ops.remove(&old.id); self.ops.remove(&old.id);
self.ops.insert(new.id); self.ops.insert(new.id.clone());
} }
assert!(new.key == old.key); assert!(new.key == old.key);
@ -127,7 +113,7 @@ impl Index {
} }
pub fn insert(&mut self, op: &Op) { pub fn insert(&mut self, op: &Op) {
self.ops.insert(op.id); self.ops.insert(op.id.clone());
if op.succ.is_empty() { if op.succ.is_empty() {
if let Some(elem) = op.elemid() { if let Some(elem) = op.elemid() {
match self.visible.get(&elem).copied() { match self.visible.get(&elem).copied() {
@ -163,16 +149,16 @@ impl Index {
pub fn merge(&mut self, other: &Index) { pub fn merge(&mut self, other: &Index) {
for id in &other.ops { for id in &other.ops {
self.ops.insert(*id); self.ops.insert(id.clone());
} }
for (elem, n) in other.visible.iter() { for (elem, n) in other.visible.iter() {
match self.visible.get(elem).cloned() { match self.visible.get(elem).cloned() {
None => { None => {
self.visible.insert(*elem, 1); self.visible.insert(elem.clone(), 1);
self.len += 1; self.len += 1;
} }
Some(m) => { Some(m) => {
self.visible.insert(*elem, m + n); self.visible.insert(elem.clone(), m + n);
} }
} }
} }
@ -196,7 +182,7 @@ impl VisWindow {
match op.action { match op.action {
OpType::Set(ScalarValue::Counter(val)) => { OpType::Set(ScalarValue::Counter(val)) => {
self.counters.insert( self.counters.insert(
op.id, op.id.clone(),
CounterData { CounterData {
pos, pos,
val, val,
@ -238,7 +224,7 @@ impl VisWindow {
match op.action { match op.action {
OpType::Set(ScalarValue::Counter(val)) => { OpType::Set(ScalarValue::Counter(val)) => {
self.counters.insert( self.counters.insert(
op.id, op.id.clone(),
CounterData { CounterData {
pos, pos,
val, val,
@ -292,7 +278,7 @@ pub(crate) fn is_visible(op: &Op, pos: usize, counters: &mut HashMap<OpId, Count
match op.action { match op.action {
OpType::Set(ScalarValue::Counter(val)) => { OpType::Set(ScalarValue::Counter(val)) => {
counters.insert( counters.insert(
op.id, op.id.clone(),
CounterData { CounterData {
pos, pos,
val, val,

View file

@ -1,6 +1,6 @@
use crate::op_tree::OpTreeNode; use crate::op_tree::OpTreeNode;
use crate::query::{QueryResult, TreeQuery, VisWindow}; use crate::query::{QueryResult, TreeQuery, VisWindow};
use crate::{AutomergeError, ElemId, Key, Op, HEAD}; use crate::{AutomergeError, ElemId, Key, Op};
use std::fmt::Debug; use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
@ -27,9 +27,9 @@ impl<const B: usize> InsertNth<B> {
pub fn key(&self) -> Result<Key, AutomergeError> { pub fn key(&self) -> Result<Key, AutomergeError> {
if self.target == 0 { if self.target == 0 {
Ok(HEAD.into()) Ok(Key::Seq(ElemId::Head))
} else if self.seen == self.target && self.last_insert.is_some() { } else if self.seen == self.target && self.last_insert.is_some() {
Ok(Key::Seq(self.last_insert.unwrap())) Ok(Key::Seq(self.last_insert.clone().unwrap()))
} else { } else {
Err(AutomergeError::InvalidIndex(self.target)) Err(AutomergeError::InvalidIndex(self.target))
} }

View file

@ -24,9 +24,10 @@ impl<const B: usize> TreeQuery<B> for Keys<B> {
for i in 0..child.len() { for i in 0..child.len() {
let op = child.get(i).unwrap(); let op = child.get(i).unwrap();
let visible = self.window.visible(op, i); let visible = self.window.visible(op, i);
if Some(op.key) != last && visible { // FIXME - clone?
self.keys.push(op.key); if Some(op.key.clone()) != last && visible {
last = Some(op.key); self.keys.push(op.key.clone());
last = Some(op.key.clone());
} }
} }
QueryResult::Finish QueryResult::Finish

View file

@ -26,9 +26,9 @@ impl<const B: usize> KeysAt<B> {
impl<const B: usize> TreeQuery<B> for KeysAt<B> { impl<const B: usize> TreeQuery<B> for KeysAt<B> {
fn query_element(&mut self, op: &Op) -> QueryResult { fn query_element(&mut self, op: &Op) -> QueryResult {
let visible = self.window.visible_at(op, self.pos, &self.clock); let visible = self.window.visible_at(op, self.pos, &self.clock);
if Some(op.key) != self.last && visible { if Some(&op.key) != self.last.as_ref() && visible {
self.keys.push(op.key); self.keys.push(op.key.clone());
self.last = Some(op.key); self.last = Some(op.key.clone());
} }
self.pos += 1; self.pos += 1;
QueryResult::Next QueryResult::Next

View file

@ -1,17 +1,15 @@
use crate::op_tree::OpTreeNode; use crate::op_tree::OpTreeNode;
use crate::query::{QueryResult, TreeQuery}; use crate::query::{QueryResult, TreeQuery};
use crate::ObjId;
use std::fmt::Debug; use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub(crate) struct Len<const B: usize> { pub(crate) struct Len<const B: usize> {
obj: ObjId,
pub len: usize, pub len: usize,
} }
impl<const B: usize> Len<B> { impl<const B: usize> Len<B> {
pub fn new(obj: ObjId) -> Self { pub fn new() -> Self {
Len { obj, len: 0 } Len { len: 0 }
} }
} }

View file

@ -1,19 +1,17 @@
use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::op_tree::OpTreeNode;
use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; use crate::query::{is_visible, visible_op, QueryResult, TreeQuery};
use crate::{ElemId, ObjId, Op}; use crate::{ElemId, Op};
use std::fmt::Debug; use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub(crate) struct ListVals { pub(crate) struct ListVals {
obj: ObjId,
last_elem: Option<ElemId>, last_elem: Option<ElemId>,
pub ops: Vec<Op>, pub ops: Vec<Op>,
} }
impl ListVals { impl ListVals {
pub fn new(obj: ObjId) -> Self { pub fn new() -> Self {
ListVals { ListVals {
obj,
last_elem: None, last_elem: None,
ops: vec![], ops: vec![],
} }
@ -21,18 +19,10 @@ impl ListVals {
} }
impl<const B: usize> TreeQuery<B> for ListVals { impl<const B: usize> TreeQuery<B> for ListVals {
fn query_node_with_metadata( fn query_node(&mut self, child: &OpTreeNode<B>) -> QueryResult {
&mut self,
child: &OpTreeNode<B>,
m: &OpSetMetadata,
) -> QueryResult {
let start = binary_search_by(child, |op| m.lamport_cmp(op.obj.0, self.obj.0));
let mut counters = Default::default(); let mut counters = Default::default();
for pos in start..child.len() { for pos in 0..child.len() {
let op = child.get(pos).unwrap(); let op = child.get(pos).unwrap();
if op.obj != self.obj {
break;
}
if op.insert { if op.insert {
self.last_elem = None; self.last_elem = None;
} }

View file

@ -30,8 +30,8 @@ impl<const B: usize> Nth<B> {
} }
pub fn key(&self) -> Result<Key, AutomergeError> { pub fn key(&self) -> Result<Key, AutomergeError> {
if let Some(e) = self.last_elem { if let Some(e) = &self.last_elem {
Ok(Key::Seq(e)) Ok(Key::Seq(e.clone()))
} else { } else {
Err(AutomergeError::InvalidIndex(self.target)) Err(AutomergeError::InvalidIndex(self.target))
} }

View file

@ -1,11 +1,10 @@
use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::op_tree::OpTreeNode;
use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery};
use crate::{Key, ObjId, Op}; use crate::{Key, Op};
use std::fmt::Debug; use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub(crate) struct Prop { pub(crate) struct Prop {
obj: ObjId,
key: Key, key: Key,
pub ops: Vec<Op>, pub ops: Vec<Op>,
pub ops_pos: Vec<usize>, pub ops_pos: Vec<usize>,
@ -13,9 +12,8 @@ pub(crate) struct Prop {
} }
impl Prop { impl Prop {
pub fn new(obj: ObjId, prop: usize) -> Self { pub fn new(prop: String) -> Self {
Prop { Prop {
obj,
key: Key::Map(prop), key: Key::Map(prop),
ops: vec![], ops: vec![],
ops_pos: vec![], ops_pos: vec![],
@ -25,20 +23,13 @@ impl Prop {
} }
impl<const B: usize> TreeQuery<B> for Prop { impl<const B: usize> TreeQuery<B> for Prop {
fn query_node_with_metadata( fn query_node(&mut self, child: &OpTreeNode<B>) -> QueryResult {
&mut self, let start = binary_search_by(child, |op| op.key.cmp(&self.key));
child: &OpTreeNode<B>,
m: &OpSetMetadata,
) -> QueryResult {
let start = binary_search_by(child, |op| {
m.lamport_cmp(op.obj.0, self.obj.0)
.then_with(|| m.key_cmp(&op.key, &self.key))
});
let mut counters = Default::default(); let mut counters = Default::default();
self.pos = start; self.pos = start;
for pos in start..child.len() { for pos in start..child.len() {
let op = child.get(pos).unwrap(); let op = child.get(pos).unwrap();
if !(op.obj == self.obj && op.key == self.key) { if op.key != self.key {
break; break;
} }
if is_visible(op, pos, &mut counters) { if is_visible(op, pos, &mut counters) {

View file

@ -1,4 +1,4 @@
use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::op_tree::OpTreeNode;
use crate::query::{binary_search_by, QueryResult, TreeQuery, VisWindow}; use crate::query::{binary_search_by, QueryResult, TreeQuery, VisWindow};
use crate::{Clock, Key, Op}; use crate::{Clock, Key, Op};
use std::fmt::Debug; use std::fmt::Debug;
@ -13,7 +13,7 @@ pub(crate) struct PropAt {
} }
impl PropAt { impl PropAt {
pub fn new(prop: usize, clock: Clock) -> Self { pub fn new(prop: String, clock: Clock) -> Self {
PropAt { PropAt {
clock, clock,
key: Key::Map(prop), key: Key::Map(prop),
@ -25,12 +25,8 @@ impl PropAt {
} }
impl<const B: usize> TreeQuery<B> for PropAt { impl<const B: usize> TreeQuery<B> for PropAt {
fn query_node_with_metadata( fn query_node(&mut self, child: &OpTreeNode<B>) -> QueryResult {
&mut self, let start = binary_search_by(child, |op| op.key.cmp(&self.key));
child: &OpTreeNode<B>,
m: &OpSetMetadata,
) -> QueryResult {
let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key));
let mut window: VisWindow = Default::default(); let mut window: VisWindow = Default::default();
self.pos = start; self.pos = start;
for pos in start..child.len() { for pos in start..child.len() {

View file

@ -1,6 +1,6 @@
use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::op_tree::OpTreeNode;
use crate::query::{binary_search_by, QueryResult, TreeQuery}; use crate::query::{binary_search_by, QueryResult, TreeQuery};
use crate::{Key, Op, HEAD}; use crate::{ElemId, Key, Op};
use std::cmp::Ordering; use std::cmp::Ordering;
use std::fmt::Debug; use std::fmt::Debug;
@ -26,12 +26,12 @@ impl<const B: usize> SeekOp<B> {
op.obj != self.op.obj op.obj != self.op.obj
} }
fn lesser_insert(&self, op: &Op, m: &OpSetMetadata) -> bool { fn lesser_insert(&self, op: &Op) -> bool {
op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less op.insert && op.id.cmp(&self.op.id) == Ordering::Less
} }
fn greater_opid(&self, op: &Op, m: &OpSetMetadata) -> bool { fn greater_opid(&self, op: &Op) -> bool {
m.lamport_cmp(op.id, self.op.id) == Ordering::Greater op.id.cmp(&self.op.id) == Ordering::Greater
} }
fn is_target_insert(&self, op: &Op) -> bool { fn is_target_insert(&self, op: &Op) -> bool {
@ -47,30 +47,26 @@ impl<const B: usize> SeekOp<B> {
} }
impl<const B: usize> TreeQuery<B> for SeekOp<B> { impl<const B: usize> TreeQuery<B> for SeekOp<B> {
fn query_node_with_metadata( fn query_node(&mut self, child: &OpTreeNode<B>) -> QueryResult {
&mut self,
child: &OpTreeNode<B>,
m: &OpSetMetadata,
) -> QueryResult {
if self.found { if self.found {
return QueryResult::Decend; return QueryResult::Decend;
} }
match self.op.key { match &self.op.key {
Key::Seq(e) if e == HEAD => { Key::Seq(ElemId::Head) => {
while self.pos < child.len() { while self.pos < child.len() {
let op = child.get(self.pos).unwrap(); let op = child.get(self.pos).unwrap();
if self.op.overwrites(op) { if self.op.overwrites(op) {
self.succ.push(self.pos); self.succ.push(self.pos);
} }
if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { if op.insert && op.id.cmp(&self.op.id) == Ordering::Less {
break; break;
} }
self.pos += 1; self.pos += 1;
} }
QueryResult::Finish QueryResult::Finish
} }
Key::Seq(e) => { Key::Seq(ElemId::Id(id)) => {
if self.found || child.index.ops.contains(&e.0) { if self.found || child.index.ops.contains(id) {
QueryResult::Decend QueryResult::Decend
} else { } else {
self.pos += child.len(); self.pos += child.len();
@ -78,7 +74,7 @@ impl<const B: usize> TreeQuery<B> for SeekOp<B> {
} }
} }
Key::Map(_) => { Key::Map(_) => {
self.pos = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); self.pos = binary_search_by(child, |op| op.key.cmp(&self.op.key));
while self.pos < child.len() { while self.pos < child.len() {
let op = child.get(self.pos).unwrap(); let op = child.get(self.pos).unwrap();
if op.key != self.op.key { if op.key != self.op.key {
@ -87,7 +83,7 @@ impl<const B: usize> TreeQuery<B> for SeekOp<B> {
if self.op.overwrites(op) { if self.op.overwrites(op) {
self.succ.push(self.pos); self.succ.push(self.pos);
} }
if m.lamport_cmp(op.id, self.op.id) == Ordering::Greater { if op.id.cmp(&self.op.id) == Ordering::Greater {
break; break;
} }
self.pos += 1; self.pos += 1;
@ -97,7 +93,7 @@ impl<const B: usize> TreeQuery<B> for SeekOp<B> {
} }
} }
fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { fn query_element(&mut self, e: &Op) -> QueryResult {
if !self.found { if !self.found {
if self.is_target_insert(e) { if self.is_target_insert(e) {
self.found = true; self.found = true;
@ -112,13 +108,13 @@ impl<const B: usize> TreeQuery<B> for SeekOp<B> {
self.succ.push(self.pos); self.succ.push(self.pos);
} }
if self.op.insert { if self.op.insert {
if self.different_obj(e) || self.lesser_insert(e, m) { if self.different_obj(e) || self.lesser_insert(e) {
QueryResult::Finish QueryResult::Finish
} else { } else {
self.pos += 1; self.pos += 1;
QueryResult::Next QueryResult::Next
} }
} else if e.insert || self.different_obj(e) || self.greater_opid(e, m) { } else if e.insert || self.different_obj(e) || self.greater_opid(e) {
QueryResult::Finish QueryResult::Finish
} else { } else {
self.pos += 1; self.pos += 1;

View file

@ -3,18 +3,15 @@ use crate::legacy as amp;
use crate::ScalarValue; use crate::ScalarValue;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::cmp::Eq; use std::cmp::Eq;
use std::cmp::Ordering;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::convert::TryInto; use std::convert::TryInto;
use std::fmt; use std::fmt;
use std::hash::{Hash, Hasher};
use std::rc::Rc;
use std::str::FromStr; use std::str::FromStr;
use tinyvec::{ArrayVec, TinyVec}; use tinyvec::{ArrayVec, TinyVec};
pub(crate) const HEAD: ElemId = ElemId(OpId(0, 0));
pub const ROOT: OpId = OpId(0, 0);
const ROOT_STR: &str = "_root";
const HEAD_STR: &str = "_head";
/// An actor id is a sequence of bytes. By default we use a uuid which can be nicely stack /// An actor id is a sequence of bytes. By default we use a uuid which can be nicely stack
/// allocated. /// allocated.
/// ///
@ -107,6 +104,39 @@ impl fmt::Display for ActorId {
} }
} }
impl fmt::Display for OpId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}@{}", &self.counter, &self.actor)
}
}
impl fmt::Display for ObjId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ObjId::Root => write!(f, "_root"),
ObjId::Id(id) => write!(f, "{}", id),
}
}
}
impl fmt::Display for ElemId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ElemId::Head => write!(f, "_head"),
ElemId::Id(id) => write!(f, "{}", id),
}
}
}
impl fmt::Display for Key {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Key::Map(s) => write!(f, "{}", s),
Key::Seq(id) => write!(f, "{}", id),
}
}
}
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Copy, Hash)] #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Copy, Hash)]
#[serde(rename_all = "camelCase", untagged)] #[serde(rename_all = "camelCase", untagged)]
pub enum ObjType { pub enum ObjType {
@ -160,128 +190,103 @@ pub enum OpType {
Set(ScalarValue), Set(ScalarValue),
} }
#[derive(Debug)]
pub enum Export {
Id(OpId),
Special(String),
Prop(usize),
}
pub trait Exportable {
fn export(&self) -> Export;
}
pub trait Importable {
fn wrap(id: OpId) -> Self;
fn from(s: &str) -> Option<Self>
where
Self: std::marker::Sized;
}
impl OpId {
#[inline]
pub fn counter(&self) -> u64 {
self.0
}
#[inline]
pub fn actor(&self) -> usize {
self.1
}
}
impl Exportable for ObjId {
fn export(&self) -> Export {
if self.0 == ROOT {
Export::Special(ROOT_STR.to_owned())
} else {
Export::Id(self.0)
}
}
}
impl Exportable for &ObjId {
fn export(&self) -> Export {
if self.0 == ROOT {
Export::Special(ROOT_STR.to_owned())
} else {
Export::Id(self.0)
}
}
}
impl Exportable for ElemId {
fn export(&self) -> Export {
if self == &HEAD {
Export::Special(HEAD_STR.to_owned())
} else {
Export::Id(self.0)
}
}
}
impl Exportable for OpId {
fn export(&self) -> Export {
Export::Id(*self)
}
}
impl Exportable for Key {
fn export(&self) -> Export {
match self {
Key::Map(p) => Export::Prop(*p),
Key::Seq(e) => e.export(),
}
}
}
impl Importable for ObjId {
fn wrap(id: OpId) -> Self {
ObjId(id)
}
fn from(s: &str) -> Option<Self> {
if s == ROOT_STR {
Some(ROOT.into())
} else {
None
}
}
}
impl Importable for OpId {
fn wrap(id: OpId) -> Self {
id
}
fn from(s: &str) -> Option<Self> {
if s == ROOT_STR {
Some(ROOT)
} else {
None
}
}
}
impl Importable for ElemId {
fn wrap(id: OpId) -> Self {
ElemId(id)
}
fn from(s: &str) -> Option<Self> {
if s == HEAD_STR {
Some(HEAD)
} else {
None
}
}
}
impl From<OpId> for ObjId { impl From<OpId> for ObjId {
fn from(o: OpId) -> Self { fn from(o: OpId) -> Self {
ObjId(o) ObjId::Id(o)
}
}
impl From<&OpId> for ObjId {
fn from(o: &OpId) -> Self {
ObjId::Id(o.clone())
}
}
impl From<OpId> for amp::OpId {
fn from(o: OpId) -> Self {
amp::OpId(o.counter, o.actor.as_ref().clone())
}
}
impl From<&OpId> for amp::OpId {
fn from(o: &OpId) -> Self {
amp::OpId(o.counter, o.actor.as_ref().clone())
}
}
impl From<Key> for amp::Key {
fn from(k: Key) -> Self {
match k {
Key::Map(s) => amp::Key::Map(s.into()),
Key::Seq(e) => amp::Key::Seq(e.into()),
}
}
}
impl From<&Key> for amp::Key {
fn from(k: &Key) -> Self {
match k {
Key::Map(s) => amp::Key::Map(s.into()),
Key::Seq(e) => amp::Key::Seq(e.into()),
}
}
}
impl From<ObjId> for amp::ObjectId {
fn from(o: ObjId) -> Self {
match o {
ObjId::Root => amp::ObjectId::Root,
ObjId::Id(id) => amp::ObjectId::Id(id.into()),
}
}
}
impl From<&ObjId> for amp::ObjectId {
fn from(o: &ObjId) -> Self {
match o {
ObjId::Root => amp::ObjectId::Root,
ObjId::Id(id) => amp::ObjectId::Id(id.into()),
}
}
}
impl From<&ElemId> for amp::ElementId {
fn from(o: &ElemId) -> Self {
match o {
ElemId::Head => amp::ElementId::Head,
ElemId::Id(id) => amp::ElementId::Id(id.into()),
}
}
}
impl From<ElemId> for amp::ElementId {
fn from(o: ElemId) -> Self {
match o {
ElemId::Head => amp::ElementId::Head,
ElemId::Id(id) => amp::ElementId::Id(id.into()),
}
}
}
impl From<&Op> for amp::Op {
fn from(op: &Op) -> Self {
let action = op.action.clone();
let key = (&op.key).into();
let obj = (&op.obj).into();
let pred = op.pred.iter().map(|id| id.into()).collect();
amp::Op {
action,
obj,
insert: op.insert,
pred,
key,
}
} }
} }
impl From<OpId> for ElemId { impl From<OpId> for ElemId {
fn from(o: OpId) -> Self { fn from(o: OpId) -> Self {
ElemId(o) ElemId::Id(o)
} }
} }
@ -317,7 +322,7 @@ impl From<f64> for Prop {
impl From<OpId> for Key { impl From<OpId> for Key {
fn from(id: OpId) -> Self { fn from(id: OpId) -> Self {
Key::Seq(ElemId(id)) Key::Seq(ElemId::Id(id))
} }
} }
@ -327,9 +332,9 @@ impl From<ElemId> for Key {
} }
} }
#[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone, Copy, Hash)] #[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub(crate) enum Key { pub(crate) enum Key {
Map(usize), Map(String),
Seq(ElemId), Seq(ElemId),
} }
@ -346,19 +351,50 @@ impl Key {
pub fn elemid(&self) -> Option<ElemId> { pub fn elemid(&self) -> Option<ElemId> {
match self { match self {
Key::Map(_) => None, Key::Map(_) => None,
Key::Seq(id) => Some(*id), Key::Seq(id) => Some(id.clone()),
} }
} }
} }
#[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] #[derive(Debug, Clone, Eq, PartialEq)]
pub struct OpId(pub u64, pub usize); pub struct OpId {
pub counter: u64,
pub actor: Rc<ActorId>,
hash: u64,
}
#[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] impl Hash for OpId {
pub(crate) struct ObjId(pub OpId); fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
state.write_u64(self.hash);
}
}
#[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] impl OpId {
pub(crate) struct ElemId(pub OpId); pub(crate) fn at(counter: u64, actor: Rc<ActorId>) -> OpId {
use fxhash::FxHasher;
let mut hasher = FxHasher::default();
(counter, &actor).hash(&mut hasher);
let hash = hasher.finish();
OpId {
counter,
actor: actor.clone(),
hash,
}
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum ObjId {
Root,
Id(OpId),
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub(crate) enum ElemId {
Head,
Id(OpId),
}
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub(crate) struct Op { pub(crate) struct Op {
@ -383,7 +419,7 @@ impl Op {
pub fn elemid(&self) -> Option<ElemId> { pub fn elemid(&self) -> Option<ElemId> {
if self.insert { if self.insert {
Some(ElemId(self.id)) Some(ElemId::Id(self.id.clone()))
} else { } else {
self.key.elemid() self.key.elemid()
} }
@ -451,3 +487,68 @@ impl TryFrom<&[u8]> for ChangeHash {
} }
} }
} }
impl Ord for OpId {
fn cmp(&self, other: &Self) -> Ordering {
match self.counter.cmp(&other.counter) {
Ordering::Equal => other.actor.cmp(&self.actor),
order => order,
}
}
}
impl PartialOrd for OpId {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl PartialOrd for Key {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl PartialOrd for ElemId {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl PartialOrd for ObjId {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for ElemId {
fn cmp(&self, other: &Self) -> Ordering {
match (self, other) {
(ElemId::Head, ElemId::Head) => Ordering::Equal,
(ElemId::Head, _) => Ordering::Less,
(_, ElemId::Head) => Ordering::Greater,
(ElemId::Id(a), ElemId::Id(b)) => a.cmp(b),
}
}
}
impl Ord for ObjId {
fn cmp(&self, other: &Self) -> Ordering {
match (self, other) {
(ObjId::Root, ObjId::Root) => Ordering::Equal,
(ObjId::Root, _) => Ordering::Less,
(_, ObjId::Root) => Ordering::Greater,
(ObjId::Id(a), ObjId::Id(b)) => a.cmp(b),
}
}
}
impl Ord for Key {
fn cmp(&self, other: &Self) -> Ordering {
match (self, other) {
(Key::Map(a), Key::Map(b)) => a.cmp(b),
(Key::Seq(a), Key::Seq(b)) => a.cmp(b),
(_, _) => panic!("comparing seq key to map key"),
}
}
}

View file

@ -1,4 +1,4 @@
use crate::{error, ObjType, Op, OpId, OpType}; use crate::{error, ObjId, ObjType, Op, OpType};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use smol_str::SmolStr; use smol_str::SmolStr;
use std::convert::TryFrom; use std::convert::TryFrom;
@ -110,21 +110,21 @@ impl From<ScalarValue> for Value {
} }
} }
impl From<&Op> for (Value, OpId) { impl From<&Op> for (Value, ObjId) {
fn from(op: &Op) -> Self { fn from(op: &Op) -> Self {
match &op.action { match &op.action {
OpType::Make(obj_type) => (Value::Object(*obj_type), op.id), OpType::Make(obj_type) => (Value::Object(*obj_type), op.id.clone().into()),
OpType::Set(scalar) => (Value::Scalar(scalar.clone()), op.id), OpType::Set(scalar) => (Value::Scalar(scalar.clone()), op.id.clone().into()),
_ => panic!("cant convert op into a value - {:?}", op), _ => panic!("cant convert op into a value - {:?}", op),
} }
} }
} }
impl From<Op> for (Value, OpId) { impl From<Op> for (Value, ObjId) {
fn from(op: Op) -> Self { fn from(op: Op) -> Self {
match &op.action { match &op.action {
OpType::Make(obj_type) => (Value::Object(*obj_type), op.id), OpType::Make(obj_type) => (Value::Object(*obj_type), op.id.clone().into()),
OpType::Set(scalar) => (Value::Scalar(scalar.clone()), op.id), OpType::Set(scalar) => (Value::Scalar(scalar.clone()), op.id.clone().into()),
_ => panic!("cant convert op into a value - {:?}", op), _ => panic!("cant convert op into a value - {:?}", op),
} }
} }

View file

@ -1,943 +0,0 @@
use automerge::Automerge;
mod helpers;
#[allow(unused_imports)]
use helpers::{
mk_counter, new_doc, new_doc_with_actor, pretty_print, realize, realize_obj, sorted_actors,
translate_obj_id, OpIdExt, RealizedObject,
};
#[test]
fn no_conflict_on_repeated_assignment() {
let mut doc = Automerge::new();
doc.set(automerge::ROOT, "foo", 1).unwrap();
let op = doc.set(automerge::ROOT, "foo", 2).unwrap().unwrap();
assert_doc!(
&doc,
map! {
"foo" => { op => 2},
}
);
}
#[test]
fn no_change_on_repeated_map_set() {
let mut doc = new_doc();
doc.set(automerge::ROOT, "foo", 1).unwrap();
assert!(doc.set(automerge::ROOT, "foo", 1).unwrap().is_none());
}
#[test]
fn no_change_on_repeated_list_set() {
let mut doc = new_doc();
let list_id = doc
.set(automerge::ROOT, "list", automerge::Value::list())
.unwrap()
.unwrap();
doc.insert(list_id, 0, 1).unwrap();
doc.set(list_id, 0, 1).unwrap();
assert!(doc.set(list_id, 0, 1).unwrap().is_none());
}
#[test]
fn no_change_on_list_insert_followed_by_set_of_same_value() {
let mut doc = new_doc();
let list_id = doc
.set(automerge::ROOT, "list", automerge::Value::list())
.unwrap()
.unwrap();
doc.insert(list_id, 0, 1).unwrap();
assert!(doc.set(list_id, 0, 1).unwrap().is_none());
}
#[test]
fn repeated_map_assignment_which_resolves_conflict_not_ignored() {
let mut doc1 = new_doc();
let mut doc2 = new_doc();
doc1.set(automerge::ROOT, "field", 123).unwrap();
doc2.merge(&mut doc1);
doc2.set(automerge::ROOT, "field", 456).unwrap();
doc1.set(automerge::ROOT, "field", 789).unwrap();
doc1.merge(&mut doc2);
assert_eq!(doc1.values(automerge::ROOT, "field").unwrap().len(), 2);
let op = doc1.set(automerge::ROOT, "field", 123).unwrap().unwrap();
assert_doc!(
&doc1,
map! {
"field" => {
op => 123
}
}
);
}
#[test]
fn repeated_list_assignment_which_resolves_conflict_not_ignored() {
let mut doc1 = new_doc();
let mut doc2 = new_doc();
let list_id = doc1
.set(automerge::ROOT, "list", automerge::Value::list())
.unwrap()
.unwrap();
doc1.insert(list_id, 0, 123).unwrap();
doc2.merge(&mut doc1);
let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id);
doc2.set(list_id_in_doc2, 0, 456).unwrap().unwrap();
doc1.merge(&mut doc2);
let doc1_op = doc1.set(list_id, 0, 789).unwrap().unwrap();
assert_doc!(
&doc1,
map! {
"list" => {
list_id => list![
{ doc1_op => 789 },
]
}
}
);
}
#[test]
fn list_deletion() {
let mut doc = new_doc();
let list_id = doc
.set(automerge::ROOT, "list", automerge::Value::list())
.unwrap()
.unwrap();
let op1 = doc.insert(list_id, 0, 123).unwrap();
doc.insert(list_id, 1, 456).unwrap();
let op3 = doc.insert(list_id, 2, 789).unwrap();
doc.del(list_id, 1).unwrap();
assert_doc!(
&doc,
map! {
"list" => {list_id => list![
{ op1 => 123 },
{ op3 => 789 },
]}
}
)
}
#[test]
fn merge_concurrent_map_prop_updates() {
let mut doc1 = new_doc();
let mut doc2 = new_doc();
let op1 = doc1.set(automerge::ROOT, "foo", "bar").unwrap().unwrap();
let hello = doc2
.set(automerge::ROOT, "hello", "world")
.unwrap()
.unwrap();
doc1.merge(&mut doc2);
assert_eq!(
doc1.value(automerge::ROOT, "foo").unwrap().unwrap().0,
"bar".into()
);
assert_doc!(
&doc1,
map! {
"foo" => { op1 => "bar" },
"hello" => { hello.translate(&doc2) => "world" },
}
);
doc2.merge(&mut doc1);
assert_doc!(
&doc2,
map! {
"foo" => { op1.translate(&doc1) => "bar" },
"hello" => { hello => "world" },
}
);
assert_eq!(realize(&doc1), realize(&doc2));
}
#[test]
fn add_concurrent_increments_of_same_property() {
let mut doc1 = new_doc();
let mut doc2 = new_doc();
let counter_id = doc1
.set(automerge::ROOT, "counter", mk_counter(0))
.unwrap()
.unwrap();
doc2.merge(&mut doc1);
doc1.inc(automerge::ROOT, "counter", 1).unwrap();
doc2.inc(automerge::ROOT, "counter", 2).unwrap();
doc1.merge(&mut doc2);
assert_doc!(
&doc1,
map! {
"counter" => {
counter_id => mk_counter(3)
}
}
);
}
#[test]
fn add_increments_only_to_preceeded_values() {
    // An increment only applies to the counter op it causally follows
    // ("preceded"); two independently-created counters remain separate,
    // conflicting values after merge — their increments are not summed
    // together. (NOTE: "preceeded" in the name is a typo for "preceded".)
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    // create a counter in doc1
    let doc1_counter_id = doc1
        .set(automerge::ROOT, "counter", mk_counter(0))
        .unwrap()
        .unwrap();
    doc1.inc(automerge::ROOT, "counter", 1).unwrap();
    // create a counter in doc2
    let doc2_counter_id = doc2
        .set(automerge::ROOT, "counter", mk_counter(0))
        .unwrap()
        .unwrap();
    doc2.inc(automerge::ROOT, "counter", 3).unwrap();
    // The two values should be conflicting rather than added
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "counter" => {
                doc1_counter_id.native() => mk_counter(1),
                doc2_counter_id.translate(&doc2) => mk_counter(3),
            }
        }
    );
}
#[test]
fn concurrent_updates_of_same_field() {
    // Concurrent sets of the same map key produce a conflict: both
    // values are retained, keyed by the op id that wrote each one.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let set_one_opid = doc1.set(automerge::ROOT, "field", "one").unwrap().unwrap();
    let set_two_opid = doc2.set(automerge::ROOT, "field", "two").unwrap().unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "field" => {
                set_one_opid.native() => "one",
                set_two_opid.translate(&doc2) => "two",
            }
        }
    );
}
#[test]
fn concurrent_updates_of_same_list_element() {
    // Concurrent sets of the same list index conflict just like map
    // keys: the element holds both candidate values after merge.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let list_id = doc1
        .set(automerge::ROOT, "birds", automerge::Value::list())
        .unwrap()
        .unwrap();
    doc1.insert(list_id, 0, "finch").unwrap();
    doc2.merge(&mut doc1);
    // Both documents overwrite index 0 concurrently.
    let set_one_op = doc1.set(list_id, 0, "greenfinch").unwrap().unwrap();
    let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id);
    let set_op_two = doc2.set(list_id_in_doc2, 0, "goldfinch").unwrap().unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "birds" => {
                list_id => list![{
                    set_one_op.native() => "greenfinch",
                    set_op_two.translate(&doc2) => "goldfinch",
                }]
            }
        }
    );
}
#[test]
fn assignment_conflicts_of_different_types() {
    // Conflicting assignments of different value *types* (scalar, list,
    // map) to the same key are all preserved as conflict candidates.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let mut doc3 = new_doc();
    let op_one = doc1
        .set(automerge::ROOT, "field", "string")
        .unwrap()
        .unwrap();
    let op_two = doc2
        .set(automerge::ROOT, "field", automerge::Value::list())
        .unwrap()
        .unwrap();
    let op_three = doc3
        .set(automerge::ROOT, "field", automerge::Value::map())
        .unwrap()
        .unwrap();
    // Merge all three concurrent writes into doc1.
    doc1.merge(&mut doc2);
    doc1.merge(&mut doc3);
    assert_doc!(
        &doc1,
        map! {
            "field" => {
                op_one.native() => "string",
                op_two.translate(&doc2) => list!{},
                op_three.translate(&doc3) => map!{},
            }
        }
    );
}
#[test]
fn changes_within_conflicting_map_field() {
    // Edits made *inside* one branch of a conflicting value survive the
    // merge: the conflicting map keeps its nested key.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let op_one = doc1
        .set(automerge::ROOT, "field", "string")
        .unwrap()
        .unwrap();
    // doc2 concurrently replaces the same key with a map and writes
    // into that map.
    let map_id = doc2
        .set(automerge::ROOT, "field", automerge::Value::map())
        .unwrap()
        .unwrap();
    let set_in_doc2 = doc2.set(map_id, "innerKey", 42).unwrap().unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "field" => {
                op_one.native() => "string",
                map_id.translate(&doc2) => map!{
                    "innerKey" => {
                        set_in_doc2.translate(&doc2) => 42,
                    }
                }
            }
        }
    );
}
#[test]
fn changes_within_conflicting_list_element() {
    // Same as the map case but for a list element: both replicas
    // replace element 0 with a map and edit it; after merging, both
    // conflicting maps are kept with their respective contents.
    // sorted_actors gives a deterministic actor ordering so the
    // conflict-candidate ordering in the assertion is stable.
    let (actor1, actor2) = sorted_actors();
    let mut doc1 = new_doc_with_actor(actor1);
    let mut doc2 = new_doc_with_actor(actor2);
    let list_id = doc1
        .set(automerge::ROOT, "list", automerge::Value::list())
        .unwrap()
        .unwrap();
    doc1.insert(list_id, 0, "hello").unwrap();
    doc2.merge(&mut doc1);
    let map_in_doc1 = doc1
        .set(list_id, 0, automerge::Value::map())
        .unwrap()
        .unwrap();
    let set_map1 = doc1.set(map_in_doc1, "map1", true).unwrap().unwrap();
    let set_key1 = doc1.set(map_in_doc1, "key", 1).unwrap().unwrap();
    let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id);
    let map_in_doc2 = doc2
        .set(list_id_in_doc2, 0, automerge::Value::map())
        .unwrap()
        .unwrap();
    doc1.merge(&mut doc2);
    // doc2 keeps editing its own map after the first merge.
    let set_map2 = doc2.set(map_in_doc2, "map2", true).unwrap().unwrap();
    let set_key2 = doc2.set(map_in_doc2, "key", 2).unwrap().unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "list" => {
                list_id => list![
                    {
                        map_in_doc2.translate(&doc2) => map!{
                            "map2" => { set_map2.translate(&doc2) => true },
                            "key" => { set_key2.translate(&doc2) => 2 },
                        },
                        map_in_doc1.native() => map!{
                            "key" => { set_key1.native() => 1 },
                            "map1" => { set_map1.native() => true },
                        }
                    }
                ]
            }
        }
    );
}
#[test]
fn concurrently_assigned_nested_maps_should_not_merge() {
    // Two maps assigned concurrently to the same key remain distinct
    // conflict candidates — their contents are NOT deep-merged into a
    // single map.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let doc1_map_id = doc1
        .set(automerge::ROOT, "config", automerge::Value::map())
        .unwrap()
        .unwrap();
    let doc1_field = doc1
        .set(doc1_map_id, "background", "blue")
        .unwrap()
        .unwrap();
    let doc2_map_id = doc2
        .set(automerge::ROOT, "config", automerge::Value::map())
        .unwrap()
        .unwrap();
    let doc2_field = doc2
        .set(doc2_map_id, "logo_url", "logo.png")
        .unwrap()
        .unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "config" => {
                doc1_map_id.native() => map!{
                    "background" => {doc1_field.native() => "blue"}
                },
                doc2_map_id.translate(&doc2) => map!{
                    "logo_url" => {doc2_field.translate(&doc2) => "logo.png"}
                }
            }
        }
    );
}
#[test]
fn concurrent_insertions_at_different_list_positions() {
    // Concurrent insertions at *different* positions interleave into a
    // single consistent order: one, two, three, four.
    let (actor1, actor2) = sorted_actors();
    let mut doc1 = new_doc_with_actor(actor1);
    let mut doc2 = new_doc_with_actor(actor2);
    assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap());
    let list_id = doc1
        .set(automerge::ROOT, "list", automerge::Value::list())
        .unwrap()
        .unwrap();
    let one = doc1.insert(list_id, 0, "one").unwrap();
    let three = doc1.insert(list_id, 1, "three").unwrap();
    doc2.merge(&mut doc1);
    // doc1 splices "two" between one and three; doc2 appends "four".
    let two = doc1.splice(list_id, 1, 0, vec!["two".into()]).unwrap()[0];
    let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id);
    let four = doc2.insert(list_id_in_doc2, 2, "four").unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "list" => {
                list_id => list![
                    {one.native() => "one"},
                    {two.native() => "two"},
                    {three.native() => "three"},
                    {four.translate(&doc2) => "four"},
                ]
            }
        }
    );
}
#[test]
fn concurrent_insertions_at_same_list_position() {
    // Concurrent insertions at the *same* position are ordered
    // deterministically by actor id: doc1's actor sorts first, so its
    // "starling" lands before doc2's "chaffinch".
    let (actor1, actor2) = sorted_actors();
    let mut doc1 = new_doc_with_actor(actor1);
    let mut doc2 = new_doc_with_actor(actor2);
    assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap());
    let list_id = doc1
        .set(automerge::ROOT, "birds", automerge::Value::list())
        .unwrap()
        .unwrap();
    let parakeet = doc1.insert(list_id, 0, "parakeet").unwrap();
    doc2.merge(&mut doc1);
    let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id);
    // Both insert at index 1, concurrently.
    let starling = doc1.insert(list_id, 1, "starling").unwrap();
    let chaffinch = doc2.insert(list_id_in_doc2, 1, "chaffinch").unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "birds" => {
                list_id => list![
                    {
                        parakeet.native() => "parakeet",
                    },
                    {
                        starling.native() => "starling",
                    },
                    {
                        chaffinch.translate(&doc2) => "chaffinch",
                    },
                ]
            },
        }
    );
}
#[test]
fn concurrent_assignment_and_deletion_of_a_map_entry() {
    // When a key is concurrently deleted (doc1) and reassigned (doc2),
    // the assignment wins: the key survives with the new value.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    doc1.set(automerge::ROOT, "bestBird", "robin").unwrap();
    doc2.merge(&mut doc1);
    doc1.del(automerge::ROOT, "bestBird").unwrap();
    let set_two = doc2
        .set(automerge::ROOT, "bestBird", "magpie")
        .unwrap()
        .unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "bestBird" => {
                set_two.translate(&doc2) => "magpie",
            }
        }
    );
}
#[test]
fn concurrent_assignment_and_deletion_of_list_entry() {
    // Same rule for list elements: a concurrent set beats a concurrent
    // delete of the same element, so "starling" survives the merge.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let list_id = doc1
        .set(automerge::ROOT, "birds", automerge::Value::list())
        .unwrap()
        .unwrap();
    let blackbird = doc1.insert(list_id, 0, "blackbird").unwrap();
    doc1.insert(list_id, 1, "thrush").unwrap();
    let goldfinch = doc1.insert(list_id, 2, "goldfinch").unwrap();
    doc2.merge(&mut doc1);
    // doc1 overwrites index 1; doc2 concurrently deletes it.
    let starling = doc1.set(list_id, 1, "starling").unwrap().unwrap();
    let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id);
    doc2.del(list_id_in_doc2, 1).unwrap();
    // Before merging back, doc2 has only two elements left.
    assert_doc!(
        &doc2,
        map! {
            "birds" => {list_id.translate(&doc1) => list![
                { blackbird.translate(&doc1) => "blackbird"},
                { goldfinch.translate(&doc1) => "goldfinch"},
            ]}
        }
    );
    assert_doc!(
        &doc1,
        map! {
            "birds" => {list_id => list![
                { blackbird => "blackbird" },
                { starling => "starling" },
                { goldfinch => "goldfinch" },
            ]}
        }
    );
    // After the merge the set wins and doc1 is unchanged.
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "birds" => {list_id => list![
                { blackbird => "blackbird" },
                { starling => "starling" },
                { goldfinch => "goldfinch" },
            ]}
        }
    );
}
#[test]
fn insertion_after_a_deleted_list_element() {
    // Inserting after elements that were concurrently deleted still
    // places the new element correctly relative to the survivors.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let list_id = doc1
        .set(automerge::ROOT, "birds", automerge::Value::list())
        .unwrap()
        .unwrap();
    let blackbird = doc1.insert(list_id, 0, "blackbird").unwrap();
    doc1.insert(list_id, 1, "thrush").unwrap();
    doc1.insert(list_id, 2, "goldfinch").unwrap();
    doc2.merge(&mut doc1);
    // doc1 deletes thrush and goldfinch; doc2 concurrently inserts
    // "starling" after goldfinch (index 2 in its still-3-element view).
    doc1.splice(list_id, 1, 2, Vec::new()).unwrap();
    let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id);
    let starling = doc2
        .splice(list_id_in_doc2, 2, 0, vec!["starling".into()])
        .unwrap()[0];
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "birds" => {list_id => list![
                { blackbird.native() => "blackbird" },
                { starling.translate(&doc2) => "starling" }
            ]}
        }
    );
    doc2.merge(&mut doc1);
    assert_doc!(
        &doc2,
        map! {
            "birds" => {list_id.translate(&doc1) => list![
                { blackbird.translate(&doc1) => "blackbird" },
                { starling.native() => "starling" }
            ]}
        }
    );
}
#[test]
fn concurrent_deletion_of_same_list_element() {
    // Deleting the same element on both replicas is idempotent: after
    // merging, the element is gone exactly once and both docs agree.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let list_id = doc1
        .set(automerge::ROOT, "birds", automerge::Value::list())
        .unwrap()
        .unwrap();
    let albatross = doc1.insert(list_id, 0, "albatross").unwrap();
    doc1.insert(list_id, 1, "buzzard").unwrap();
    let cormorant = doc1.insert(list_id, 2, "cormorant").unwrap();
    doc2.merge(&mut doc1);
    // Both replicas delete "buzzard" concurrently.
    doc1.del(list_id, 1).unwrap();
    let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id);
    doc2.del(list_id_in_doc2, 1).unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "birds" => {list_id => list![
                { albatross => "albatross" },
                { cormorant => "cormorant" }
            ]}
        }
    );
    doc2.merge(&mut doc1);
    assert_doc!(
        &doc2,
        map! {
            "birds" => {list_id.translate(&doc1) => list![
                { albatross.translate(&doc1) => "albatross" },
                { cormorant.translate(&doc1) => "cormorant" }
            ]}
        }
    );
}
#[test]
fn concurrent_updates_at_different_levels() {
    // A delete of a whole subtree (doc2 deletes "birds") wins over a
    // concurrent update *inside* that subtree (doc1 sets a key in it):
    // after merging only "mammals" remains under "animals".
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let animals = doc1
        .set(automerge::ROOT, "animals", automerge::Value::map())
        .unwrap()
        .unwrap();
    let birds = doc1
        .set(animals, "birds", automerge::Value::map())
        .unwrap()
        .unwrap();
    doc1.set(birds, "pink", "flamingo").unwrap().unwrap();
    doc1.set(birds, "black", "starling").unwrap().unwrap();
    let mammals = doc1
        .set(animals, "mammals", automerge::Value::list())
        .unwrap()
        .unwrap();
    let badger = doc1.insert(mammals, 0, "badger").unwrap();
    doc2.merge(&mut doc1);
    // doc1 updates inside "birds" while doc2 deletes "birds" entirely.
    doc1.set(birds, "brown", "sparrow").unwrap().unwrap();
    let animals_in_doc2 = translate_obj_id(&doc1, &doc2, animals);
    doc2.del(animals_in_doc2, "birds").unwrap();
    doc1.merge(&mut doc2);
    assert_obj!(
        &doc1,
        automerge::ROOT,
        "animals",
        map! {
            "mammals" => {
                mammals => list![{ badger => "badger" }],
            }
        }
    );
    assert_obj!(
        &doc2,
        automerge::ROOT,
        "animals",
        map! {
            "mammals" => {
                mammals.translate(&doc1) => list![{ badger.translate(&doc1) => "badger" }],
            }
        }
    );
}
#[test]
fn concurrent_updates_of_concurrently_deleted_objects() {
    // An update inside an object that was concurrently deleted must not
    // resurrect the object: after merging, the deletion wins and the
    // "blackbird" entry stays gone.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let birds = doc1
        .set(automerge::ROOT, "birds", automerge::Value::map())
        .unwrap()
        .unwrap();
    let blackbird = doc1
        .set(birds, "blackbird", automerge::Value::map())
        .unwrap()
        .unwrap();
    doc1.set(blackbird, "feathers", "black").unwrap().unwrap();
    doc2.merge(&mut doc1);
    // doc1 deletes the blackbird object...
    doc1.del(birds, "blackbird").unwrap();
    // ...while doc2 concurrently updates inside it. The object id must
    // be translated into doc2's id space before being used there — the
    // previous code discarded the translated id and passed doc1's id to
    // doc2, which doesn't exercise the intended cross-document update.
    let blackbird_in_doc2 = translate_obj_id(&doc1, &doc2, blackbird);
    doc2.set(blackbird_in_doc2, "beak", "orange").unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "birds" => {
                birds => map!{},
            }
        }
    );
}
#[test]
fn does_not_interleave_sequence_insertions_at_same_position() {
    // Two concurrent runs of insertions at the same position must stay
    // contiguous after merge (no character-level interleaving): doc1's
    // whole sequence comes first, then doc2's whole sequence.
    let (actor1, actor2) = sorted_actors();
    let mut doc1 = new_doc_with_actor(actor1);
    let mut doc2 = new_doc_with_actor(actor2);
    let wisdom = doc1
        .set(automerge::ROOT, "wisdom", automerge::Value::list())
        .unwrap()
        .unwrap();
    doc2.merge(&mut doc1);
    // doc1 splices "to be is to do" at position 0...
    let doc1elems = doc1
        .splice(
            wisdom,
            0,
            0,
            vec![
                "to".into(),
                "be".into(),
                "is".into(),
                "to".into(),
                "do".into(),
            ],
        )
        .unwrap();
    let wisdom_in_doc2 = translate_obj_id(&doc1, &doc2, wisdom);
    // ...while doc2 concurrently splices "to do is to be" at position 0.
    let doc2elems = doc2
        .splice(
            wisdom_in_doc2,
            0,
            0,
            vec![
                "to".into(),
                "do".into(),
                "is".into(),
                "to".into(),
                "be".into(),
            ],
        )
        .unwrap();
    doc1.merge(&mut doc2);
    assert_doc!(
        &doc1,
        map! {
            "wisdom" => {wisdom => list![
                {doc1elems[0].native() => "to"},
                {doc1elems[1].native() => "be"},
                {doc1elems[2].native() => "is"},
                {doc1elems[3].native() => "to"},
                {doc1elems[4].native() => "do"},
                {doc2elems[0].translate(&doc2) => "to"},
                {doc2elems[1].translate(&doc2) => "do"},
                {doc2elems[2].translate(&doc2) => "is"},
                {doc2elems[3].translate(&doc2) => "to"},
                {doc2elems[4].translate(&doc2) => "be"},
            ]}
        }
    );
}
#[test]
fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id() {
    // Insertion at position 0 by the replica with the *greater* actor
    // id still lands before the pre-existing element it was inserted
    // ahead of. (NOTE: "mutliple" in the name is a typo for "multiple".)
    let (actor1, actor2) = sorted_actors();
    assert!(actor2 > actor1);
    let mut doc1 = new_doc_with_actor(actor1);
    let mut doc2 = new_doc_with_actor(actor2);
    let list = doc1
        .set(automerge::ROOT, "list", automerge::Value::list())
        .unwrap()
        .unwrap();
    let two = doc1.insert(list, 0, "two").unwrap();
    doc2.merge(&mut doc1);
    let list_in_doc2 = translate_obj_id(&doc1, &doc2, list);
    let one = doc2.insert(list_in_doc2, 0, "one").unwrap();
    assert_doc!(
        &doc2,
        map! {
            "list" => { list.translate(&doc1) => list![
                { one.native() => "one" },
                { two.translate(&doc1) => "two" },
            ]}
        }
    );
}
#[test]
fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() {
    // Mirror of the previous test: the inserting replica has the
    // *lesser* actor id, and the result is the same ordering.
    // (NOTE: "mutliple" in the name is a typo for "multiple".)
    let (actor2, actor1) = sorted_actors();
    assert!(actor2 < actor1);
    let mut doc1 = new_doc_with_actor(actor1);
    let mut doc2 = new_doc_with_actor(actor2);
    let list = doc1
        .set(automerge::ROOT, "list", automerge::Value::list())
        .unwrap()
        .unwrap();
    let two = doc1.insert(list, 0, "two").unwrap();
    doc2.merge(&mut doc1);
    let list_in_doc2 = translate_obj_id(&doc1, &doc2, list);
    let one = doc2.insert(list_in_doc2, 0, "one").unwrap();
    assert_doc!(
        &doc2,
        map! {
            "list" => { list.translate(&doc1) => list![
                { one.native() => "one" },
                { two.translate(&doc1) => "two" },
            ]}
        }
    );
}
#[test]
fn insertion_consistent_with_causality() {
    // Elements inserted alternately at the head of the list by two
    // replicas, with a merge between each insertion, end up in reverse
    // insertion order — consistent with their causal history.
    let mut doc1 = new_doc();
    let mut doc2 = new_doc();
    let list = doc1
        .set(automerge::ROOT, "list", automerge::Value::list())
        .unwrap()
        .unwrap();
    let four = doc1.insert(list, 0, "four").unwrap();
    doc2.merge(&mut doc1);
    let list_in_doc2 = translate_obj_id(&doc1, &doc2, list);
    let three = doc2.insert(list_in_doc2, 0, "three").unwrap();
    doc1.merge(&mut doc2);
    let two = doc1.insert(list, 0, "two").unwrap();
    doc2.merge(&mut doc1);
    let one = doc2.insert(list_in_doc2, 0, "one").unwrap();
    assert_doc!(
        &doc2,
        map! {
            "list" => {list.translate(&doc1) => list![
                {one.native() => "one"},
                {two.translate(&doc1) => "two"},
                {three.native() => "three" },
                {four.translate(&doc1) => "four"},
            ]}
        }
    );
}
#[test]
fn save_and_restore_empty() {
    // Round-tripping a brand-new document through save/load yields an
    // empty document.
    let mut doc = new_doc();
    let bytes = doc.save().unwrap();
    let loaded = Automerge::load(&bytes).unwrap();
    assert_doc!(&loaded, map! {});
}
#[test]
fn save_restore_complex() {
    // Saving a document that contains a merged conflict and loading it
    // back preserves the conflict: both "title" candidates survive the
    // round trip.
    let mut doc1 = new_doc();
    let todos = doc1
        .set(automerge::ROOT, "todos", automerge::Value::list())
        .unwrap()
        .unwrap();
    let first_todo = doc1.insert(todos, 0, automerge::Value::map()).unwrap();
    doc1.set(first_todo, "title", "water plants")
        .unwrap()
        .unwrap();
    let first_done = doc1.set(first_todo, "done", false).unwrap().unwrap();
    let mut doc2 = new_doc();
    doc2.merge(&mut doc1);
    // Concurrent conflicting updates to the same "title" key.
    let first_todo_in_doc2 = translate_obj_id(&doc1, &doc2, first_todo);
    let weed_title = doc2
        .set(first_todo_in_doc2, "title", "weed plants")
        .unwrap()
        .unwrap();
    let kill_title = doc1
        .set(first_todo, "title", "kill plants")
        .unwrap()
        .unwrap();
    doc1.merge(&mut doc2);
    // Serialize and reload; the conflict must still be present.
    let reloaded = Automerge::load(&doc1.save().unwrap()).unwrap();
    assert_doc!(
        &reloaded,
        map! {
            "todos" => {todos.translate(&doc1) => list![
                {first_todo.translate(&doc1) => map!{
                    "title" => {
                        weed_title.translate(&doc2) => "weed plants",
                        kill_title.translate(&doc1) => "kill plants",
                    },
                    "done" => {first_done.translate(&doc1) => false},
                }}
            ]}
        }
    );
}

View file

@ -6,6 +6,10 @@ license = "MIT"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[[bin]]
name = "edit-trace"
bench = false
[dependencies] [dependencies]
automerge = { path = "../automerge" } automerge = { path = "../automerge" }
criterion = "0.3.5" criterion = "0.3.5"

View file

@ -5,9 +5,9 @@ use std::fs;
fn replay_trace(commands: Vec<(usize, usize, Vec<Value>)>) -> Automerge { fn replay_trace(commands: Vec<(usize, usize, Vec<Value>)>) -> Automerge {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap();
for (pos, del, vals) in commands { for (pos, del, vals) in commands {
doc.splice(text, pos, del, vals).unwrap(); doc.splice(&text, pos, del, vals).unwrap();
} }
doc.commit(None, None); doc.commit(None, None);
doc doc

View file

@ -19,12 +19,12 @@ fn main() -> Result<(), AutomergeError> {
let mut doc = Automerge::new(); let mut doc = Automerge::new();
let now = Instant::now(); let now = Instant::now();
let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap();
for (i, (pos, del, vals)) in commands.into_iter().enumerate() { for (i, (pos, del, vals)) in commands.into_iter().enumerate() {
if i % 1000 == 0 { if i % 1000 == 0 {
println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); println!("Processed {} edits in {} ms", i, now.elapsed().as_millis());
} }
doc.splice(text, pos, del, vals)?; doc.splice(&text, pos, del, vals)?;
} }
let _ = doc.save(); let _ = doc.save();
println!("Done in {} ms", now.elapsed().as_millis()); println!("Done in {} ms", now.elapsed().as_millis());