diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs index 80a3d65f..d0c6232d 100644 --- a/automerge-wasm/src/lib.rs +++ b/automerge-wasm/src/lib.rs @@ -1,6 +1,6 @@ extern crate web_sys; use automerge as am; -use automerge::{Change, ChangeHash, Prop, Value}; +use automerge::{Change, ChangeHash, ObjId, Prop, Value}; use js_sys::{Array, Object, Reflect, Uint8Array}; use serde::de::DeserializeOwned; use serde::Serialize; @@ -151,9 +151,9 @@ impl Automerge { pub fn keys(&mut self, obj: JsValue, heads: JsValue) -> Result { let obj = self.import(obj)?; let result = if let Some(heads) = get_heads(heads) { - self.0.keys_at(obj, &heads) + self.0.keys_at(&obj, &heads) } else { - self.0.keys(obj) + self.0.keys(&obj) } .iter() .map(|s| JsValue::from_str(s)) @@ -164,9 +164,9 @@ impl Automerge { pub fn text(&mut self, obj: JsValue, heads: JsValue) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { - self.0.text_at(obj, &heads) + self.0.text_at(&obj, &heads) } else { - self.0.text(obj) + self.0.text(&obj) } .map_err(to_js_err) .map(|t| t.into()) @@ -185,7 +185,7 @@ impl Automerge { let mut vals = vec![]; if let Some(t) = text.as_string() { self.0 - .splice_text(obj, start, delete_count, &t) + .splice_text(&obj, start, delete_count, &t) .map_err(to_js_err)?; } else { if let Ok(array) = text.dyn_into::() { @@ -201,7 +201,7 @@ impl Automerge { } } self.0 - .splice(obj, start, delete_count, vals) + .splice(&obj, start, delete_count, vals) .map_err(to_js_err)?; } Ok(()) @@ -223,9 +223,12 @@ impl Automerge { let value = self.import_value(value, datatype)?; let opid = self .0 - .insert(obj, index as usize, value) + .insert(&obj, index as usize, value) .map_err(to_js_err)?; - Ok(self.export(opid)) + match opid { + Some(opid) => Ok(self.export(opid)), + None => Ok(JsValue::null()), + } } pub fn set( @@ -238,7 +241,7 @@ impl Automerge { let obj = self.import(obj)?; let prop = self.import_prop(prop)?; let value = self.import_value(value, datatype)?; - let opid = self.0.set(obj, prop, value).map_err(to_js_err)?; + let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?; match opid { Some(opid) => Ok(self.export(opid)), None => Ok(JsValue::null()), @@ -252,7 +255,7 @@ impl Automerge { .as_f64() .ok_or("inc needs a numberic value") .map_err(to_js_err)?; - self.0.inc(obj, prop, value as i64).map_err(to_js_err)?; + self.0.inc(&obj, prop, value as i64).map_err(to_js_err)?; Ok(()) } @@ -263,9 +266,9 @@ impl Automerge { let heads = get_heads(heads); if let Ok(prop) = prop { let value = if let Some(h) = heads { - self.0.value_at(obj, prop, &h) + self.0.value_at(&obj, prop, &h) } else { - self.0.value(obj, prop) + self.0.value(&obj, prop) } .map_err(to_js_err)?; match value { @@ -289,9 +292,9 @@ impl Automerge { let prop = to_prop(arg); if let Ok(prop) = prop { let values = if let Some(heads) = get_heads(heads) { - self.0.values_at(obj, prop, &heads) + self.0.values_at(&obj, prop, &heads) } else { - self.0.values(obj, prop) + self.0.values(&obj, prop) } .map_err(to_js_err)?; for value in values { @@ -318,16 +321,16 @@ impl Automerge { pub fn length(&mut self, obj: JsValue, heads: JsValue) -> Result { let obj = self.import(obj)?; if let Some(heads) = get_heads(heads) { - Ok((self.0.length_at(obj, &heads) as f64).into()) + Ok((self.0.length_at(&obj, &heads) as f64).into()) } else { - Ok((self.0.length(obj) as f64).into()) + Ok((self.0.length(&obj) as f64).into()) } } pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { let obj = self.import(obj)?; let prop 
= to_prop(prop)?; - self.0.del(obj, prop).map_err(to_js_err)?; + self.0.del(&obj, prop).map_err(to_js_err)?; Ok(()) } @@ -442,11 +445,11 @@ impl Automerge { } } - fn export(&self, val: E) -> JsValue { - self.0.export(val).into() + fn export(&self, val: ObjId) -> JsValue { + val.to_string().into() } - fn import(&self, id: JsValue) -> Result { + fn import(&self, id: JsValue) -> Result { let id_str = id .as_string() .ok_or("invalid opid/objid/elemid") diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml index 6a0f81e7..2212cb02 100644 --- a/automerge/Cargo.toml +++ b/automerge/Cargo.toml @@ -36,3 +36,4 @@ pretty_assertions = "1.0.0" proptest = { version = "^1.0.0", default-features = false, features = ["std"] } serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } maplit = { version = "^1.0" } +decorum = "0.3.1" diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs new file mode 100644 index 00000000..9b70ce9f --- /dev/null +++ b/automerge/src/automerge.rs @@ -0,0 +1,1338 @@ +use std::collections::{HashMap, HashSet, VecDeque}; +use unicode_segmentation::UnicodeSegmentation; + +use crate::change::{encode_document, export_change}; +use crate::exid::ExId; +use crate::op_set::OpSet; +use crate::types::{ + ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, Patch, + ScalarValue, Value, +}; +use crate::{legacy, query, types}; +use crate::{AutomergeError, Change, Prop}; + +#[derive(Debug, Clone)] +pub struct Automerge { + queue: Vec, + history: Vec, + history_index: HashMap, + states: HashMap>, + deps: HashSet, + saved: Vec, + ops: OpSet, + actor: Option, + max_op: u64, + transaction: Option, +} + +impl Automerge { + pub fn new() -> Self { + Automerge { + queue: vec![], + history: vec![], + history_index: HashMap::new(), + states: HashMap::new(), + ops: Default::default(), + deps: Default::default(), + saved: Default::default(), + actor: None, + max_op: 0, + transaction: None, + } + } + + pub fn set_actor(&mut self, actor: ActorId) { + self.ensure_transaction_closed(); + self.actor = Some(self.ops.m.actors.cache(actor)) + } + + fn random_actor(&mut self) -> ActorId { + let actor = ActorId::from(uuid::Uuid::new_v4().as_bytes().to_vec()); + self.actor = Some(self.ops.m.actors.cache(actor.clone())); + actor + } + + pub fn get_actor(&mut self) -> ActorId { + if let Some(actor) = self.actor { + self.ops.m.actors[actor].clone() + } else { + self.random_actor() + } + } + + pub fn maybe_get_actor(&self) -> Option { + self.actor.map(|i| self.ops.m.actors[i].clone()) + } + + fn get_actor_index(&mut self) -> usize { + if let Some(actor) = self.actor { + actor + } else { + self.random_actor(); + self.actor.unwrap() // random_actor always sets actor to is_some() + } + } + + pub fn new_with_actor_id(actor: ActorId) -> Self { + let mut am = Automerge { + queue: vec![], + history: vec![], + history_index: HashMap::new(), + states: HashMap::new(), + ops: Default::default(), + deps: Default::default(), + saved: Default::default(), + actor: None, + max_op: 0, + transaction: None, + }; + am.actor = Some(am.ops.m.actors.cache(actor)); + am + } + + pub fn pending_ops(&self) -> u64 { + self.transaction + .as_ref() + .map(|t| t.operations.len() as u64) + .unwrap_or(0) + } + + fn tx(&mut self) -> &mut Transaction { + if self.transaction.is_none() { + let actor = self.get_actor_index(); + + let seq = self.states.entry(actor).or_default().len() as u64 + 1; + let mut deps = self.get_heads(); + if seq > 1 { + let last_hash = 
self.get_hash(actor, seq - 1).unwrap(); + if !deps.contains(&last_hash) { + deps.push(last_hash); + } + } + + self.transaction = Some(Transaction { + actor, + seq, + start_op: self.max_op + 1, + time: 0, + message: None, + extra_bytes: Default::default(), + hash: None, + operations: vec![], + deps, + }); + } + + self.transaction.as_mut().unwrap() + } + + pub fn commit(&mut self, message: Option, time: Option) -> Vec { + let tx = self.tx(); + + if message.is_some() { + tx.message = message; + } + + if let Some(t) = time { + tx.time = t; + } + + tx.operations.len(); + + self.ensure_transaction_closed(); + + self.get_heads() + } + + pub fn ensure_transaction_closed(&mut self) { + if let Some(tx) = self.transaction.take() { + self.update_history(export_change(&tx, &self.ops.m.actors, &self.ops.m.props)); + } + } + + pub fn rollback(&mut self) -> usize { + if let Some(tx) = self.transaction.take() { + let num = tx.operations.len(); + for op in &tx.operations { + for pred_id in &op.pred { + // FIXME - use query to make this fast + if let Some(p) = self.ops.iter().position(|o| o.id == *pred_id) { + self.ops + .replace(op.obj, p, |o| o.succ.retain(|i| i != pred_id)); + } + } + if let Some(pos) = self.ops.iter().position(|o| o.id == op.id) { + self.ops.remove(op.obj, pos); + } + } + num + } else { + 0 + } + } + + fn next_id(&mut self) -> OpId { + let tx = self.tx(); + OpId(tx.start_op + tx.operations.len() as u64, tx.actor) + } + + fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) { + for succ in succ_pos { + self.ops.replace(op.obj, *succ, |old_op| { + old_op.succ.push(op.id); + }); + } + + if !op.is_del() { + self.ops.insert(pos, op.clone()); + } + + self.tx().operations.push(op); + } + + fn insert_op(&mut self, op: Op) -> Op { + let q = self.ops.search(op.obj, query::SeekOp::new(&op)); + + for i in q.succ { + self.ops + .replace(op.obj, i, |old_op| old_op.succ.push(op.id)); + } + + if !op.is_del() { + self.ops.insert(q.pos, op.clone()); + } + op + } + + // KeysAt::() + // LenAt::() + // PropAt::() + // NthAt::() + + pub fn keys(&self, obj: &ExId) -> Vec { + if let Ok(obj) = self.exid_to_obj(obj) { + let q = self.ops.search(obj, query::Keys::new()); + q.keys.iter().map(|k| self.to_string(*k)).collect() + } else { + vec![] + } + } + + pub fn keys_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Vec { + if let Ok(obj) = self.exid_to_obj(obj) { + let clock = self.clock_at(heads); + let q = self.ops.search(obj, query::KeysAt::new(clock)); + q.keys.iter().map(|k| self.to_string(*k)).collect() + } else { + vec![] + } + } + + pub fn length(&self, obj: &ExId) -> usize { + if let Ok(obj) = self.exid_to_obj(obj) { + self.ops.search(obj, query::Len::new()).len + } else { + 0 + } + } + + pub fn length_at(&self, obj: &ExId, heads: &[ChangeHash]) -> usize { + if let Ok(obj) = self.exid_to_obj(obj) { + let clock = self.clock_at(heads); + self.ops.search(obj, query::LenAt::new(clock)).len + } else { + 0 + } + } + + // set(obj, prop, value) - value can be scalar or objtype + // del(obj, prop) + // inc(obj, prop, value) + // insert(obj, index, value) + + /// Set the value of property `P` to value `V` in object `obj`. 
+ /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub fn set, V: Into>( + &mut self, + obj: &ExId, + prop: P, + value: V, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj)?; + let value = value.into(); + if let Some(id) = self.local_op(obj, prop.into(), value.into())? { + Ok(Some(self.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn exid_to_obj(&self, id: &ExId) -> Result { + match id { + ExId::Root => Ok(ObjId::root()), + ExId::Id(ctr, actor, idx) => { + // do a direct get here b/c this could be foriegn and not be within the array + // bounds + if self.ops.m.actors.cache.get(*idx) == Some(actor) { + Ok(ObjId(OpId(*ctr, *idx))) + } else { + // FIXME - make a real error + let idx = self + .ops + .m + .actors + .lookup(actor) + .ok_or(AutomergeError::Fail)?; + Ok(ObjId(OpId(*ctr, idx))) + } + } + } + } + + fn id_to_exid(&self, id: OpId) -> ExId { + ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) + } + + pub fn insert>( + &mut self, + obj: &ExId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj)?; + if let Some(id) = self.do_insert(obj, index, value)? { + Ok(Some(self.id_to_exid(id))) + } else { + Ok(None) + } + } + + fn do_insert>( + &mut self, + obj: ObjId, + index: usize, + value: V, + ) -> Result, AutomergeError> { + let id = self.next_id(); + + let query = self.ops.search(obj, query::InsertNth::new(index)); + + let key = query.key()?; + let value = value.into(); + let action = value.into(); + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: self.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + + self.ops.insert(query.pos, op.clone()); + self.tx().operations.push(op); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub fn inc>( + &mut self, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + let obj = self.exid_to_obj(obj)?; + self.local_op(obj, prop.into(), OpType::Inc(value))?; + Ok(()) + } + + pub fn del>(&mut self, obj: &ExId, prop: P) -> Result<(), AutomergeError> { + let obj = self.exid_to_obj(obj)?; + self.local_op(obj, prop.into(), OpType::Del)?; + Ok(()) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + pub fn splice( + &mut self, + obj: &ExId, + mut pos: usize, + del: usize, + vals: Vec, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj)?; + for _ in 0..del { + // del() + self.local_op(obj, pos.into(), OpType::Del)?; + } + let mut results = Vec::new(); + for v in vals { + // insert() + let id = self.do_insert(obj, pos, v)?; + if let Some(id) = id { + results.push(self.id_to_exid(id)); + } + pos += 1; + } + Ok(results) + } + + pub fn splice_text( + &mut self, + obj: &ExId, + pos: usize, + del: usize, + text: &str, + ) -> Result, AutomergeError> { + let mut vals = vec![]; + for c in text.to_owned().graphemes(true) { + vals.push(c.into()); + } + self.splice(obj, pos, del, vals) + } + + pub fn text(&self, obj: &ExId) -> Result { + let obj = self.exid_to_obj(obj)?; + let query = self.ops.search(obj, query::ListVals::new(obj)); + let mut buffer = String::new(); + for q in &query.ops { + if let OpType::Set(ScalarValue::Str(s)) = &q.action { + buffer.push_str(s); + } + } + Ok(buffer) + } + + pub fn text_at(&self, obj: &ExId, heads: &[ChangeHash]) -> Result { + let obj = self.exid_to_obj(obj)?; + let clock = self.clock_at(heads); + let query = self.ops.search(obj, query::ListValsAt::new(clock)); + let mut buffer = String::new(); + for q in &query.ops { + if let OpType::Set(ScalarValue::Str(s)) = &q.action { + buffer.push_str(s); + } + } + Ok(buffer) + } + + // TODO - I need to return these OpId's here **only** to get + // the legacy conflicts format of { [opid]: value } + // Something better? + pub fn value>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + Ok(self.values(obj, prop.into())?.first().cloned()) + } + + pub fn value_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + Ok(self.values_at(obj, prop, heads)?.first().cloned()) + } + + pub fn values>( + &self, + obj: &ExId, + prop: P, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj)?; + let result = match prop.into() { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(obj, query::Prop::new(obj, p)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => self + .ops + .search(obj, query::Nth::new(n)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect(), + }; + Ok(result) + } + + pub fn values_at>( + &self, + obj: &ExId, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + let prop = prop.into(); + let obj = self.exid_to_obj(obj)?; + let clock = self.clock_at(heads); + let result = match prop { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(obj, query::PropAt::new(p, clock)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => self + .ops + .search(obj, query::NthAt::new(n, clock)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect(), + }; + Ok(result) + } + + pub fn load(data: &[u8]) -> Result { + let changes = Change::load_document(data)?; + let mut doc = Self::new(); + doc.apply_changes(&changes)?; + Ok(doc) + } + + pub fn load_incremental(&mut self, data: &[u8]) -> Result { + let changes = Change::load_document(data)?; + let start = self.ops.len(); + self.apply_changes(&changes)?; + let delta = self.ops.len() - start; + Ok(delta) + } + 
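// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor commentary, not part of the patch).
// A minimal example of the ExId-based API this new automerge.rs introduces,
// mirroring the tests further down in this diff. It assumes the crate-level
// re-exports shown in the lib.rs hunk below (`Automerge`, `AutomergeError`,
// `Value`, and `ExId` re-exported as `ObjId`); anything beyond that is an
// assumption, not something this patch guarantees.
use automerge::{Automerge, AutomergeError, ObjId, Value};

fn exid_api_sketch() -> Result<(), AutomergeError> {
    let mut doc = Automerge::new();
    // set() now takes the object handle by reference and returns Some(handle)
    // only when it creates a nested object (map, list or text).
    let list = doc.set(&ObjId::Root, "items", Value::list())?.unwrap();
    doc.insert(&list, 0, "a")?; // scalar inserts yield Ok(None)
    doc.insert(&list, 1, "b")?;
    let heads = doc.commit(None, None); // closes the implicit transaction
    assert_eq!(doc.length(&list), 2);
    assert_eq!(doc.length_at(&list, &heads), 2);
    Ok(())
}
// ---------------------------------------------------------------------------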
+ pub fn apply_changes(&mut self, changes: &[Change]) -> Result { + self.ensure_transaction_closed(); + for c in changes { + if !self.history_index.contains_key(&c.hash) { + if self.is_causally_ready(c) { + self.apply_change(c.clone()); + } else { + self.queue.push(c.clone()); + while let Some(c) = self.pop_next_causally_ready_change() { + self.apply_change(c); + } + } + } + } + Ok(Patch {}) + } + + pub fn apply_change(&mut self, change: Change) { + self.ensure_transaction_closed(); + let ops = self.import_ops(&change, self.history.len()); + self.update_history(change); + for op in ops { + self.insert_op(op); + } + } + + fn local_op( + &mut self, + obj: ObjId, + prop: Prop, + action: OpType, + ) -> Result, AutomergeError> { + match prop { + Prop::Map(s) => self.local_map_op(obj, s, action), + Prop::Seq(n) => self.local_list_op(obj, n, action), + } + } + + fn local_map_op( + &mut self, + obj: ObjId, + prop: String, + action: OpType, + ) -> Result, AutomergeError> { + if prop.is_empty() { + return Err(AutomergeError::EmptyStringKey); + } + + let id = self.next_id(); + let prop = self.ops.m.props.cache(prop); + let query = self.ops.search(obj, query::Prop::new(obj, prop)); + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let pred = query.ops.iter().map(|op| op.id).collect(); + + let op = Op { + change: self.history.len(), + id, + action, + obj, + key: Key::Map(prop), + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + fn local_list_op( + &mut self, + obj: ObjId, + index: usize, + action: OpType, + ) -> Result, AutomergeError> { + let query = self.ops.search(obj, query::Nth::new(index)); + + let id = self.next_id(); + let pred = query.ops.iter().map(|op| op.id).collect(); + let key = query.key()?; + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + change: self.history.len(), + id, + action, + obj, + key, + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(op, query.pos, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + fn is_causally_ready(&self, change: &Change) -> bool { + change + .deps + .iter() + .all(|d| self.history_index.contains_key(d)) + } + + fn pop_next_causally_ready_change(&mut self) -> Option { + let mut index = 0; + while index < self.queue.len() { + if self.is_causally_ready(&self.queue[index]) { + return Some(self.queue.swap_remove(index)); + } + index += 1; + } + None + } + + fn import_ops(&mut self, change: &Change, change_id: usize) -> Vec { + change + .iter_ops() + .enumerate() + .map(|(i, c)| { + let actor = self.ops.m.actors.cache(change.actor_id().clone()); + let id = OpId(change.start_op + i as u64, actor); + let obj = match c.obj { + legacy::ObjectId::Root => ObjId::root(), + legacy::ObjectId::Id(id) => ObjId(OpId(id.0, self.ops.m.actors.cache(id.1))), + }; + let pred = c + .pred + .iter() + .map(|i| OpId(i.0, self.ops.m.actors.cache(i.1.clone()))) + .collect(); + let key = match &c.key { + legacy::Key::Map(n) => Key::Map(self.ops.m.props.cache(n.to_string())), + legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(types::HEAD), + legacy::Key::Seq(legacy::ElementId::Id(i)) => { + Key::Seq(ElemId(OpId(i.0, self.ops.m.actors.cache(i.1.clone())))) + } + }; + Op { + change: 
change_id, + id, + action: c.action, + obj, + key, + succ: Default::default(), + pred, + insert: c.insert, + } + }) + .collect() + } + + /// Takes all the changes in `other` which are not in `self` and applies them + pub fn merge(&mut self, other: &mut Self) { + // TODO: Make this fallible and figure out how to do this transactionally + other.ensure_transaction_closed(); + let changes = self + .get_changes_added(other) + .into_iter() + .cloned() + .collect::>(); + self.apply_changes(&changes).unwrap(); + } + + pub fn save(&mut self) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + // TODO - would be nice if I could pass an iterator instead of a collection here + let c: Vec<_> = self.history.iter().map(|c| c.decode()).collect(); + let ops: Vec<_> = self.ops.iter().cloned().collect(); + // TODO - can we make encode_document error free + let bytes = encode_document( + &c, + ops.as_slice(), + &self.ops.m.actors, + &self.ops.m.props.cache, + ); + if bytes.is_ok() { + self.saved = self.get_heads().iter().copied().collect(); + } + bytes + } + + // should this return an empty vec instead of None? + pub fn save_incremental(&mut self) -> Vec { + self.ensure_transaction_closed(); + let changes = self._get_changes(self.saved.as_slice()); + let mut bytes = vec![]; + for c in changes { + bytes.extend(c.raw_bytes()); + } + if !bytes.is_empty() { + self.saved = self._get_heads().iter().copied().collect(); + } + bytes + } + + /// Filter the changes down to those that are not transitive dependencies of the heads. + /// + /// Thus a graph with these heads has not seen the remaining changes. + pub(crate) fn filter_changes(&self, heads: &[ChangeHash], changes: &mut HashSet) { + // Reduce the working set to find to those which we may be able to find. + // This filters out those hashes that are successors of or concurrent with all of the + // heads. + // This can help in avoiding traversing the entire graph back to the roots when we try to + // search for a hash we can know won't be found there. + let max_head_index = heads + .iter() + .map(|h| self.history_index.get(h).unwrap_or(&0)) + .max() + .unwrap_or(&0); + let mut may_find: HashSet = changes + .iter() + .filter(|hash| { + let change_index = self.history_index.get(hash).unwrap_or(&0); + change_index <= max_head_index + }) + .copied() + .collect(); + + if may_find.is_empty() { + return; + } + + let mut queue: VecDeque<_> = heads.iter().collect(); + let mut seen = HashSet::new(); + while let Some(hash) = queue.pop_front() { + if seen.contains(hash) { + continue; + } + seen.insert(hash); + + let removed = may_find.remove(hash); + changes.remove(hash); + if may_find.is_empty() { + break; + } + + for dep in self + .history_index + .get(hash) + .and_then(|i| self.history.get(*i)) + .map(|c| c.deps.as_slice()) + .unwrap_or_default() + { + // if we just removed something from our hashes then it is likely there is more + // down here so do a quick inspection on the children. + // When we don't remove anything it is less likely that there is something down + // that chain so delay it. 
+ if removed { + queue.push_front(dep); + } else { + queue.push_back(dep); + } + } + } + } + + pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec { + self.ensure_transaction_closed(); + self._get_missing_deps(heads) + } + + pub(crate) fn _get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { + let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect(); + let mut missing = HashSet::new(); + + for head in self.queue.iter().flat_map(|change| &change.deps) { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + for head in heads { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + let mut missing = missing + .into_iter() + .filter(|hash| !in_queue.contains(hash)) + .copied() + .collect::>(); + missing.sort(); + missing + } + + fn get_changes_fast(&self, have_deps: &[ChangeHash]) -> Option> { + if have_deps.is_empty() { + return Some(self.history.iter().collect()); + } + + let lowest_idx = have_deps + .iter() + .filter_map(|h| self.history_index.get(h)) + .min()? + + 1; + + let mut missing_changes = vec![]; + let mut has_seen: HashSet<_> = have_deps.iter().collect(); + for change in &self.history[lowest_idx..] { + let deps_seen = change.deps.iter().filter(|h| has_seen.contains(h)).count(); + if deps_seen > 0 { + if deps_seen != change.deps.len() { + // future change depends on something we haven't seen - fast path cant work + return None; + } + missing_changes.push(change); + has_seen.insert(&change.hash); + } + } + + // if we get to the end and there is a head we haven't seen then fast path cant work + if self._get_heads().iter().all(|h| has_seen.contains(h)) { + Some(missing_changes) + } else { + None + } + } + + fn get_changes_slow(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + let mut stack: Vec<_> = have_deps.iter().collect(); + let mut has_seen = HashSet::new(); + while let Some(hash) = stack.pop() { + if has_seen.contains(&hash) { + continue; + } + if let Some(change) = self + .history_index + .get(hash) + .and_then(|i| self.history.get(*i)) + { + stack.extend(change.deps.iter()); + } + has_seen.insert(hash); + } + self.history + .iter() + .filter(|change| !has_seen.contains(&change.hash)) + .collect() + } + + pub fn get_last_local_change(&mut self) -> Option<&Change> { + self.ensure_transaction_closed(); + if let Some(actor) = &self.actor { + let actor = &self.ops.m.actors[*actor]; + return self.history.iter().rev().find(|c| c.actor_id() == actor); + } + None + } + + pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> { + self.ensure_transaction_closed(); + self._get_changes(have_deps) + } + + pub(crate) fn _get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + if let Some(changes) = self.get_changes_fast(have_deps) { + changes + } else { + self.get_changes_slow(have_deps) + } + } + + fn clock_at(&self, heads: &[ChangeHash]) -> Clock { + let mut clock = Clock::new(); + let mut seen = HashSet::new(); + let mut to_see = heads.to_vec(); + // FIXME - faster + while let Some(hash) = to_see.pop() { + if let Some(c) = self._get_change_by_hash(&hash) { + for h in &c.deps { + if !seen.contains(h) { + to_see.push(*h); + } + } + let actor = self.ops.m.actors.lookup(c.actor_id()).unwrap(); + clock.include(actor, c.max_op()); + seen.insert(hash); + } + } + clock + } + + pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> { + self.ensure_transaction_closed(); + self._get_change_by_hash(hash) + } + + pub(crate) fn _get_change_by_hash(&self, hash: 
&ChangeHash) -> Option<&Change> { + self.history_index + .get(hash) + .and_then(|index| self.history.get(*index)) + } + + pub fn get_changes_added<'a>(&mut self, other: &'a Self) -> Vec<&'a Change> { + self.ensure_transaction_closed(); + self._get_changes_added(other) + } + + pub(crate) fn _get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { + // Depth-first traversal from the heads through the dependency graph, + // until we reach a change that is already present in other + let mut stack: Vec<_> = other._get_heads(); + let mut seen_hashes = HashSet::new(); + let mut added_change_hashes = Vec::new(); + while let Some(hash) = stack.pop() { + if !seen_hashes.contains(&hash) && self._get_change_by_hash(&hash).is_none() { + seen_hashes.insert(hash); + added_change_hashes.push(hash); + if let Some(change) = other._get_change_by_hash(&hash) { + stack.extend(&change.deps); + } + } + } + // Return those changes in the reverse of the order in which the depth-first search + // found them. This is not necessarily a topological sort, but should usually be close. + added_change_hashes.reverse(); + added_change_hashes + .into_iter() + .filter_map(|h| other._get_change_by_hash(&h)) + .collect() + } + + pub fn get_heads(&mut self) -> Vec { + self.ensure_transaction_closed(); + self._get_heads() + } + + pub(crate) fn _get_heads(&self) -> Vec { + let mut deps: Vec<_> = self.deps.iter().copied().collect(); + deps.sort_unstable(); + deps + } + + fn get_hash(&mut self, actor: usize, seq: u64) -> Result { + self.states + .get(&actor) + .and_then(|v| v.get(seq as usize - 1)) + .and_then(|&i| self.history.get(i)) + .map(|c| c.hash) + .ok_or(AutomergeError::InvalidSeq(seq)) + } + + fn update_history(&mut self, change: Change) -> usize { + self.max_op = std::cmp::max(self.max_op, change.start_op + change.len() as u64 - 1); + + self.update_deps(&change); + + let history_index = self.history.len(); + + self.states + .entry(self.ops.m.actors.cache(change.actor_id().clone())) + .or_default() + .push(history_index); + + self.history_index.insert(change.hash, history_index); + self.history.push(change); + + history_index + } + + fn update_deps(&mut self, change: &Change) { + for d in &change.deps { + self.deps.remove(d); + } + self.deps.insert(change.hash); + } + + pub fn import(&self, s: &str) -> Result { + if s == "_root" { + Ok(ExId::Root) + } else { + let n = s + .find('@') + .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; + let counter = s[0..n] + .parse() + .map_err(|_| AutomergeError::InvalidOpId(s.to_owned()))?; + let actor = ActorId::from(hex::decode(&s[(n + 1)..]).unwrap()); + let actor = self + .ops + .m + .actors + .lookup(&actor) + .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; + Ok(ExId::Id( + counter, + self.ops.m.actors.cache[actor].clone(), + actor, + )) + } + } + + fn to_string(&self, id: E) -> String { + match id.export() { + Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.actors[id.actor()]), + Export::Prop(index) => self.ops.m.props[index].clone(), + Export::Special(s) => s, + } + } + + pub fn dump(&self) { + log!( + " {:12} {:12} {:12} {} {} {}", + "id", + "obj", + "key", + "value", + "pred", + "succ" + ); + for i in self.ops.iter() { + let id = self.to_string(i.id); + let obj = self.to_string(i.obj); + let key = match i.key { + Key::Map(n) => self.ops.m.props[n].clone(), + Key::Seq(n) => self.to_string(n), + }; + let value: String = match &i.action { + OpType::Set(value) => format!("{}", value), + OpType::Make(obj) => format!("make{}", obj), + 
OpType::Inc(obj) => format!("inc{}", obj), + OpType::Del => format!("del{}", 0), + }; + let pred: Vec<_> = i.pred.iter().map(|id| self.to_string(*id)).collect(); + let succ: Vec<_> = i.succ.iter().map(|id| self.to_string(*id)).collect(); + log!( + " {:12} {:12} {:12} {} {:?} {:?}", + id, + obj, + key, + value, + pred, + succ + ); + } + } + + #[cfg(feature = "optree-visualisation")] + pub fn visualise_optree(&self) -> String { + self.ops.visualise() + } +} + +#[derive(Debug, Clone)] +pub(crate) struct Transaction { + pub actor: usize, + pub seq: u64, + pub start_op: u64, + pub time: i64, + pub message: Option, + pub extra_bytes: Vec, + pub hash: Option, + pub deps: Vec, + pub operations: Vec, +} + +impl Default for Automerge { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::*; + use std::convert::TryInto; + + #[test] + fn insert_op() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + doc.set(&ROOT, "hello", "world")?; + assert!(doc.pending_ops() == 1); + doc.value(&ROOT, "hello")?; + Ok(()) + } + + #[test] + fn test_list() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + let list_id = doc.set(&ROOT, "items", Value::list())?.unwrap(); + doc.set(&ROOT, "zzz", "zzzval")?; + assert!(doc.value(&ROOT, "items")?.unwrap().1 == list_id); + doc.insert(&list_id, 0, "a")?; + doc.insert(&list_id, 0, "b")?; + doc.insert(&list_id, 2, "c")?; + doc.insert(&list_id, 1, "d")?; + assert!(doc.value(&list_id, 0)?.unwrap().0 == "b".into()); + assert!(doc.value(&list_id, 1)?.unwrap().0 == "d".into()); + assert!(doc.value(&list_id, 2)?.unwrap().0 == "a".into()); + assert!(doc.value(&list_id, 3)?.unwrap().0 == "c".into()); + assert!(doc.length(&list_id) == 4); + doc.save()?; + Ok(()) + } + + #[test] + fn test_del() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + doc.set(&ROOT, "xxx", "xxx")?; + assert!(!doc.values(&ROOT, "xxx")?.is_empty()); + doc.del(&ROOT, "xxx")?; + assert!(doc.values(&ROOT, "xxx")?.is_empty()); + Ok(()) + } + + #[test] + fn test_inc() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set(&ROOT, "counter", Value::counter(10))?; + assert!(doc.value(&ROOT, "counter")?.unwrap().0 == Value::counter(10)); + doc.inc(&ROOT, "counter", 10)?; + assert!(doc.value(&ROOT, "counter")?.unwrap().0 == Value::counter(20)); + doc.inc(&ROOT, "counter", -5)?; + assert!(doc.value(&ROOT, "counter")?.unwrap().0 == Value::counter(15)); + Ok(()) + } + + #[test] + fn test_save_incremental() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + + doc.set(&ROOT, "foo", 1)?; + + let save1 = doc.save().unwrap(); + + doc.set(&ROOT, "bar", 2)?; + + let save2 = doc.save_incremental(); + + doc.set(&ROOT, "baz", 3)?; + + let save3 = doc.save_incremental(); + + let mut save_a: Vec = vec![]; + save_a.extend(&save1); + save_a.extend(&save2); + save_a.extend(&save3); + + assert!(doc.save_incremental().is_empty()); + + let save_b = doc.save().unwrap(); + + assert!(save_b.len() < save_a.len()); + + let mut doc_a = Automerge::load(&save_a)?; + let mut doc_b = Automerge::load(&save_b)?; + + assert!(doc_a.values(&ROOT, "baz")? 
== doc_b.values(&ROOT, "baz")?); + + assert!(doc_a.save().unwrap() == doc_b.save().unwrap()); + + Ok(()) + } + + #[test] + fn test_save_text() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + let text = doc.set(&ROOT, "text", Value::text())?.unwrap(); + let heads1 = doc.commit(None, None); + doc.splice_text(&text, 0, 0, "hello world")?; + let heads2 = doc.commit(None, None); + doc.splice_text(&text, 6, 0, "big bad ")?; + let heads3 = doc.commit(None, None); + + assert!(&doc.text(&text)? == "hello big bad world"); + assert!(&doc.text_at(&text, &heads1)?.is_empty()); + assert!(&doc.text_at(&text, &heads2)? == "hello world"); + assert!(&doc.text_at(&text, &heads3)? == "hello big bad world"); + + Ok(()) + } + + #[test] + fn test_props_vals_at() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor("aaaa".try_into().unwrap()); + doc.set(&ROOT, "prop1", "val1")?; + doc.commit(None, None); + let heads1 = doc.get_heads(); + doc.set(&ROOT, "prop1", "val2")?; + doc.commit(None, None); + let heads2 = doc.get_heads(); + doc.set(&ROOT, "prop2", "val3")?; + doc.commit(None, None); + let heads3 = doc.get_heads(); + doc.del(&ROOT, "prop1")?; + doc.commit(None, None); + let heads4 = doc.get_heads(); + doc.set(&ROOT, "prop3", "val4")?; + doc.commit(None, None); + let heads5 = doc.get_heads(); + assert!(doc.keys_at(&ROOT, &heads1) == vec!["prop1".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); + assert!(doc.value_at(&ROOT, "prop2", &heads1)? == None); + assert!(doc.value_at(&ROOT, "prop3", &heads1)? == None); + + assert!(doc.keys_at(&ROOT, &heads2) == vec!["prop1".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(&ROOT, "prop2", &heads2)? == None); + assert!(doc.value_at(&ROOT, "prop3", &heads2)? == None); + + assert!(doc.keys_at(&ROOT, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(&ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(&ROOT, "prop3", &heads3)? == None); + + assert!(doc.keys_at(&ROOT, &heads4) == vec!["prop2".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads4)? == None); + assert!(doc.value_at(&ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(&ROOT, "prop3", &heads4)? == None); + + assert!(doc.keys_at(&ROOT, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]); + assert!(doc.value_at(&ROOT, "prop1", &heads5)? == None); + assert!(doc.value_at(&ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(&ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); + + assert!(doc.keys_at(&ROOT, &[]).is_empty()); + assert!(doc.value_at(&ROOT, "prop1", &[])? == None); + assert!(doc.value_at(&ROOT, "prop2", &[])? == None); + assert!(doc.value_at(&ROOT, "prop3", &[])? 
== None); + Ok(()) + } + + #[test] + fn test_len_at() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor("aaaa".try_into().unwrap()); + + let list = doc.set(&ROOT, "list", Value::list())?.unwrap(); + let heads1 = doc.commit(None, None); + + doc.insert(&list, 0, Value::int(10))?; + let heads2 = doc.commit(None, None); + + doc.set(&list, 0, Value::int(20))?; + doc.insert(&list, 0, Value::int(30))?; + let heads3 = doc.commit(None, None); + + doc.set(&list, 1, Value::int(40))?; + doc.insert(&list, 1, Value::int(50))?; + let heads4 = doc.commit(None, None); + + doc.del(&list, 2)?; + let heads5 = doc.commit(None, None); + + doc.del(&list, 0)?; + let heads6 = doc.commit(None, None); + + assert!(doc.length_at(&list, &heads1) == 0); + assert!(doc.value_at(&list, 0, &heads1)?.is_none()); + + assert!(doc.length_at(&list, &heads2) == 1); + assert!(doc.value_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); + + assert!(doc.length_at(&list, &heads3) == 2); + assert!(doc.value_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); + + assert!(doc.length_at(&list, &heads4) == 3); + assert!(doc.value_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50)); + assert!(doc.value_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40)); + + assert!(doc.length_at(&list, &heads5) == 2); + assert!(doc.value_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); + + assert!(doc.length_at(&list, &heads6) == 1); + assert!(doc.value_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); + + Ok(()) + } +} diff --git a/automerge/src/change.rs b/automerge/src/change.rs index 4d3984e5..846cc71d 100644 --- a/automerge/src/change.rs +++ b/automerge/src/change.rs @@ -1,3 +1,4 @@ +use crate::automerge::Transaction; use crate::columnar::{ ChangeEncoder, ChangeIterator, ColumnEncoder, DepsIterator, DocChange, DocOp, DocOpEncoder, DocOpIterator, OperationIterator, COLUMN_TYPE_DEFLATE, @@ -5,11 +6,11 @@ use crate::columnar::{ use crate::decoding; use crate::decoding::{Decodable, InvalidChangeError}; use crate::encoding::{Encodable, DEFLATE_MIN_SIZE}; +use crate::error::AutomergeError; +use crate::indexed_cache::IndexedCache; use crate::legacy as amp; -use crate::{ - ActorId, AutomergeError, ElemId, IndexedCache, Key, ObjId, Op, OpId, OpType, Transaction, HEAD, - ROOT, -}; +use crate::types; +use crate::types::{ActorId, ElemId, Key, ObjId, Op, OpId, OpType}; use core::ops::Range; use flate2::{ bufread::{DeflateDecoder, DeflateEncoder}, @@ -417,7 +418,7 @@ fn increment_range_map(ranges: &mut HashMap>, len: usize) { } fn export_objid(id: &ObjId, actors: &IndexedCache) -> amp::ObjectId { - if id.0 == ROOT { + if id == &ObjId::root() { amp::ObjectId::Root } else { export_opid(&id.0, actors).into() @@ -425,7 +426,7 @@ fn export_objid(id: &ObjId, actors: &IndexedCache) -> amp::ObjectId { } fn export_elemid(id: &ElemId, actors: &IndexedCache) -> amp::ElementId { - if id == &HEAD { + if id == &types::HEAD { amp::ElementId::Head } else { export_opid(&id.0, actors).into() diff --git a/automerge/src/clock.rs b/automerge/src/clock.rs index 979885b3..d01c7748 100644 --- a/automerge/src/clock.rs +++ b/automerge/src/clock.rs @@ -1,4 +1,4 @@ -use crate::OpId; +use crate::types::OpId; use fxhash::FxBuildHasher; use std::cmp; use std::collections::HashMap; diff --git a/automerge/src/columnar.rs 
b/automerge/src/columnar.rs index 3a1df3cb..c821b9bb 100644 --- a/automerge/src/columnar.rs +++ b/automerge/src/columnar.rs @@ -11,8 +11,7 @@ use std::{ str, }; -use crate::ROOT; -use crate::{ActorId, ElemId, Key, ObjId, ObjType, OpId, OpType, ScalarValue}; +use crate::types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue}; use crate::legacy as amp; use amp::SortedVec; @@ -20,10 +19,10 @@ use flate2::bufread::DeflateDecoder; use smol_str::SmolStr; use tracing::instrument; +use crate::indexed_cache::IndexedCache; use crate::{ decoding::{BooleanDecoder, Decodable, Decoder, DeltaDecoder, RleDecoder}, encoding::{BooleanEncoder, ColData, DeltaEncoder, Encodable, RleEncoder}, - IndexedCache, Op, }; impl Encodable for Action { @@ -846,7 +845,7 @@ impl ObjEncoder { fn append(&mut self, obj: &ObjId, actors: &[usize]) { match obj.0 { - ROOT => { + OpId(ctr, _) if ctr == 0 => { self.actor.append_null(); self.ctr.append_null(); } @@ -951,7 +950,7 @@ impl ChangeEncoder { index_by_hash.insert(hash, index); } self.actor - .append_value(actors.lookup(change.actor_id.clone()).unwrap()); //actors.iter().position(|a| a == &change.actor_id).unwrap()); + .append_value(actors.lookup(&change.actor_id).unwrap()); //actors.iter().position(|a| a == &change.actor_id).unwrap()); self.seq.append_value(change.seq); // FIXME iterops.count is crazy slow self.max_op diff --git a/automerge/src/error.rs b/automerge/src/error.rs index ddb7092b..32eb9d1d 100644 --- a/automerge/src/error.rs +++ b/automerge/src/error.rs @@ -1,6 +1,6 @@ use crate::decoding; +use crate::types::ScalarValue; use crate::value::DataType; -use crate::ScalarValue; use thiserror::Error; #[derive(Error, Debug)] @@ -17,6 +17,8 @@ pub enum AutomergeError { InvalidSeq(u64), #[error("index {0} is out of bounds")] InvalidIndex(usize), + #[error("generic automerge error")] + Fail, } impl From for AutomergeError { diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs new file mode 100644 index 00000000..d79b35ce --- /dev/null +++ b/automerge/src/exid.rs @@ -0,0 +1,33 @@ +use crate::ActorId; +use std::fmt; + +#[derive(Debug, Clone)] +pub enum ExId { + Root, + Id(u64, ActorId, usize), +} + +impl PartialEq for ExId { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (ExId::Root, ExId::Root) => true, + (ExId::Id(ctr1, actor1, _), ExId::Id(ctr2, actor2, _)) + if ctr1 == ctr2 && actor1 == actor2 => + { + true + } + _ => false, + } + } +} + +impl Eq for ExId {} + +impl fmt::Display for ExId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ExId::Root => write!(f, "_root"), + ExId::Id(ctr, actor, _) => write!(f, "{}@{}", ctr, actor), + } + } +} diff --git a/automerge/src/indexed_cache.rs b/automerge/src/indexed_cache.rs index 21ffd75b..b11f39ad 100644 --- a/automerge/src/indexed_cache.rs +++ b/automerge/src/indexed_cache.rs @@ -31,8 +31,8 @@ where } } - pub fn lookup(&self, item: T) -> Option { - self.lookup.get(&item).cloned() + pub fn lookup(&self, item: &T) -> Option { + self.lookup.get(item).cloned() } pub fn len(&self) -> usize { diff --git a/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs index 0968d290..91e07298 100644 --- a/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -2,8 +2,8 @@ mod serde_impls; mod utility_impls; use std::iter::FromIterator; +pub(crate) use crate::types::{ActorId, ChangeHash, ObjType, OpType, ScalarValue}; pub(crate) use crate::value::DataType; -pub(crate) use crate::{ActorId, ChangeHash, ObjType, OpType, ScalarValue}; use 
serde::{Deserialize, Serialize}; use smol_str::SmolStr; diff --git a/automerge/src/legacy/serde_impls/scalar_value.rs b/automerge/src/legacy/serde_impls/scalar_value.rs index c04d359a..7a08f697 100644 --- a/automerge/src/legacy/serde_impls/scalar_value.rs +++ b/automerge/src/legacy/serde_impls/scalar_value.rs @@ -1,7 +1,7 @@ use serde::{de, Deserialize, Deserializer}; use smol_str::SmolStr; -use crate::ScalarValue; +use crate::types::ScalarValue; impl<'de> Deserialize<'de> for ScalarValue { fn deserialize(deserializer: D) -> Result diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs index c2595c68..27de9c39 100644 --- a/automerge/src/lib.rs +++ b/automerge/src/lib.rs @@ -1,7 +1,3 @@ -extern crate hex; -extern crate uuid; -extern crate web_sys; - #[macro_export] macro_rules! log { ( $( $t:tt )* ) => { @@ -28,1303 +24,32 @@ macro_rules! __log { } } +mod automerge; mod change; mod clock; mod columnar; mod decoding; mod encoding; +mod error; +mod exid; mod indexed_cache; mod legacy; -mod sync; -#[cfg(feature = "optree-visualisation")] -mod visualisation; - -mod error; mod op_set; mod op_tree; mod query; +mod sync; mod types; mod value; +#[cfg(feature = "optree-visualisation")] +mod visualisation; -use change::{encode_document, export_change}; -use clock::Clock; -use indexed_cache::IndexedCache; -use op_set::OpSet; -use std::collections::{HashMap, HashSet, VecDeque}; -use types::{ElemId, Key, ObjId, Op, HEAD}; -use unicode_segmentation::UnicodeSegmentation; - +pub use crate::automerge::Automerge; pub use change::{decode_change, Change}; pub use error::AutomergeError; +pub use exid::ExId as ObjId; pub use legacy::Change as ExpandedChange; pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState}; -pub use types::{ - ActorId, ChangeHash, Export, Exportable, Importable, ObjType, OpId, OpType, Patch, Peer, Prop, - ROOT, -}; +pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; pub use value::{ScalarValue, Value}; -#[derive(Debug, Clone)] -pub struct Automerge { - queue: Vec, - history: Vec, - history_index: HashMap, - states: HashMap>, - deps: HashSet, - saved: Vec, - ops: OpSet, - actor: Option, - max_op: u64, - transaction: Option, -} - -impl Automerge { - pub fn new() -> Self { - Automerge { - queue: vec![], - history: vec![], - history_index: HashMap::new(), - states: HashMap::new(), - ops: Default::default(), - deps: Default::default(), - saved: Default::default(), - actor: None, - max_op: 0, - transaction: None, - } - } - - pub fn set_actor(&mut self, actor: ActorId) { - self.ensure_transaction_closed(); - self.actor = Some(self.ops.m.actors.cache(actor)) - } - - fn random_actor(&mut self) -> ActorId { - let actor = ActorId::from(uuid::Uuid::new_v4().as_bytes().to_vec()); - self.actor = Some(self.ops.m.actors.cache(actor.clone())); - actor - } - - pub fn get_actor(&mut self) -> ActorId { - if let Some(actor) = self.actor { - self.ops.m.actors[actor].clone() - } else { - self.random_actor() - } - } - - pub fn maybe_get_actor(&self) -> Option { - self.actor.map(|i| self.ops.m.actors[i].clone()) - } - - fn get_actor_index(&mut self) -> usize { - if let Some(actor) = self.actor { - actor - } else { - self.random_actor(); - self.actor.unwrap() // random_actor always sets actor to is_some() - } - } - - pub fn new_with_actor_id(actor: ActorId) -> Self { - let mut am = Automerge { - queue: vec![], - history: vec![], - history_index: HashMap::new(), - states: HashMap::new(), - ops: Default::default(), - deps: Default::default(), - saved: Default::default(), - actor: None, - max_op: 0, 
- transaction: None, - }; - am.actor = Some(am.ops.m.actors.cache(actor)); - am - } - - pub fn pending_ops(&self) -> u64 { - self.transaction - .as_ref() - .map(|t| t.operations.len() as u64) - .unwrap_or(0) - } - - fn tx(&mut self) -> &mut Transaction { - if self.transaction.is_none() { - let actor = self.get_actor_index(); - - let seq = self.states.entry(actor).or_default().len() as u64 + 1; - let mut deps = self.get_heads(); - if seq > 1 { - let last_hash = self.get_hash(actor, seq - 1).unwrap(); - if !deps.contains(&last_hash) { - deps.push(last_hash); - } - } - - self.transaction = Some(Transaction { - actor, - seq, - start_op: self.max_op + 1, - time: 0, - message: None, - extra_bytes: Default::default(), - hash: None, - operations: vec![], - deps, - }); - } - - self.transaction.as_mut().unwrap() - } - - pub fn commit(&mut self, message: Option, time: Option) -> Vec { - let tx = self.tx(); - - if message.is_some() { - tx.message = message; - } - - if let Some(t) = time { - tx.time = t; - } - - tx.operations.len(); - - self.ensure_transaction_closed(); - - self.get_heads() - } - - pub fn ensure_transaction_closed(&mut self) { - if let Some(tx) = self.transaction.take() { - self.update_history(export_change(&tx, &self.ops.m.actors, &self.ops.m.props)); - } - } - - pub fn rollback(&mut self) -> usize { - if let Some(tx) = self.transaction.take() { - let num = tx.operations.len(); - for op in &tx.operations { - for pred_id in &op.pred { - // FIXME - use query to make this fast - if let Some(p) = self.ops.iter().position(|o| o.id == *pred_id) { - self.ops - .replace(op.obj, p, |o| o.succ.retain(|i| i != pred_id)); - } - } - if let Some(pos) = self.ops.iter().position(|o| o.id == op.id) { - self.ops.remove(op.obj, pos); - } - } - num - } else { - 0 - } - } - - fn next_id(&mut self) -> OpId { - let tx = self.tx(); - OpId(tx.start_op + tx.operations.len() as u64, tx.actor) - } - - fn insert_local_op(&mut self, op: Op, pos: usize, succ_pos: &[usize]) { - for succ in succ_pos { - self.ops.replace(op.obj, *succ, |old_op| { - old_op.succ.push(op.id); - }); - } - - if !op.is_del() { - self.ops.insert(pos, op.clone()); - } - - self.tx().operations.push(op); - } - - fn insert_op(&mut self, op: Op) -> Op { - let q = self.ops.search(op.obj, query::SeekOp::new(&op)); - - for i in q.succ { - self.ops - .replace(op.obj, i, |old_op| old_op.succ.push(op.id)); - } - - if !op.is_del() { - self.ops.insert(q.pos, op.clone()); - } - op - } - - // KeysAt::() - // LenAt::() - // PropAt::() - // NthAt::() - - pub fn keys(&self, obj: OpId) -> Vec { - let q = self.ops.search(obj.into(), query::Keys::new()); - q.keys.iter().map(|k| self.export(*k)).collect() - } - - pub fn keys_at(&self, obj: OpId, heads: &[ChangeHash]) -> Vec { - let clock = self.clock_at(heads); - let q = self.ops.search(obj.into(), query::KeysAt::new(clock)); - q.keys.iter().map(|k| self.export(*k)).collect() - } - - pub fn length(&self, obj: OpId) -> usize { - self.ops.search(obj.into(), query::Len::new(obj.into())).len - } - - pub fn length_at(&self, obj: OpId, heads: &[ChangeHash]) -> usize { - let clock = self.clock_at(heads); - self.ops.search(obj.into(), query::LenAt::new(clock)).len - } - - // set(obj, prop, value) - value can be scalar or objtype - // del(obj, prop) - // inc(obj, prop, value) - // insert(obj, index, value) - - /// Set the value of property `P` to value `V` in object `obj`. 
- /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub fn set, V: Into>( - &mut self, - obj: OpId, - prop: P, - value: V, - ) -> Result, AutomergeError> { - let value = value.into(); - self.local_op(obj.into(), prop.into(), value.into()) - } - - pub fn insert>( - &mut self, - obj: OpId, - index: usize, - value: V, - ) -> Result { - let obj = obj.into(); - let id = self.next_id(); - - let query = self.ops.search(obj, query::InsertNth::new(index)); - - let key = query.key()?; - let value = value.into(); - - let op = Op { - change: self.history.len(), - id, - action: value.into(), - obj, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - - self.ops.insert(query.pos, op.clone()); - self.tx().operations.push(op); - - Ok(id) - } - - pub fn inc>( - &mut self, - obj: OpId, - prop: P, - value: i64, - ) -> Result { - match self.local_op(obj.into(), prop.into(), OpType::Inc(value))? { - Some(opid) => Ok(opid), - None => { - panic!("increment should always create a new op") - } - } - } - - pub fn del>(&mut self, obj: OpId, prop: P) -> Result { - // TODO: Should we also no-op multiple delete operations? - match self.local_op(obj.into(), prop.into(), OpType::Del)? { - Some(opid) => Ok(opid), - None => { - panic!("delete should always create a new op") - } - } - } - - /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert - /// the new elements - pub fn splice( - &mut self, - obj: OpId, - mut pos: usize, - del: usize, - vals: Vec, - ) -> Result, AutomergeError> { - for _ in 0..del { - self.del(obj, pos)?; - } - let mut result = Vec::with_capacity(vals.len()); - for v in vals { - result.push(self.insert(obj, pos, v)?); - pos += 1; - } - Ok(result) - } - - pub fn splice_text( - &mut self, - obj: OpId, - pos: usize, - del: usize, - text: &str, - ) -> Result, AutomergeError> { - let mut vals = vec![]; - for c in text.to_owned().graphemes(true) { - vals.push(c.into()); - } - self.splice(obj, pos, del, vals) - } - - pub fn text(&self, obj: OpId) -> Result { - let obj = obj.into(); - let query = self.ops.search(obj, query::ListVals::new(obj)); - let mut buffer = String::new(); - for q in &query.ops { - if let OpType::Set(ScalarValue::Str(s)) = &q.action { - buffer.push_str(s); - } - } - Ok(buffer) - } - - pub fn text_at(&self, obj: OpId, heads: &[ChangeHash]) -> Result { - let clock = self.clock_at(heads); - let obj = obj.into(); - let query = self.ops.search(obj, query::ListValsAt::new(clock)); - let mut buffer = String::new(); - for q in &query.ops { - if let OpType::Set(ScalarValue::Str(s)) = &q.action { - buffer.push_str(s); - } - } - Ok(buffer) - } - - // TODO - I need to return these OpId's here **only** to get - // the legacy conflicts format of { [opid]: value } - // Something better? 
- pub fn value>( - &self, - obj: OpId, - prop: P, - ) -> Result, AutomergeError> { - Ok(self.values(obj, prop.into())?.first().cloned()) - } - - pub fn value_at>( - &self, - obj: OpId, - prop: P, - heads: &[ChangeHash], - ) -> Result, AutomergeError> { - Ok(self.values_at(obj, prop, heads)?.first().cloned()) - } - - pub fn values>( - &self, - obj: OpId, - prop: P, - ) -> Result, AutomergeError> { - let obj = obj.into(); - let result = match prop.into() { - Prop::Map(p) => { - let prop = self.ops.m.props.lookup(p); - if let Some(p) = prop { - self.ops - .search(obj, query::Prop::new(obj, p)) - .ops - .into_iter() - .map(|o| o.into()) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => self - .ops - .search(obj, query::Nth::new(n)) - .ops - .into_iter() - .map(|o| o.into()) - .collect(), - }; - Ok(result) - } - - pub fn values_at>( - &self, - obj: OpId, - prop: P, - heads: &[ChangeHash], - ) -> Result, AutomergeError> { - let prop = prop.into(); - let obj = obj.into(); - let clock = self.clock_at(heads); - let result = match prop { - Prop::Map(p) => { - let prop = self.ops.m.props.lookup(p); - if let Some(p) = prop { - self.ops - .search(obj, query::PropAt::new(p, clock)) - .ops - .into_iter() - .map(|o| o.into()) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => self - .ops - .search(obj, query::NthAt::new(n, clock)) - .ops - .into_iter() - .map(|o| o.into()) - .collect(), - }; - Ok(result) - } - - pub fn load(data: &[u8]) -> Result { - let changes = Change::load_document(data)?; - let mut doc = Self::new(); - doc.apply_changes(&changes)?; - Ok(doc) - } - - pub fn load_incremental(&mut self, data: &[u8]) -> Result { - let changes = Change::load_document(data)?; - let start = self.ops.len(); - self.apply_changes(&changes)?; - let delta = self.ops.len() - start; - Ok(delta) - } - - pub fn apply_changes(&mut self, changes: &[Change]) -> Result { - self.ensure_transaction_closed(); - for c in changes { - if !self.history_index.contains_key(&c.hash) { - if self.is_causally_ready(c) { - self.apply_change(c.clone()); - } else { - self.queue.push(c.clone()); - while let Some(c) = self.pop_next_causally_ready_change() { - self.apply_change(c); - } - } - } - } - Ok(Patch {}) - } - - pub fn apply_change(&mut self, change: Change) { - self.ensure_transaction_closed(); - let ops = self.import_ops(&change, self.history.len()); - self.update_history(change); - for op in ops { - self.insert_op(op); - } - } - - fn local_op( - &mut self, - obj: ObjId, - prop: Prop, - action: OpType, - ) -> Result, AutomergeError> { - match prop { - Prop::Map(s) => self.local_map_op(obj, s, action), - Prop::Seq(n) => self.local_list_op(obj, n, action), - } - } - - fn local_map_op( - &mut self, - obj: ObjId, - prop: String, - action: OpType, - ) -> Result, AutomergeError> { - if prop.is_empty() { - return Err(AutomergeError::EmptyStringKey); - } - - let id = self.next_id(); - let prop = self.ops.m.props.cache(prop); - let query = self.ops.search(obj, query::Prop::new(obj, prop)); - - match (&query.ops[..], &action) { - // If there are no conflicts for this value and the old operation and the new operation are - // both setting the same value then we do nothing. - ( - &[Op { - action: OpType::Set(ref old_v), - .. 
- }], - OpType::Set(new_v), - ) if old_v == new_v => { - return Ok(None); - } - _ => {} - } - - let pred = query.ops.iter().map(|op| op.id).collect(); - - let op = Op { - change: self.history.len(), - id, - action, - obj, - key: Key::Map(prop), - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(op, query.pos, &query.ops_pos); - - Ok(Some(id)) - } - - fn local_list_op( - &mut self, - obj: ObjId, - index: usize, - action: OpType, - ) -> Result, AutomergeError> { - let query = self.ops.search(obj, query::Nth::new(index)); - - let id = self.next_id(); - let pred = query.ops.iter().map(|op| op.id).collect(); - let key = query.key()?; - - match (&query.ops[..], &action) { - // If there are no conflicts for this value and the old operation and the new operation are - // both setting the same value then we do nothing. - ( - &[Op { - action: OpType::Set(ref old_v), - .. - }], - OpType::Set(new_v), - ) if old_v == new_v => { - return Ok(None); - } - _ => {} - } - - let op = Op { - change: self.history.len(), - id, - action, - obj, - key, - succ: Default::default(), - pred, - insert: false, - }; - - self.insert_local_op(op, query.pos, &query.ops_pos); - - Ok(Some(id)) - } - - fn is_causally_ready(&self, change: &Change) -> bool { - change - .deps - .iter() - .all(|d| self.history_index.contains_key(d)) - } - - fn pop_next_causally_ready_change(&mut self) -> Option { - let mut index = 0; - while index < self.queue.len() { - if self.is_causally_ready(&self.queue[index]) { - return Some(self.queue.swap_remove(index)); - } - index += 1; - } - None - } - - fn import_ops(&mut self, change: &Change, change_id: usize) -> Vec { - change - .iter_ops() - .enumerate() - .map(|(i, c)| { - let actor = self.ops.m.actors.cache(change.actor_id().clone()); - let id = OpId(change.start_op + i as u64, actor); - // FIXME dont need to_string() - let obj: ObjId = self.import(&c.obj.to_string()).unwrap(); - let pred = c - .pred - .iter() - .map(|i| self.import(&i.to_string()).unwrap()) - .collect(); - let key = match &c.key { - legacy::Key::Map(n) => Key::Map(self.ops.m.props.cache(n.to_string())), - legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(HEAD), - // FIXME dont need to_string() - legacy::Key::Seq(legacy::ElementId::Id(i)) => { - Key::Seq(self.import(&i.to_string()).unwrap()) - } - }; - Op { - change: change_id, - id, - action: c.action, - obj, - key, - succ: Default::default(), - pred, - insert: c.insert, - } - }) - .collect() - } - - /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge(&mut self, other: &mut Self) { - // TODO: Make this fallible and figure out how to do this transactionally - other.ensure_transaction_closed(); - let changes = self - .get_changes_added(other) - .into_iter() - .cloned() - .collect::>(); - self.apply_changes(&changes).unwrap(); - } - - pub fn save(&mut self) -> Result, AutomergeError> { - self.ensure_transaction_closed(); - // TODO - would be nice if I could pass an iterator instead of a collection here - let c: Vec<_> = self.history.iter().map(|c| c.decode()).collect(); - let ops: Vec<_> = self.ops.iter().cloned().collect(); - // TODO - can we make encode_document error free - let bytes = encode_document( - &c, - ops.as_slice(), - &self.ops.m.actors, - &self.ops.m.props.cache, - ); - if bytes.is_ok() { - self.saved = self.get_heads().iter().copied().collect(); - } - bytes - } - - // should this return an empty vec instead of None? 
- pub fn save_incremental(&mut self) -> Vec { - self.ensure_transaction_closed(); - let changes = self._get_changes(self.saved.as_slice()); - let mut bytes = vec![]; - for c in changes { - bytes.extend(c.raw_bytes()); - } - if !bytes.is_empty() { - self.saved = self._get_heads().iter().copied().collect(); - } - bytes - } - - /// Filter the changes down to those that are not transitive dependencies of the heads. - /// - /// Thus a graph with these heads has not seen the remaining changes. - pub(crate) fn filter_changes(&self, heads: &[ChangeHash], changes: &mut HashSet) { - // Reduce the working set to find to those which we may be able to find. - // This filters out those hashes that are successors of or concurrent with all of the - // heads. - // This can help in avoiding traversing the entire graph back to the roots when we try to - // search for a hash we can know won't be found there. - let max_head_index = heads - .iter() - .map(|h| self.history_index.get(h).unwrap_or(&0)) - .max() - .unwrap_or(&0); - let mut may_find: HashSet = changes - .iter() - .filter(|hash| { - let change_index = self.history_index.get(hash).unwrap_or(&0); - change_index <= max_head_index - }) - .copied() - .collect(); - - if may_find.is_empty() { - return; - } - - let mut queue: VecDeque<_> = heads.iter().collect(); - let mut seen = HashSet::new(); - while let Some(hash) = queue.pop_front() { - if seen.contains(hash) { - continue; - } - seen.insert(hash); - - let removed = may_find.remove(hash); - changes.remove(hash); - if may_find.is_empty() { - break; - } - - for dep in self - .history_index - .get(hash) - .and_then(|i| self.history.get(*i)) - .map(|c| c.deps.as_slice()) - .unwrap_or_default() - { - // if we just removed something from our hashes then it is likely there is more - // down here so do a quick inspection on the children. - // When we don't remove anything it is less likely that there is something down - // that chain so delay it. - if removed { - queue.push_front(dep); - } else { - queue.push_back(dep); - } - } - } - } - - pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec { - self.ensure_transaction_closed(); - self._get_missing_deps(heads) - } - - fn _get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { - let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect(); - let mut missing = HashSet::new(); - - for head in self.queue.iter().flat_map(|change| &change.deps) { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - for head in heads { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - let mut missing = missing - .into_iter() - .filter(|hash| !in_queue.contains(hash)) - .copied() - .collect::>(); - missing.sort(); - missing - } - - fn get_changes_fast(&self, have_deps: &[ChangeHash]) -> Option> { - if have_deps.is_empty() { - return Some(self.history.iter().collect()); - } - - let lowest_idx = have_deps - .iter() - .filter_map(|h| self.history_index.get(h)) - .min()? - + 1; - - let mut missing_changes = vec![]; - let mut has_seen: HashSet<_> = have_deps.iter().collect(); - for change in &self.history[lowest_idx..] 
{ - let deps_seen = change.deps.iter().filter(|h| has_seen.contains(h)).count(); - if deps_seen > 0 { - if deps_seen != change.deps.len() { - // future change depends on something we haven't seen - fast path cant work - return None; - } - missing_changes.push(change); - has_seen.insert(&change.hash); - } - } - - // if we get to the end and there is a head we haven't seen then fast path cant work - if self._get_heads().iter().all(|h| has_seen.contains(h)) { - Some(missing_changes) - } else { - None - } - } - - fn get_changes_slow(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { - let mut stack: Vec<_> = have_deps.iter().collect(); - let mut has_seen = HashSet::new(); - while let Some(hash) = stack.pop() { - if has_seen.contains(&hash) { - continue; - } - if let Some(change) = self - .history_index - .get(hash) - .and_then(|i| self.history.get(*i)) - { - stack.extend(change.deps.iter()); - } - has_seen.insert(hash); - } - self.history - .iter() - .filter(|change| !has_seen.contains(&change.hash)) - .collect() - } - - pub fn get_last_local_change(&mut self) -> Option<&Change> { - self.ensure_transaction_closed(); - if let Some(actor) = &self.actor { - let actor = &self.ops.m.actors[*actor]; - return self.history.iter().rev().find(|c| c.actor_id() == actor); - } - None - } - - pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> { - self.ensure_transaction_closed(); - self._get_changes(have_deps) - } - - fn _get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { - if let Some(changes) = self.get_changes_fast(have_deps) { - changes - } else { - self.get_changes_slow(have_deps) - } - } - - fn clock_at(&self, heads: &[ChangeHash]) -> Clock { - let mut clock = Clock::new(); - let mut seen = HashSet::new(); - let mut to_see = heads.to_vec(); - // FIXME - faster - while let Some(hash) = to_see.pop() { - if let Some(c) = self._get_change_by_hash(&hash) { - for h in &c.deps { - if !seen.contains(h) { - to_see.push(*h); - } - } - let actor = self.ops.m.actors.lookup(c.actor_id().clone()).unwrap(); - clock.include(actor, c.max_op()); - seen.insert(hash); - } - } - clock - } - - pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> { - self.ensure_transaction_closed(); - self._get_change_by_hash(hash) - } - - fn _get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { - self.history_index - .get(hash) - .and_then(|index| self.history.get(*index)) - } - - pub fn get_changes_added<'a>(&mut self, other: &'a Self) -> Vec<&'a Change> { - self.ensure_transaction_closed(); - self._get_changes_added(other) - } - - fn _get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { - // Depth-first traversal from the heads through the dependency graph, - // until we reach a change that is already present in other - let mut stack: Vec<_> = other._get_heads(); - let mut seen_hashes = HashSet::new(); - let mut added_change_hashes = Vec::new(); - while let Some(hash) = stack.pop() { - if !seen_hashes.contains(&hash) && self._get_change_by_hash(&hash).is_none() { - seen_hashes.insert(hash); - added_change_hashes.push(hash); - if let Some(change) = other._get_change_by_hash(&hash) { - stack.extend(&change.deps); - } - } - } - // Return those changes in the reverse of the order in which the depth-first search - // found them. This is not necessarily a topological sort, but should usually be close. 
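// Illustrative aside (not part of the diff): the shape of the traversal in
// `get_changes_added` above, which the comment before this point describes: walk
// depth-first from the other document's heads, stop at anything already known locally,
// then reverse so parents usually come before children. Hashes are u64s and the
// dependency graph is a plain HashMap here, both stand-ins for ChangeHash and the
// real history index.
use std::collections::{HashMap, HashSet};

/// `other_deps` maps each change hash in the other document to its dependencies.
/// `known` is the set of hashes this document already has.
fn changes_added(
    other_heads: &[u64],
    other_deps: &HashMap<u64, Vec<u64>>,
    known: &HashSet<u64>,
) -> Vec<u64> {
    let mut stack: Vec<u64> = other_heads.to_vec();
    let mut seen = HashSet::new();
    let mut added = Vec::new();
    while let Some(hash) = stack.pop() {
        // Stop at anything already visited or already present locally.
        if seen.contains(&hash) || known.contains(&hash) {
            continue;
        }
        seen.insert(hash);
        added.push(hash);
        if let Some(deps) = other_deps.get(&hash) {
            stack.extend(deps.iter().copied());
        }
    }
    // The DFS visits children before their parents, so reversing gives an order that
    // is usually (not always) dependency-first.
    added.reverse();
    added
}

fn main() {
    // History 1 <- 2 <- 3, where this document already has 1.
    let mut deps = HashMap::new();
    deps.insert(2, vec![1]);
    deps.insert(3, vec![2]);
    let known: HashSet<u64> = [1].into_iter().collect();
    assert_eq!(changes_added(&[3], &deps, &known), vec![2, 3]);
}
// End of aside; the diff continues below.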
- added_change_hashes.reverse(); - added_change_hashes - .into_iter() - .filter_map(|h| other._get_change_by_hash(&h)) - .collect() - } - - pub fn get_heads(&mut self) -> Vec { - self.ensure_transaction_closed(); - self._get_heads() - } - - fn _get_heads(&self) -> Vec { - let mut deps: Vec<_> = self.deps.iter().copied().collect(); - deps.sort_unstable(); - deps - } - - fn get_hash(&mut self, actor: usize, seq: u64) -> Result { - self.states - .get(&actor) - .and_then(|v| v.get(seq as usize - 1)) - .and_then(|&i| self.history.get(i)) - .map(|c| c.hash) - .ok_or(AutomergeError::InvalidSeq(seq)) - } - - fn update_history(&mut self, change: Change) -> usize { - self.max_op = std::cmp::max(self.max_op, change.start_op + change.len() as u64 - 1); - - self.update_deps(&change); - - let history_index = self.history.len(); - - self.states - .entry(self.ops.m.actors.cache(change.actor_id().clone())) - .or_default() - .push(history_index); - - self.history_index.insert(change.hash, history_index); - self.history.push(change); - - history_index - } - - fn update_deps(&mut self, change: &Change) { - for d in &change.deps { - self.deps.remove(d); - } - self.deps.insert(change.hash); - } - - pub fn import(&self, s: &str) -> Result { - if let Some(x) = I::from(s) { - Ok(x) - } else { - let n = s - .find('@') - .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; - let counter = s[0..n] - .parse() - .map_err(|_| AutomergeError::InvalidOpId(s.to_owned()))?; - let actor = ActorId::from(hex::decode(&s[(n + 1)..]).unwrap()); - let actor = self - .ops - .m - .actors - .lookup(actor) - .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; - Ok(I::wrap(OpId(counter, actor))) - } - } - - pub fn export(&self, id: E) -> String { - match id.export() { - Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.actors[id.actor()]), - Export::Prop(index) => self.ops.m.props[index].clone(), - Export::Special(s) => s, - } - } - - pub fn dump(&self) { - log!( - " {:12} {:12} {:12} {} {} {}", - "id", - "obj", - "key", - "value", - "pred", - "succ" - ); - for i in self.ops.iter() { - let id = self.export(i.id); - let obj = self.export(i.obj); - let key = match i.key { - Key::Map(n) => self.ops.m.props[n].clone(), - Key::Seq(n) => self.export(n), - }; - let value: String = match &i.action { - OpType::Set(value) => format!("{}", value), - OpType::Make(obj) => format!("make{}", obj), - OpType::Inc(obj) => format!("inc{}", obj), - OpType::Del => format!("del{}", 0), - }; - let pred: Vec<_> = i.pred.iter().map(|id| self.export(*id)).collect(); - let succ: Vec<_> = i.succ.iter().map(|id| self.export(*id)).collect(); - log!( - " {:12} {:12} {:12} {} {:?} {:?}", - id, - obj, - key, - value, - pred, - succ - ); - } - } - - #[cfg(feature = "optree-visualisation")] - pub fn visualise_optree(&self) -> String { - self.ops.visualise() - } -} - -#[derive(Debug, Clone)] -pub(crate) struct Transaction { - pub actor: usize, - pub seq: u64, - pub start_op: u64, - pub time: i64, - pub message: Option, - pub extra_bytes: Vec, - pub hash: Option, - pub deps: Vec, - pub operations: Vec, -} - -impl Default for Automerge { - fn default() -> Self { - Self::new() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::convert::TryInto; - - #[test] - fn insert_op() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - doc.set(ROOT, "hello", "world")?; - assert!(doc.pending_ops() == 1); - doc.value(ROOT, "hello")?; - Ok(()) - } - - #[test] - fn test_list() -> Result<(), 
AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - let list_id = doc.set(ROOT, "items", Value::list())?.unwrap(); - doc.set(ROOT, "zzz", "zzzval")?; - assert!(doc.value(ROOT, "items")?.unwrap().1 == list_id); - doc.insert(list_id, 0, "a")?; - doc.insert(list_id, 0, "b")?; - doc.insert(list_id, 2, "c")?; - doc.insert(list_id, 1, "d")?; - assert!(doc.value(list_id, 0)?.unwrap().0 == "b".into()); - assert!(doc.value(list_id, 1)?.unwrap().0 == "d".into()); - assert!(doc.value(list_id, 2)?.unwrap().0 == "a".into()); - assert!(doc.value(list_id, 3)?.unwrap().0 == "c".into()); - assert!(doc.length(list_id) == 4); - doc.save()?; - Ok(()) - } - - #[test] - fn test_del() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - doc.set(ROOT, "xxx", "xxx")?; - assert!(!doc.values(ROOT, "xxx")?.is_empty()); - doc.del(ROOT, "xxx")?; - assert!(doc.values(ROOT, "xxx")?.is_empty()); - Ok(()) - } - - #[test] - fn test_inc() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let id = doc.set(ROOT, "counter", Value::counter(10))?.unwrap(); - assert!(doc.value(ROOT, "counter")? == Some((Value::counter(10), id))); - doc.inc(ROOT, "counter", 10)?; - assert!(doc.value(ROOT, "counter")? == Some((Value::counter(20), id))); - doc.inc(ROOT, "counter", -5)?; - assert!(doc.value(ROOT, "counter")? == Some((Value::counter(15), id))); - Ok(()) - } - - #[test] - fn test_save_incremental() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - - doc.set(ROOT, "foo", 1)?; - - let save1 = doc.save().unwrap(); - - doc.set(ROOT, "bar", 2)?; - - let save2 = doc.save_incremental(); - - doc.set(ROOT, "baz", 3)?; - - let save3 = doc.save_incremental(); - - let mut save_a: Vec = vec![]; - save_a.extend(&save1); - save_a.extend(&save2); - save_a.extend(&save3); - - assert!(doc.save_incremental().is_empty()); - - let save_b = doc.save().unwrap(); - - assert!(save_b.len() < save_a.len()); - - let mut doc_a = Automerge::load(&save_a)?; - let mut doc_b = Automerge::load(&save_b)?; - - assert!(doc_a.values(ROOT, "baz")? == doc_b.values(ROOT, "baz")?); - - assert!(doc_a.save().unwrap() == doc_b.save().unwrap()); - - Ok(()) - } - - #[test] - fn test_save_text() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let text = doc.set(ROOT, "text", Value::text())?.unwrap(); - let heads1 = doc.commit(None, None); - doc.splice_text(text, 0, 0, "hello world")?; - let heads2 = doc.commit(None, None); - doc.splice_text(text, 6, 0, "big bad ")?; - let heads3 = doc.commit(None, None); - - assert!(&doc.text(text)? == "hello big bad world"); - assert!(&doc.text_at(text, &heads1)?.is_empty()); - assert!(&doc.text_at(text, &heads2)? == "hello world"); - assert!(&doc.text_at(text, &heads3)? 
== "hello big bad world"); - - Ok(()) - } - - #[test] - fn test_props_vals_at() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor("aaaa".try_into().unwrap()); - doc.set(ROOT, "prop1", "val1")?; - doc.commit(None, None); - let heads1 = doc.get_heads(); - doc.set(ROOT, "prop1", "val2")?; - doc.commit(None, None); - let heads2 = doc.get_heads(); - doc.set(ROOT, "prop2", "val3")?; - doc.commit(None, None); - let heads3 = doc.get_heads(); - doc.del(ROOT, "prop1")?; - doc.commit(None, None); - let heads4 = doc.get_heads(); - doc.set(ROOT, "prop3", "val4")?; - doc.commit(None, None); - let heads5 = doc.get_heads(); - assert!(doc.keys_at(ROOT, &heads1) == vec!["prop1".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); - assert!(doc.value_at(ROOT, "prop2", &heads1)? == None); - assert!(doc.value_at(ROOT, "prop3", &heads1)? == None); - - assert!(doc.keys_at(ROOT, &heads2) == vec!["prop1".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(ROOT, "prop2", &heads2)? == None); - assert!(doc.value_at(ROOT, "prop3", &heads2)? == None); - - assert!(doc.keys_at(ROOT, &heads3) == vec!["prop1".to_owned(), "prop2".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); - assert!(doc.value_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads3)? == None); - - assert!(doc.keys_at(ROOT, &heads4) == vec!["prop2".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads4)? == None); - assert!(doc.value_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads4)? == None); - - assert!(doc.keys_at(ROOT, &heads5) == vec!["prop2".to_owned(), "prop3".to_owned()]); - assert!(doc.value_at(ROOT, "prop1", &heads5)? == None); - assert!(doc.value_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); - assert!(doc.value_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); - - assert!(doc.keys_at(ROOT, &[]).is_empty()); - assert!(doc.value_at(ROOT, "prop1", &[])? == None); - assert!(doc.value_at(ROOT, "prop2", &[])? == None); - assert!(doc.value_at(ROOT, "prop3", &[])? 
== None); - Ok(()) - } - - #[test] - fn test_len_at() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor("aaaa".try_into().unwrap()); - - let list = doc.set(ROOT, "list", Value::list())?.unwrap(); - let heads1 = doc.commit(None, None); - - doc.insert(list, 0, Value::int(10))?; - let heads2 = doc.commit(None, None); - - doc.set(list, 0, Value::int(20))?; - doc.insert(list, 0, Value::int(30))?; - let heads3 = doc.commit(None, None); - - doc.set(list, 1, Value::int(40))?; - doc.insert(list, 1, Value::int(50))?; - let heads4 = doc.commit(None, None); - - doc.del(list, 2)?; - let heads5 = doc.commit(None, None); - - doc.del(list, 0)?; - let heads6 = doc.commit(None, None); - - assert!(doc.length_at(list, &heads1) == 0); - assert!(doc.value_at(list, 0, &heads1)?.is_none()); - - assert!(doc.length_at(list, &heads2) == 1); - assert!(doc.value_at(list, 0, &heads2)?.unwrap().0 == Value::int(10)); - - assert!(doc.length_at(list, &heads3) == 2); - assert!(doc.value_at(list, 0, &heads3)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(list, 1, &heads3)?.unwrap().0 == Value::int(20)); - - assert!(doc.length_at(list, &heads4) == 3); - assert!(doc.value_at(list, 0, &heads4)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(list, 1, &heads4)?.unwrap().0 == Value::int(50)); - assert!(doc.value_at(list, 2, &heads4)?.unwrap().0 == Value::int(40)); - - assert!(doc.length_at(list, &heads5) == 2); - assert!(doc.value_at(list, 0, &heads5)?.unwrap().0 == Value::int(30)); - assert!(doc.value_at(list, 1, &heads5)?.unwrap().0 == Value::int(50)); - - assert!(doc.length_at(list, &heads6) == 1); - assert!(doc.value_at(list, 0, &heads6)?.unwrap().0 == Value::int(50)); - - Ok(()) - } -} +pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs index 537cb80f..79fef3e4 100644 --- a/automerge/src/op_set.rs +++ b/automerge/src/op_set.rs @@ -1,6 +1,7 @@ +use crate::indexed_cache::IndexedCache; use crate::op_tree::OpTreeInternal; use crate::query::TreeQuery; -use crate::{ActorId, IndexedCache, Key, ObjId, Op, OpId}; +use crate::types::{ActorId, Key, ObjId, Op, OpId}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::HashMap; diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs index 6142a7bf..c91c150e 100644 --- a/automerge/src/op_tree.rs +++ b/automerge/src/op_tree.rs @@ -6,7 +6,7 @@ use std::{ pub(crate) use crate::op_set::OpSetMetadata; use crate::query::{Index, QueryResult, TreeQuery}; -use crate::{Op, OpId}; +use crate::types::{Op, OpId}; use std::collections::HashSet; #[allow(dead_code)] @@ -628,7 +628,7 @@ struct CounterData { #[cfg(test)] mod tests { use crate::legacy as amp; - use crate::{Op, OpId}; + use crate::types::{Op, OpId}; use super::*; diff --git a/automerge/src/query.rs b/automerge/src/query.rs index 15ac6fd6..c062c964 100644 --- a/automerge/src/query.rs +++ b/automerge/src/query.rs @@ -1,5 +1,5 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::{Clock, ElemId, Op, OpId, OpType, ScalarValue}; +use crate::types::{Clock, ElemId, Op, OpId, OpType, ScalarValue}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; diff --git a/automerge/src/query/insert.rs b/automerge/src/query/insert.rs index 745af80e..b91f9970 100644 --- a/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -1,6 +1,7 @@ +use crate::error::AutomergeError; use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery, VisWindow}; -use 
crate::{AutomergeError, ElemId, Key, Op, HEAD}; +use crate::types::{ElemId, Key, Op, HEAD}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/keys.rs b/automerge/src/query/keys.rs index 12cfaaa6..e6f6486f 100644 --- a/automerge/src/query/keys.rs +++ b/automerge/src/query/keys.rs @@ -1,6 +1,6 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::Key; +use crate::types::Key; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs index cd66b29e..81c8ba86 100644 --- a/automerge/src/query/keys_at.rs +++ b/automerge/src/query/keys_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, Key, Op}; +use crate::types::{Clock, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/len.rs b/automerge/src/query/len.rs index 494b3515..f92b8096 100644 --- a/automerge/src/query/len.rs +++ b/automerge/src/query/len.rs @@ -1,17 +1,15 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::ObjId; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Len { - obj: ObjId, pub len: usize, } impl Len { - pub fn new(obj: ObjId) -> Self { - Len { obj, len: 0 } + pub fn new() -> Self { + Len { len: 0 } } } diff --git a/automerge/src/query/len_at.rs b/automerge/src/query/len_at.rs index acf4af84..03187db1 100644 --- a/automerge/src/query/len_at.rs +++ b/automerge/src/query/len_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/list_vals.rs b/automerge/src/query/list_vals.rs index c19ac4ad..0d8958fd 100644 --- a/automerge/src/query/list_vals.rs +++ b/automerge/src/query/list_vals.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; -use crate::{ElemId, ObjId, Op}; +use crate::types::{ElemId, ObjId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/list_vals_at.rs b/automerge/src/query/list_vals_at.rs index 3ae19d01..5d720bf6 100644 --- a/automerge/src/query/list_vals_at.rs +++ b/automerge/src/query/list_vals_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/nth.rs b/automerge/src/query/nth.rs index e76bc385..6000b71a 100644 --- a/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -1,6 +1,7 @@ +use crate::error::AutomergeError; use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{AutomergeError, ElemId, Key, Op}; +use crate::types::{ElemId, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs index cecf82ac..7a867cad 100644 --- a/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs 
index ac4b2bca..11d2b0cd 100644 --- a/automerge/src/query/prop.rs +++ b/automerge/src/query/prop.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, is_visible, visible_op, QueryResult, TreeQuery}; -use crate::{Key, ObjId, Op}; +use crate::types::{Key, ObjId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/prop_at.rs b/automerge/src/query/prop_at.rs index 3fcb2c19..a5c02e34 100644 --- a/automerge/src/query/prop_at.rs +++ b/automerge/src/query/prop_at.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery, VisWindow}; -use crate::{Clock, Key, Op}; +use crate::types::{Clock, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs index 5a6b3e24..c30a15f5 100644 --- a/automerge/src/query/seek_op.rs +++ b/automerge/src/query/seek_op.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::{Key, Op, HEAD}; +use crate::types::{Key, Op, HEAD}; use std::cmp::Ordering; use std::fmt::Debug; diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs index 62ee9935..3d58da70 100644 --- a/automerge/src/sync.rs +++ b/automerge/src/sync.rs @@ -6,9 +6,10 @@ use std::{ io::Write, }; +use crate::types::Patch; use crate::{ decoding, decoding::Decoder, encoding, encoding::Encodable, Automerge, AutomergeError, Change, - ChangeHash, Patch, + ChangeHash, }; mod bloom; diff --git a/automerge/src/types.rs b/automerge/src/types.rs index f00beed3..c8856fe4 100644 --- a/automerge/src/types.rs +++ b/automerge/src/types.rs @@ -1,6 +1,5 @@ use crate::error; use crate::legacy as amp; -use crate::ScalarValue; use serde::{Deserialize, Serialize}; use std::cmp::Eq; use std::convert::TryFrom; @@ -9,8 +8,11 @@ use std::fmt; use std::str::FromStr; use tinyvec::{ArrayVec, TinyVec}; +pub(crate) use crate::clock::Clock; +pub(crate) use crate::value::{ScalarValue, Value}; + pub(crate) const HEAD: ElemId = ElemId(OpId(0, 0)); -pub const ROOT: OpId = OpId(0, 0); +pub(crate) const ROOT: OpId = OpId(0, 0); const ROOT_STR: &str = "_root"; const HEAD_STR: &str = "_head"; @@ -161,23 +163,16 @@ pub enum OpType { } #[derive(Debug)] -pub enum Export { +pub(crate) enum Export { Id(OpId), Special(String), Prop(usize), } -pub trait Exportable { +pub(crate) trait Exportable { fn export(&self) -> Export; } -pub trait Importable { - fn wrap(id: OpId) -> Self; - fn from(s: &str) -> Option - where - Self: std::marker::Sized; -} - impl OpId { #[inline] pub fn counter(&self) -> u64 { @@ -234,45 +229,6 @@ impl Exportable for Key { } } -impl Importable for ObjId { - fn wrap(id: OpId) -> Self { - ObjId(id) - } - fn from(s: &str) -> Option { - if s == ROOT_STR { - Some(ROOT.into()) - } else { - None - } - } -} - -impl Importable for OpId { - fn wrap(id: OpId) -> Self { - id - } - fn from(s: &str) -> Option { - if s == ROOT_STR { - Some(ROOT) - } else { - None - } - } -} - -impl Importable for ElemId { - fn wrap(id: OpId) -> Self { - ElemId(id) - } - fn from(s: &str) -> Option { - if s == HEAD_STR { - Some(HEAD) - } else { - None - } - } -} - impl From for ObjId { fn from(o: OpId) -> Self { ObjId(o) @@ -352,11 +308,17 @@ impl Key { } #[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] -pub struct OpId(pub u64, pub usize); +pub(crate) struct OpId(pub u64, pub usize); #[derive(Debug, Clone, 
Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] pub(crate) struct ObjId(pub OpId); +impl ObjId { + pub fn root() -> Self { + ObjId(OpId(0, 0)) + } +} + #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] pub(crate) struct ElemId(pub OpId); @@ -374,7 +336,11 @@ pub(crate) struct Op { impl Op { pub fn is_del(&self) -> bool { - matches!(self.action, OpType::Del) + matches!(&self.action, OpType::Del) + } + + pub fn is_noop(&self, action: &OpType) -> bool { + matches!((&self.action, action), (OpType::Set(n), OpType::Set(m)) if n == m) } pub fn overwrites(&self, other: &Op) -> bool { @@ -389,6 +355,14 @@ impl Op { } } + pub fn value(&self) -> Value { + match &self.action { + OpType::Make(obj_type) => Value::Object(*obj_type), + OpType::Set(scalar) => Value::Scalar(scalar.clone()), + _ => panic!("cant convert op into a value - {:?}", self), + } + } + #[allow(dead_code)] pub fn dump(&self) -> String { match &self.action { diff --git a/automerge/src/value.rs b/automerge/src/value.rs index 333c1f53..e5af0cb6 100644 --- a/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -1,4 +1,5 @@ -use crate::{error, ObjType, Op, OpId, OpType}; +use crate::error; +use crate::types::{ObjType, Op, OpId, OpType}; use serde::{Deserialize, Serialize}; use smol_str::SmolStr; use std::convert::TryFrom; diff --git a/automerge/src/visualisation.rs b/automerge/src/visualisation.rs index 11233d50..81f52470 100644 --- a/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -24,7 +24,7 @@ pub(crate) struct Node<'a, const B: usize> { #[derive(Clone)] pub(crate) enum NodeType<'a, const B: usize> { - ObjRoot(crate::ObjId), + ObjRoot(crate::types::ObjId), ObjTreeNode(&'a crate::op_tree::OpTreeNode), } @@ -225,7 +225,7 @@ impl OpTableRow { impl OpTableRow { fn create( - op: &super::Op, + op: &super::types::Op, metadata: &crate::op_set::OpSetMetadata, actor_shorthands: &HashMap, ) -> Self { @@ -236,8 +236,8 @@ impl OpTableRow { crate::OpType::Inc(v) => format!("inc {}", v), }; let prop = match op.key { - crate::Key::Map(k) => metadata.props[k].clone(), - crate::Key::Seq(e) => print_opid(&e.0, actor_shorthands), + crate::types::Key::Map(k) => metadata.props[k].clone(), + crate::types::Key::Seq(e) => print_opid(&e.0, actor_shorthands), }; let succ = op .succ @@ -254,6 +254,6 @@ impl OpTableRow { } } -fn print_opid(opid: &crate::OpId, actor_shorthands: &HashMap) -> String { +fn print_opid(opid: &crate::types::OpId, actor_shorthands: &HashMap) -> String { format!("{}@{}", opid.counter(), actor_shorthands[&opid.actor()]) } diff --git a/automerge/tests/helpers/mod.rs b/automerge/tests/helpers/mod.rs index d93a211b..ec4beb0f 100644 --- a/automerge/tests/helpers/mod.rs +++ b/automerge/tests/helpers/mod.rs @@ -1,4 +1,8 @@ -use std::{collections::HashMap, convert::TryInto, hash::Hash}; +use std::{ + collections::{BTreeMap, BTreeSet}, + convert::TryInto, + hash::Hash, +}; use serde::ser::{SerializeMap, SerializeSeq}; @@ -42,7 +46,7 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// map!{ /// "todos" => { /// todos => list![ -/// { todo => map!{ title = "water plants" } } +/// { map!{ title = "water plants" } } /// ] /// } /// } @@ -50,9 +54,9 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// /// ``` /// -/// This might look more complicated than you were expecting. Why are there OpIds (`todos`, `todo`, -/// `title`) in there? Well the `RealizedObject` contains all the changes in the document tagged by -/// OpId. 
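// Illustrative aside (not part of the diff): the representational change these test
// helpers are making, in miniature. The old RealizedObject tagged every conflicting
// value with the OpId that wrote it; the new one keeps just a set of values per key,
// so expected documents can be written without exporting or translating any OpIds.
// The literal types and the "1@aaaa"-style opid strings below are stand-ins, not the
// crate's types.
use std::collections::{BTreeMap, BTreeSet, HashMap};

fn main() {
    // Old shape: "field" -> { opid -> value }. Tests had to export or translate the
    // opids to build the expected value.
    let mut old_style: HashMap<&str, HashMap<&str, &str>> = HashMap::new();
    old_style.insert("field", HashMap::from([("1@aaaa", "one"), ("1@bbbb", "two")]));

    // New shape: "field" -> { value }. A conflict is just a set with more than one
    // element, and values only need to be orderable (hence BTreeSet).
    let mut new_style: BTreeMap<&str, BTreeSet<&str>> = BTreeMap::new();
    new_style.insert("field", BTreeSet::from(["one", "two"]));

    assert_eq!(old_style["field"].len(), new_style["field"].len());
}
// End of aside; the diff continues below.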
This makes it easy to test for conflicts: +/// This might look more complicated than you were expecting. Why is the first element in the list +/// wrapped in braces? Because every property in an automerge document can have multiple +/// conflicting values we must capture all of these. /// /// ```rust /// let mut doc1 = automerge::Automerge::new(); @@ -70,33 +74,20 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// } /// ); /// ``` -/// -/// ## Translating OpIds -/// -/// One thing you may have noticed in the example above is the `op2.translate(&doc2)` call. What is -/// that doing there? Well, the problem is that automerge OpIDs (in the current API) are specific -/// to a document. Using an opid from one document in a different document will not work. Therefore -/// this module defines an `OpIdExt` trait with a `translate` method on it. This method takes a -/// document and converts the opid into something which knows how to be compared with opids from -/// another document by using the document you pass to `translate`. Again, all you really need to -/// know is that when constructing a document for comparison you should call `translate(fromdoc)` -/// on opids which come from a document other than the one you pass to `assert_doc`. #[macro_export] macro_rules! assert_doc { ($doc: expr, $expected: expr) => {{ - use $crate::helpers::{realize, ExportableOpId}; + use $crate::helpers::realize; let realized = realize($doc); - let to_export: RealizedObject> = $expected.into(); - let exported = to_export.export($doc); - if realized != exported { + let expected_obj = $expected.into(); + if realized != expected_obj { let serde_right = serde_json::to_string_pretty(&realized).unwrap(); - let serde_left = serde_json::to_string_pretty(&exported).unwrap(); + let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); panic!( "documents didn't match\n expected\n{}\n got\n{}", &serde_left, &serde_right ); } - pretty_assertions::assert_eq!(realized, exported); }}; } @@ -105,63 +96,52 @@ macro_rules! assert_doc { #[macro_export] macro_rules! assert_obj { ($doc: expr, $obj_id: expr, $prop: expr, $expected: expr) => {{ - use $crate::helpers::{realize_prop, ExportableOpId}; + use $crate::helpers::realize_prop; let realized = realize_prop($doc, $obj_id, $prop); - let to_export: RealizedObject> = $expected.into(); - let exported = to_export.export($doc); - if realized != exported { + let expected_obj = $expected.into(); + if realized != expected_obj { let serde_right = serde_json::to_string_pretty(&realized).unwrap(); - let serde_left = serde_json::to_string_pretty(&exported).unwrap(); + let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); panic!( "documents didn't match\n expected\n{}\n got\n{}", &serde_left, &serde_right ); } - pretty_assertions::assert_eq!(realized, exported); }}; } /// Construct `RealizedObject::Map`. This macro takes a nested set of curl braces. The outer set is -/// the keys of the map, the inner set is the opid tagged values: +/// the keys of the map, the inner set is the set of values for that key: /// /// ``` /// map!{ /// "key" => { -/// opid1 => "value1", -/// opid2 => "value2", +/// "value1", +/// "value2", /// } /// } /// ``` /// /// The map above would represent a map with a conflict on the "key" property. The values can be -/// anything which implements `Into>`. Including nested calls to -/// `map!` or `list!`. +/// anything which implements `Into`. Including nested calls to `map!` or `list!`. #[macro_export] macro_rules! 
map { - (@single $($x:tt)*) => (()); - (@count $($rest:expr),*) => (<[()]>::len(&[$(map!(@single $rest)),*])); - - (@inner { $($opid:expr => $value:expr,)+ }) => { map!(@inner { $($opid => $value),+ }) }; - (@inner { $($opid:expr => $value:expr),* }) => { + (@inner { $($value:expr,)+ }) => { map!(@inner { $($value),+ }) }; + (@inner { $($value:expr),* }) => { { - use std::collections::HashMap; - let mut inner: HashMap, RealizedObject>> = HashMap::new(); + use std::collections::BTreeSet; + let mut inner: BTreeSet = BTreeSet::new(); $( - let _ = inner.insert($opid.into(), $value.into()); + let _ = inner.insert($value.into()); )* inner } }; - //(&inner $map:expr, $opid:expr => $value:expr, $($tail:tt),*) => { - //$map.insert($opid.into(), $value.into()); - //} ($($key:expr => $inner:tt,)+) => { map!($($key => $inner),+) }; ($($key:expr => $inner:tt),*) => { { - use std::collections::HashMap; - use crate::helpers::ExportableOpId; - let _cap = map!(@count $($key),*); - let mut _map: HashMap, RealizedObject>>> = ::std::collections::HashMap::with_capacity(_cap); + use std::collections::{BTreeMap, BTreeSet}; + let mut _map: BTreeMap> = ::std::collections::BTreeMap::new(); $( let inner = map!(@inner $inner); let _ = _map.insert($key.to_string(), inner); @@ -171,32 +151,32 @@ macro_rules! map { } } -/// Construct `RealizedObject::Sequence`. This macro represents a sequence of opid tagged values +/// Construct `RealizedObject::Sequence`. This macro represents a sequence of values /// /// ``` /// list![ /// { -/// opid1 => "value1", -/// opid2 => "value2", +/// "value1", +/// "value2", /// } /// ] /// ``` /// /// The list above would represent a list with a conflict on the 0 index. The values can be -/// anything which implements `Into>` including nested calls to +/// anything which implements `Into` including nested calls to /// `map!` or `list!`. #[macro_export] macro_rules! list { (@single $($x:tt)*) => (()); (@count $($rest:tt),*) => (<[()]>::len(&[$(list!(@single $rest)),*])); - (@inner { $($opid:expr => $value:expr,)+ }) => { list!(@inner { $($opid => $value),+ }) }; - (@inner { $($opid:expr => $value:expr),* }) => { + (@inner { $($value:expr,)+ }) => { list!(@inner { $($value),+ }) }; + (@inner { $($value:expr),* }) => { { - use std::collections::HashMap; - let mut inner: HashMap, RealizedObject>> = HashMap::new(); + use std::collections::BTreeSet; + let mut inner: BTreeSet = BTreeSet::new(); $( - let _ = inner.insert($opid.into(), $value.into()); + let _ = inner.insert($value.into()); )* inner } @@ -204,9 +184,8 @@ macro_rules! list { ($($inner:tt,)+) => { list!($($inner),+) }; ($($inner:tt),*) => { { - use crate::helpers::ExportableOpId; let _cap = list!(@count $($inner),*); - let mut _list: Vec, RealizedObject>>> = Vec::new(); + let mut _list: Vec> = Vec::new(); $( //println!("{}", stringify!($inner)); let inner = list!(@inner $inner); @@ -217,26 +196,6 @@ macro_rules! list { } } -/// Translate an op ID produced by one document to an op ID which can be understood by -/// another -/// -/// The current API of automerge exposes OpIds of the form (u64, usize) where the first component -/// is the counter of an actors lamport timestamp and the second component is the index into an -/// array of actor IDs stored by the document where the opid was generated. Obviously this is not -/// portable between documents as the index of the actor array is unlikely to match between two -/// documents. This function translates between the two representations. 
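// Illustrative aside (not part of the diff): why `translate` had to exist and why
// export/import make OpIds portable. An OpId is (counter, actor-index), where the
// index points into a per-document actor table, so the portable form is the
// "counter@actor-hex" string produced by `export` and parsed by `import` (see the
// automerge.rs code earlier in this diff). Below is a self-contained sketch of that
// round trip; the Vec<String> actor table and the Option-based error handling are
// simplifications of the real IndexedCache and AutomergeError.
fn export(counter: u64, actor_index: usize, actors: &[String]) -> String {
    format!("{}@{}", counter, actors[actor_index])
}

fn import(s: &str, actors: &[String]) -> Option<(u64, usize)> {
    let at = s.find('@')?;
    let counter: u64 = s[..at].parse().ok()?;
    // The real code hex-decodes the actor and looks it up in an IndexedCache; here we
    // just search a Vec of hex strings.
    let actor_index = actors.iter().position(|a| a.as_str() == &s[at + 1..])?;
    Some((counter, actor_index))
}

fn main() {
    // Two documents can know the same actor under different indices.
    let doc1_actors = vec!["aaaa".to_string(), "bbbb".to_string()];
    let doc2_actors = vec!["bbbb".to_string(), "aaaa".to_string()];
    let s = export(3, 1, &doc1_actors); // "3@bbbb" in doc1's terms
    assert_eq!(import(&s, &doc2_actors), Some((3, 0))); // same op, different index
}
// End of aside; the diff continues below.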
-/// -/// At some point we will probably change the API to not be document specific but this function -/// allows us to write tests first. -pub fn translate_obj_id( - from: &automerge::Automerge, - to: &automerge::Automerge, - id: automerge::OpId, -) -> automerge::OpId { - let exported = from.export(id); - to.import(&exported).unwrap() -} - pub fn mk_counter(value: i64) -> automerge::ScalarValue { automerge::ScalarValue::Counter(value) } @@ -252,14 +211,72 @@ impl std::fmt::Display for ExportedOpId { /// A `RealizedObject` is a representation of all the current values in a document - including /// conflicts. -#[derive(PartialEq, Debug)] -pub enum RealizedObject { - Map(HashMap>>), - Sequence(Vec>>), - Value(automerge::ScalarValue), +#[derive(PartialEq, PartialOrd, Ord, Eq, Hash, Debug)] +pub enum RealizedObject { + Map(BTreeMap>), + Sequence(Vec>), + Value(OrdScalarValue), } -impl serde::Serialize for RealizedObject { +// A copy of automerge::ScalarValue which uses decorum::Total for floating point values. This makes the type +// orderable, which is useful when we want to compare conflicting values of a register in an +// automerge document. +#[derive(PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] +pub enum OrdScalarValue { + Bytes(Vec), + Str(smol_str::SmolStr), + Int(i64), + Uint(u64), + F64(decorum::Total), + Counter(i64), + Timestamp(i64), + Boolean(bool), + Null, +} + +impl From for OrdScalarValue { + fn from(v: automerge::ScalarValue) -> Self { + match v { + automerge::ScalarValue::Bytes(v) => OrdScalarValue::Bytes(v), + automerge::ScalarValue::Str(v) => OrdScalarValue::Str(v), + automerge::ScalarValue::Int(v) => OrdScalarValue::Int(v), + automerge::ScalarValue::Uint(v) => OrdScalarValue::Uint(v), + automerge::ScalarValue::F64(v) => OrdScalarValue::F64(decorum::Total::from(v)), + automerge::ScalarValue::Counter(v) => OrdScalarValue::Counter(v), + automerge::ScalarValue::Timestamp(v) => OrdScalarValue::Timestamp(v), + automerge::ScalarValue::Boolean(v) => OrdScalarValue::Boolean(v), + automerge::ScalarValue::Null => OrdScalarValue::Null, + } + } +} + +impl From<&OrdScalarValue> for automerge::ScalarValue { + fn from(v: &OrdScalarValue) -> Self { + match v { + OrdScalarValue::Bytes(v) => automerge::ScalarValue::Bytes(v.clone()), + OrdScalarValue::Str(v) => automerge::ScalarValue::Str(v.clone()), + OrdScalarValue::Int(v) => automerge::ScalarValue::Int(*v), + OrdScalarValue::Uint(v) => automerge::ScalarValue::Uint(*v), + OrdScalarValue::F64(v) => automerge::ScalarValue::F64(v.into_inner()), + OrdScalarValue::Counter(v) => automerge::ScalarValue::Counter(*v), + OrdScalarValue::Timestamp(v) => automerge::ScalarValue::Timestamp(*v), + OrdScalarValue::Boolean(v) => automerge::ScalarValue::Boolean(*v), + OrdScalarValue::Null => automerge::ScalarValue::Null, + } + } +} + +impl serde::Serialize for OrdScalarValue { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let s = automerge::ScalarValue::from(self); + s.serialize(serializer) + } +} + +impl serde::Serialize for RealizedObject { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -267,23 +284,17 @@ impl serde::Serialize for RealizedObject { match self { Self::Map(kvs) => { let mut map_ser = serializer.serialize_map(Some(kvs.len()))?; - for (k, kvs) in kvs { - let kvs_serded = kvs - .iter() - .map(|(opid, value)| (opid.to_string(), value)) - .collect::>>(); - map_ser.serialize_entry(k, &kvs_serded)?; + for (k, vs) in kvs { + let vs_serded = vs.iter().collect::>(); + 
map_ser.serialize_entry(k, &vs_serded)?; } map_ser.end() } Self::Sequence(elems) => { let mut list_ser = serializer.serialize_seq(Some(elems.len()))?; for elem in elems { - let kvs_serded = elem - .iter() - .map(|(opid, value)| (opid.to_string(), value)) - .collect::>>(); - list_ser.serialize_element(&kvs_serded)?; + let vs_serded = elem.iter().collect::>(); + list_ser.serialize_element(&vs_serded)?; } list_ser.end() } @@ -292,30 +303,30 @@ impl serde::Serialize for RealizedObject { } } -pub fn realize(doc: &automerge::Automerge) -> RealizedObject { - realize_obj(doc, automerge::ROOT, automerge::ObjType::Map) +pub fn realize(doc: &automerge::Automerge) -> RealizedObject { + realize_obj(doc, &automerge::ROOT, automerge::ObjType::Map) } pub fn realize_prop>( doc: &automerge::Automerge, - obj_id: automerge::OpId, + obj_id: &automerge::ObjId, prop: P, -) -> RealizedObject { +) -> RealizedObject { let (val, obj_id) = doc.value(obj_id, prop).unwrap().unwrap(); match val { - automerge::Value::Object(obj_type) => realize_obj(doc, obj_id, obj_type), - automerge::Value::Scalar(v) => RealizedObject::Value(v), + automerge::Value::Object(obj_type) => realize_obj(doc, &obj_id, obj_type), + automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v)), } } pub fn realize_obj( doc: &automerge::Automerge, - obj_id: automerge::OpId, + obj_id: &automerge::ObjId, objtype: automerge::ObjType, -) -> RealizedObject { +) -> RealizedObject { match objtype { automerge::ObjType::Map | automerge::ObjType::Table => { - let mut result = HashMap::new(); + let mut result = BTreeMap::new(); for key in doc.keys(obj_id) { result.insert(key.clone(), realize_values(doc, obj_id, key)); } @@ -334,166 +345,63 @@ pub fn realize_obj( fn realize_values>( doc: &automerge::Automerge, - obj_id: automerge::OpId, + obj_id: &automerge::ObjId, key: K, -) -> HashMap> { - let mut values_by_opid = HashMap::new(); - for (value, opid) in doc.values(obj_id, key).unwrap() { +) -> BTreeSet { + let mut values = BTreeSet::new(); + for (value, objid) in doc.values(obj_id, key).unwrap() { let realized = match value { - automerge::Value::Object(objtype) => realize_obj(doc, opid, objtype), - automerge::Value::Scalar(v) => RealizedObject::Value(v), + automerge::Value::Object(objtype) => realize_obj(doc, &objid, objtype), + automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v)), }; - let exported_opid = ExportedOpId(doc.export(opid)); - values_by_opid.insert(exported_opid, realized); + values.insert(realized); } - values_by_opid + values } -impl<'a> RealizedObject> { - pub fn export(self, doc: &automerge::Automerge) -> RealizedObject { - match self { - Self::Map(kvs) => RealizedObject::Map( - kvs.into_iter() - .map(|(k, v)| { - ( - k, - v.into_iter() - .map(|(k, v)| (k.export(doc), v.export(doc))) - .collect(), - ) - }) - .collect(), - ), - Self::Sequence(values) => RealizedObject::Sequence( - values - .into_iter() - .map(|v| { - v.into_iter() - .map(|(k, v)| (k.export(doc), v.export(doc))) - .collect() - }) - .collect(), - ), - Self::Value(v) => RealizedObject::Value(v), - } - } -} - -impl<'a, O: Into>, I: Into>>> - From>> for RealizedObject> -{ - fn from(values: HashMap<&str, HashMap>) -> Self { +impl> From>> for RealizedObject { + fn from(values: BTreeMap<&str, BTreeSet>) -> Self { let intoed = values .into_iter() - .map(|(k, v)| { - ( - k.to_string(), - v.into_iter().map(|(k, v)| (k.into(), v.into())).collect(), - ) - }) + .map(|(k, v)| (k.to_string(), v.into_iter().map(|v| v.into()).collect())) .collect(); 
RealizedObject::Map(intoed) } } -impl<'a, O: Into>, I: Into>>> - From>> for RealizedObject> -{ - fn from(values: Vec>) -> Self { +impl> From>> for RealizedObject { + fn from(values: Vec>) -> Self { RealizedObject::Sequence( values .into_iter() - .map(|v| v.into_iter().map(|(k, v)| (k.into(), v.into())).collect()) + .map(|v| v.into_iter().map(|v| v.into()).collect()) .collect(), ) } } -impl From for RealizedObject> { +impl From for RealizedObject { fn from(b: bool) -> Self { - RealizedObject::Value(b.into()) + RealizedObject::Value(OrdScalarValue::Boolean(b)) } } -impl From for RealizedObject> { +impl From for RealizedObject { fn from(u: usize) -> Self { let v = u.try_into().unwrap(); - RealizedObject::Value(automerge::ScalarValue::Int(v)) + RealizedObject::Value(OrdScalarValue::Int(v)) } } -impl From for RealizedObject> { +impl From for RealizedObject { fn from(s: automerge::ScalarValue) -> Self { - RealizedObject::Value(s) + RealizedObject::Value(OrdScalarValue::from(s)) } } -impl From<&str> for RealizedObject> { +impl From<&str> for RealizedObject { fn from(s: &str) -> Self { - RealizedObject::Value(automerge::ScalarValue::Str(s.into())) - } -} - -#[derive(Eq, PartialEq, Hash)] -pub enum ExportableOpId<'a> { - Native(automerge::OpId), - Translate(Translate<'a>), -} - -impl<'a> ExportableOpId<'a> { - fn export(self, doc: &automerge::Automerge) -> ExportedOpId { - let oid = match self { - Self::Native(oid) => oid, - Self::Translate(Translate { from, opid }) => translate_obj_id(from, doc, opid), - }; - ExportedOpId(doc.export(oid)) - } -} - -pub struct Translate<'a> { - from: &'a automerge::Automerge, - opid: automerge::OpId, -} - -impl<'a> PartialEq for Translate<'a> { - fn eq(&self, other: &Self) -> bool { - self.from.maybe_get_actor().unwrap() == other.from.maybe_get_actor().unwrap() - && self.opid == other.opid - } -} - -impl<'a> Eq for Translate<'a> {} - -impl<'a> Hash for Translate<'a> { - fn hash(&self, state: &mut H) { - self.from.maybe_get_actor().unwrap().hash(state); - self.opid.hash(state); - } -} - -pub trait OpIdExt { - fn native(self) -> ExportableOpId<'static>; - fn translate(self, doc: &automerge::Automerge) -> ExportableOpId<'_>; -} - -impl OpIdExt for automerge::OpId { - /// Use this opid directly when exporting - fn native(self) -> ExportableOpId<'static> { - ExportableOpId::Native(self) - } - - /// Translate this OpID from `doc` when exporting - fn translate(self, doc: &automerge::Automerge) -> ExportableOpId<'_> { - ExportableOpId::Translate(Translate { - from: doc, - opid: self, - }) - } -} - -impl From for ExportableOpId<'_> { - fn from(oid: automerge::OpId) -> Self { - ExportableOpId::Native(oid) + RealizedObject::Value(OrdScalarValue::Str(smol_str::SmolStr::from(s))) } } diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs index 8dcc51df..2253f22b 100644 --- a/automerge/tests/test.rs +++ b/automerge/tests/test.rs @@ -4,17 +4,17 @@ mod helpers; #[allow(unused_imports)] use helpers::{ mk_counter, new_doc, new_doc_with_actor, pretty_print, realize, realize_obj, sorted_actors, - translate_obj_id, OpIdExt, RealizedObject, + RealizedObject, }; #[test] fn no_conflict_on_repeated_assignment() { let mut doc = Automerge::new(); - doc.set(automerge::ROOT, "foo", 1).unwrap(); - let op = doc.set(automerge::ROOT, "foo", 2).unwrap().unwrap(); + doc.set(&automerge::ROOT, "foo", 1).unwrap(); + doc.set(&automerge::ROOT, "foo", 2).unwrap(); assert_doc!( &doc, map! 
{ - "foo" => { op => 2}, + "foo" => { 2 }, } ); } @@ -22,51 +22,49 @@ fn no_conflict_on_repeated_assignment() { #[test] fn no_change_on_repeated_map_set() { let mut doc = new_doc(); - doc.set(automerge::ROOT, "foo", 1).unwrap(); - assert!(doc.set(automerge::ROOT, "foo", 1).unwrap().is_none()); + doc.set(&automerge::ROOT, "foo", 1).unwrap(); + assert!(doc.set(&automerge::ROOT, "foo", 1).unwrap().is_none()); } #[test] fn no_change_on_repeated_list_set() { let mut doc = new_doc(); let list_id = doc - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc.insert(list_id, 0, 1).unwrap(); - doc.set(list_id, 0, 1).unwrap(); - assert!(doc.set(list_id, 0, 1).unwrap().is_none()); + doc.insert(&list_id, 0, 1).unwrap(); + doc.set(&list_id, 0, 1).unwrap(); + assert!(doc.set(&list_id, 0, 1).unwrap().is_none()); } #[test] fn no_change_on_list_insert_followed_by_set_of_same_value() { let mut doc = new_doc(); let list_id = doc - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc.insert(list_id, 0, 1).unwrap(); - assert!(doc.set(list_id, 0, 1).unwrap().is_none()); + doc.insert(&list_id, 0, 1).unwrap(); + assert!(doc.set(&list_id, 0, 1).unwrap().is_none()); } #[test] fn repeated_map_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(automerge::ROOT, "field", 123).unwrap(); + doc1.set(&automerge::ROOT, "field", 123).unwrap(); doc2.merge(&mut doc1); - doc2.set(automerge::ROOT, "field", 456).unwrap(); - doc1.set(automerge::ROOT, "field", 789).unwrap(); + doc2.set(&automerge::ROOT, "field", 456).unwrap(); + doc1.set(&automerge::ROOT, "field", 789).unwrap(); doc1.merge(&mut doc2); - assert_eq!(doc1.values(automerge::ROOT, "field").unwrap().len(), 2); + assert_eq!(doc1.values(&automerge::ROOT, "field").unwrap().len(), 2); - let op = doc1.set(automerge::ROOT, "field", 123).unwrap().unwrap(); + doc1.set(&automerge::ROOT, "field", 123).unwrap(); assert_doc!( &doc1, map! { - "field" => { - op => 123 - } + "field" => { 123 } } ); } @@ -76,22 +74,21 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc1.insert(list_id, 0, 123).unwrap(); + doc1.insert(&list_id, 0, 123).unwrap(); doc2.merge(&mut doc1); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - doc2.set(list_id_in_doc2, 0, 456).unwrap().unwrap(); + doc2.set(&list_id, 0, 456).unwrap(); doc1.merge(&mut doc2); - let doc1_op = doc1.set(list_id, 0, 789).unwrap().unwrap(); + doc1.set(&list_id, 0, 789).unwrap(); assert_doc!( &doc1, map! 
{ "list" => { - list_id => list![ - { doc1_op => 789 }, + list![ + { 789 }, ] } } @@ -102,19 +99,19 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { fn list_deletion() { let mut doc = new_doc(); let list_id = doc - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let op1 = doc.insert(list_id, 0, 123).unwrap(); - doc.insert(list_id, 1, 456).unwrap(); - let op3 = doc.insert(list_id, 2, 789).unwrap(); - doc.del(list_id, 1).unwrap(); + doc.insert(&list_id, 0, 123).unwrap(); + doc.insert(&list_id, 1, 456).unwrap(); + doc.insert(&list_id, 2, 789).unwrap(); + doc.del(&list_id, 1).unwrap(); assert_doc!( &doc, map! { - "list" => {list_id => list![ - { op1 => 123 }, - { op3 => 789 }, + "list" => { list![ + { 123 }, + { 789 }, ]} } ) @@ -124,29 +121,26 @@ fn list_deletion() { fn merge_concurrent_map_prop_updates() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let op1 = doc1.set(automerge::ROOT, "foo", "bar").unwrap().unwrap(); - let hello = doc2 - .set(automerge::ROOT, "hello", "world") - .unwrap() - .unwrap(); + doc1.set(&automerge::ROOT, "foo", "bar").unwrap(); + doc2.set(&automerge::ROOT, "hello", "world").unwrap(); doc1.merge(&mut doc2); assert_eq!( - doc1.value(automerge::ROOT, "foo").unwrap().unwrap().0, + doc1.value(&automerge::ROOT, "foo").unwrap().unwrap().0, "bar".into() ); assert_doc!( &doc1, map! { - "foo" => { op1 => "bar" }, - "hello" => { hello.translate(&doc2) => "world" }, + "foo" => { "bar" }, + "hello" => { "world" }, } ); doc2.merge(&mut doc1); assert_doc!( &doc2, map! { - "foo" => { op1.translate(&doc1) => "bar" }, - "hello" => { hello => "world" }, + "foo" => { "bar" }, + "hello" => { "world" }, } ); assert_eq!(realize(&doc1), realize(&doc2)); @@ -156,19 +150,17 @@ fn merge_concurrent_map_prop_updates() { fn add_concurrent_increments_of_same_property() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let counter_id = doc1 - .set(automerge::ROOT, "counter", mk_counter(0)) - .unwrap() + doc1.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); doc2.merge(&mut doc1); - doc1.inc(automerge::ROOT, "counter", 1).unwrap(); - doc2.inc(automerge::ROOT, "counter", 2).unwrap(); + doc1.inc(&automerge::ROOT, "counter", 1).unwrap(); + doc2.inc(&automerge::ROOT, "counter", 2).unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { "counter" => { - counter_id => mk_counter(3) + mk_counter(3) } } ); @@ -179,19 +171,14 @@ fn add_increments_only_to_preceeded_values() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - // create a counter in doc1 - let doc1_counter_id = doc1 - .set(automerge::ROOT, "counter", mk_counter(0)) - .unwrap() + doc1.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); - doc1.inc(automerge::ROOT, "counter", 1).unwrap(); + doc1.inc(&automerge::ROOT, "counter", 1).unwrap(); // create a counter in doc2 - let doc2_counter_id = doc2 - .set(automerge::ROOT, "counter", mk_counter(0)) - .unwrap() + doc2.set(&automerge::ROOT, "counter", mk_counter(0)) .unwrap(); - doc2.inc(automerge::ROOT, "counter", 3).unwrap(); + doc2.inc(&automerge::ROOT, "counter", 3).unwrap(); // The two values should be conflicting rather than added doc1.merge(&mut doc2); @@ -200,8 +187,8 @@ fn add_increments_only_to_preceeded_values() { &doc1, map! 
{ "counter" => { - doc1_counter_id.native() => mk_counter(1), - doc2_counter_id.translate(&doc2) => mk_counter(3), + mk_counter(1), + mk_counter(3), } } ); @@ -211,8 +198,8 @@ fn add_increments_only_to_preceeded_values() { fn concurrent_updates_of_same_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let set_one_opid = doc1.set(automerge::ROOT, "field", "one").unwrap().unwrap(); - let set_two_opid = doc2.set(automerge::ROOT, "field", "two").unwrap().unwrap(); + doc1.set(&automerge::ROOT, "field", "one").unwrap(); + doc2.set(&automerge::ROOT, "field", "two").unwrap(); doc1.merge(&mut doc2); @@ -220,8 +207,8 @@ fn concurrent_updates_of_same_field() { &doc1, map! { "field" => { - set_one_opid.native() => "one", - set_two_opid.translate(&doc2) => "two", + "one", + "two", } } ); @@ -232,14 +219,13 @@ fn concurrent_updates_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - doc1.insert(list_id, 0, "finch").unwrap(); + doc1.insert(&list_id, 0, "finch").unwrap(); doc2.merge(&mut doc1); - let set_one_op = doc1.set(list_id, 0, "greenfinch").unwrap().unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let set_op_two = doc2.set(list_id_in_doc2, 0, "goldfinch").unwrap().unwrap(); + doc1.set(&list_id, 0, "greenfinch").unwrap(); + doc2.set(&list_id, 0, "goldfinch").unwrap(); doc1.merge(&mut doc2); @@ -247,9 +233,9 @@ fn concurrent_updates_of_same_list_element() { &doc1, map! { "birds" => { - list_id => list![{ - set_one_op.native() => "greenfinch", - set_op_two.translate(&doc2) => "goldfinch", + list![{ + "greenfinch", + "goldfinch", }] } } @@ -261,19 +247,11 @@ fn assignment_conflicts_of_different_types() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let mut doc3 = new_doc(); - let op_one = doc1 - .set(automerge::ROOT, "field", "string") - .unwrap() + doc1.set(&automerge::ROOT, "field", "string").unwrap(); + doc2.set(&automerge::ROOT, "field", automerge::Value::list()) .unwrap(); - let op_two = doc2 - .set(automerge::ROOT, "field", automerge::Value::list()) - .unwrap() + doc3.set(&automerge::ROOT, "field", automerge::Value::map()) .unwrap(); - let op_three = doc3 - .set(automerge::ROOT, "field", automerge::Value::map()) - .unwrap() - .unwrap(); - doc1.merge(&mut doc2); doc1.merge(&mut doc3); @@ -281,9 +259,9 @@ fn assignment_conflicts_of_different_types() { &doc1, map! { "field" => { - op_one.native() => "string", - op_two.translate(&doc2) => list!{}, - op_three.translate(&doc3) => map!{}, + "string", + list!{}, + map!{}, } } ); @@ -293,25 +271,22 @@ fn assignment_conflicts_of_different_types() { fn changes_within_conflicting_map_field() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - let op_one = doc1 - .set(automerge::ROOT, "field", "string") - .unwrap() - .unwrap(); + doc1.set(&automerge::ROOT, "field", "string").unwrap(); let map_id = doc2 - .set(automerge::ROOT, "field", automerge::Value::map()) + .set(&automerge::ROOT, "field", automerge::Value::map()) .unwrap() .unwrap(); - let set_in_doc2 = doc2.set(map_id, "innerKey", 42).unwrap().unwrap(); + doc2.set(&map_id, "innerKey", 42).unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! 
{ "field" => { - op_one.native() => "string", - map_id.translate(&doc2) => map!{ + "string", + map!{ "innerKey" => { - set_in_doc2.translate(&doc2) => 42, + 42, } } } @@ -325,27 +300,26 @@ fn changes_within_conflicting_list_element() { let mut doc1 = new_doc_with_actor(actor1); let mut doc2 = new_doc_with_actor(actor2); let list_id = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - doc1.insert(list_id, 0, "hello").unwrap(); + doc1.insert(&list_id, 0, "hello").unwrap(); doc2.merge(&mut doc1); let map_in_doc1 = doc1 - .set(list_id, 0, automerge::Value::map()) + .set(&list_id, 0, automerge::Value::map()) .unwrap() .unwrap(); - let set_map1 = doc1.set(map_in_doc1, "map1", true).unwrap().unwrap(); - let set_key1 = doc1.set(map_in_doc1, "key", 1).unwrap().unwrap(); + doc1.set(&map_in_doc1, "map1", true).unwrap(); + doc1.set(&map_in_doc1, "key", 1).unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); let map_in_doc2 = doc2 - .set(list_id_in_doc2, 0, automerge::Value::map()) + .set(&list_id, 0, automerge::Value::map()) .unwrap() .unwrap(); doc1.merge(&mut doc2); - let set_map2 = doc2.set(map_in_doc2, "map2", true).unwrap().unwrap(); - let set_key2 = doc2.set(map_in_doc2, "key", 2).unwrap().unwrap(); + doc2.set(&map_in_doc2, "map2", true).unwrap(); + doc2.set(&map_in_doc2, "key", 2).unwrap(); doc1.merge(&mut doc2); @@ -353,15 +327,15 @@ fn changes_within_conflicting_list_element() { &doc1, map! { "list" => { - list_id => list![ + list![ { - map_in_doc2.translate(&doc2) => map!{ - "map2" => { set_map2.translate(&doc2) => true }, - "key" => { set_key2.translate(&doc2) => 2 }, + map!{ + "map2" => { true }, + "key" => { 2 }, }, - map_in_doc1.native() => map!{ - "key" => { set_key1.native() => 1 }, - "map1" => { set_map1.native() => true }, + map!{ + "key" => { 1 }, + "map1" => { true }, } } ] @@ -376,22 +350,16 @@ fn concurrently_assigned_nested_maps_should_not_merge() { let mut doc2 = new_doc(); let doc1_map_id = doc1 - .set(automerge::ROOT, "config", automerge::Value::map()) - .unwrap() - .unwrap(); - let doc1_field = doc1 - .set(doc1_map_id, "background", "blue") + .set(&automerge::ROOT, "config", automerge::Value::map()) .unwrap() .unwrap(); + doc1.set(&doc1_map_id, "background", "blue").unwrap(); let doc2_map_id = doc2 - .set(automerge::ROOT, "config", automerge::Value::map()) - .unwrap() - .unwrap(); - let doc2_field = doc2 - .set(doc2_map_id, "logo_url", "logo.png") + .set(&automerge::ROOT, "config", automerge::Value::map()) .unwrap() .unwrap(); + doc2.set(&doc2_map_id, "logo_url", "logo.png").unwrap(); doc1.merge(&mut doc2); @@ -399,11 +367,11 @@ fn concurrently_assigned_nested_maps_should_not_merge() { &doc1, map! 
{ "config" => { - doc1_map_id.native() => map!{ - "background" => {doc1_field.native() => "blue"} + map!{ + "background" => {"blue"} }, - doc2_map_id.translate(&doc2) => map!{ - "logo_url" => {doc2_field.translate(&doc2) => "logo.png"} + map!{ + "logo_url" => {"logo.png"} } } } @@ -418,16 +386,15 @@ fn concurrent_insertions_at_different_list_positions() { assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap()); let list_id = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let one = doc1.insert(list_id, 0, "one").unwrap(); - let three = doc1.insert(list_id, 1, "three").unwrap(); + doc1.insert(&list_id, 0, "one").unwrap(); + doc1.insert(&list_id, 1, "three").unwrap(); doc2.merge(&mut doc1); - let two = doc1.splice(list_id, 1, 0, vec!["two".into()]).unwrap()[0]; - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let four = doc2.insert(list_id_in_doc2, 2, "four").unwrap(); + doc1.splice(&list_id, 1, 0, vec!["two".into()]).unwrap(); + doc2.insert(&list_id, 2, "four").unwrap(); doc1.merge(&mut doc2); @@ -435,11 +402,11 @@ fn concurrent_insertions_at_different_list_positions() { &doc1, map! { "list" => { - list_id => list![ - {one.native() => "one"}, - {two.native() => "two"}, - {three.native() => "three"}, - {four.translate(&doc2) => "four"}, + list![ + {"one"}, + {"two"}, + {"three"}, + {"four"}, ] } } @@ -454,30 +421,29 @@ fn concurrent_insertions_at_same_list_position() { assert!(doc1.maybe_get_actor().unwrap() < doc2.maybe_get_actor().unwrap()); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let parakeet = doc1.insert(list_id, 0, "parakeet").unwrap(); + doc1.insert(&list_id, 0, "parakeet").unwrap(); doc2.merge(&mut doc1); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let starling = doc1.insert(list_id, 1, "starling").unwrap(); - let chaffinch = doc2.insert(list_id_in_doc2, 1, "chaffinch").unwrap(); + doc1.insert(&list_id, 1, "starling").unwrap(); + doc2.insert(&list_id, 1, "chaffinch").unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { "birds" => { - list_id => list![ + list![ { - parakeet.native() => "parakeet", + "parakeet", }, { - starling.native() => "starling", + "starling", }, { - chaffinch.translate(&doc2) => "chaffinch", + "chaffinch", }, ] }, @@ -489,13 +455,10 @@ fn concurrent_insertions_at_same_list_position() { fn concurrent_assignment_and_deletion_of_a_map_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); - doc1.set(automerge::ROOT, "bestBird", "robin").unwrap(); + doc1.set(&automerge::ROOT, "bestBird", "robin").unwrap(); doc2.merge(&mut doc1); - doc1.del(automerge::ROOT, "bestBird").unwrap(); - let set_two = doc2 - .set(automerge::ROOT, "bestBird", "magpie") - .unwrap() - .unwrap(); + doc1.del(&automerge::ROOT, "bestBird").unwrap(); + doc2.set(&automerge::ROOT, "bestBird", "magpie").unwrap(); doc1.merge(&mut doc2); @@ -503,7 +466,7 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { &doc1, map! 
{ "bestBird" => { - set_two.translate(&doc2) => "magpie", + "magpie", } } ); @@ -514,25 +477,22 @@ fn concurrent_assignment_and_deletion_of_list_entry() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let blackbird = doc1.insert(list_id, 0, "blackbird").unwrap(); - doc1.insert(list_id, 1, "thrush").unwrap(); - let goldfinch = doc1.insert(list_id, 2, "goldfinch").unwrap(); + doc1.insert(&list_id, 0, "blackbird").unwrap(); + doc1.insert(&list_id, 1, "thrush").unwrap(); + doc1.insert(&list_id, 2, "goldfinch").unwrap(); doc2.merge(&mut doc1); - - let starling = doc1.set(list_id, 1, "starling").unwrap().unwrap(); - - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - doc2.del(list_id_in_doc2, 1).unwrap(); + doc1.set(&list_id, 1, "starling").unwrap(); + doc2.del(&list_id, 1).unwrap(); assert_doc!( &doc2, map! { - "birds" => {list_id.translate(&doc1) => list![ - { blackbird.translate(&doc1) => "blackbird"}, - { goldfinch.translate(&doc1) => "goldfinch"}, + "birds" => {list![ + {"blackbird"}, + {"goldfinch"}, ]} } ); @@ -540,10 +500,10 @@ fn concurrent_assignment_and_deletion_of_list_entry() { assert_doc!( &doc1, map! { - "birds" => {list_id => list![ - { blackbird => "blackbird" }, - { starling => "starling" }, - { goldfinch => "goldfinch" }, + "birds" => {list![ + { "blackbird" }, + { "starling" }, + { "goldfinch" }, ]} } ); @@ -553,10 +513,10 @@ fn concurrent_assignment_and_deletion_of_list_entry() { assert_doc!( &doc1, map! { - "birds" => {list_id => list![ - { blackbird => "blackbird" }, - { starling => "starling" }, - { goldfinch => "goldfinch" }, + "birds" => {list![ + { "blackbird" }, + { "starling" }, + { "goldfinch" }, ]} } ); @@ -567,31 +527,29 @@ fn insertion_after_a_deleted_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let blackbird = doc1.insert(list_id, 0, "blackbird").unwrap(); - doc1.insert(list_id, 1, "thrush").unwrap(); - doc1.insert(list_id, 2, "goldfinch").unwrap(); + doc1.insert(&list_id, 0, "blackbird").unwrap(); + doc1.insert(&list_id, 1, "thrush").unwrap(); + doc1.insert(&list_id, 2, "goldfinch").unwrap(); doc2.merge(&mut doc1); - doc1.splice(list_id, 1, 2, Vec::new()).unwrap(); + doc1.splice(&list_id, 1, 2, Vec::new()).unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - let starling = doc2 - .splice(list_id_in_doc2, 2, 0, vec!["starling".into()]) - .unwrap()[0]; + doc2.splice(&list_id, 2, 0, vec!["starling".into()]) + .unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { - "birds" => {list_id => list![ - { blackbird.native() => "blackbird" }, - { starling.translate(&doc2) => "starling" } + "birds" => {list![ + { "blackbird" }, + { "starling" } ]} } ); @@ -600,9 +558,9 @@ fn insertion_after_a_deleted_list_element() { assert_doc!( &doc2, map! 
{ - "birds" => {list_id.translate(&doc1) => list![ - { blackbird.translate(&doc1) => "blackbird" }, - { starling.native() => "starling" } + "birds" => {list![ + { "blackbird" }, + { "starling" } ]} } ); @@ -613,29 +571,28 @@ fn concurrent_deletion_of_same_list_element() { let mut doc1 = new_doc(); let mut doc2 = new_doc(); let list_id = doc1 - .set(automerge::ROOT, "birds", automerge::Value::list()) + .set(&automerge::ROOT, "birds", automerge::Value::list()) .unwrap() .unwrap(); - let albatross = doc1.insert(list_id, 0, "albatross").unwrap(); - doc1.insert(list_id, 1, "buzzard").unwrap(); - let cormorant = doc1.insert(list_id, 2, "cormorant").unwrap(); + doc1.insert(&list_id, 0, "albatross").unwrap(); + doc1.insert(&list_id, 1, "buzzard").unwrap(); + doc1.insert(&list_id, 2, "cormorant").unwrap(); doc2.merge(&mut doc1); - doc1.del(list_id, 1).unwrap(); + doc1.del(&list_id, 1).unwrap(); - let list_id_in_doc2 = translate_obj_id(&doc1, &doc2, list_id); - doc2.del(list_id_in_doc2, 1).unwrap(); + doc2.del(&list_id, 1).unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { - "birds" => {list_id => list![ - { albatross => "albatross" }, - { cormorant => "cormorant" } + "birds" => {list![ + { "albatross" }, + { "cormorant" } ]} } ); @@ -644,9 +601,9 @@ fn concurrent_deletion_of_same_list_element() { assert_doc!( &doc2, map! { - "birds" => {list_id.translate(&doc1) => list![ - { albatross.translate(&doc1) => "albatross" }, - { cormorant.translate(&doc1) => "cormorant" } + "birds" => {list![ + { "albatross" }, + { "cormorant" } ]} } ); @@ -658,48 +615,47 @@ fn concurrent_updates_at_different_levels() { let mut doc2 = new_doc(); let animals = doc1 - .set(automerge::ROOT, "animals", automerge::Value::map()) + .set(&automerge::ROOT, "animals", automerge::Value::map()) .unwrap() .unwrap(); let birds = doc1 - .set(animals, "birds", automerge::Value::map()) + .set(&animals, "birds", automerge::Value::map()) .unwrap() .unwrap(); - doc1.set(birds, "pink", "flamingo").unwrap().unwrap(); - doc1.set(birds, "black", "starling").unwrap().unwrap(); + doc1.set(&birds, "pink", "flamingo").unwrap(); + doc1.set(&birds, "black", "starling").unwrap(); let mammals = doc1 - .set(animals, "mammals", automerge::Value::list()) + .set(&animals, "mammals", automerge::Value::list()) .unwrap() .unwrap(); - let badger = doc1.insert(mammals, 0, "badger").unwrap(); + doc1.insert(&mammals, 0, "badger").unwrap(); doc2.merge(&mut doc1); - doc1.set(birds, "brown", "sparrow").unwrap().unwrap(); + doc1.set(&birds, "brown", "sparrow").unwrap(); - let animals_in_doc2 = translate_obj_id(&doc1, &doc2, animals); - doc2.del(animals_in_doc2, "birds").unwrap(); + doc2.del(&animals, "birds").unwrap(); doc1.merge(&mut doc2); assert_obj!( &doc1, - automerge::ROOT, + &automerge::ROOT, "animals", map! { "mammals" => { - mammals => list![{ badger => "badger" }], + list![{ "badger" }], } } ); assert_obj!( &doc2, - automerge::ROOT, + &automerge::ROOT, "animals", map! 
{ "mammals" => { - mammals.translate(&doc1) => list![{ badger.translate(&doc1) => "badger" }], + list![{ "badger" }], } } ); @@ -711,21 +667,20 @@ fn concurrent_updates_of_concurrently_deleted_objects() { let mut doc2 = new_doc(); let birds = doc1 - .set(automerge::ROOT, "birds", automerge::Value::map()) + .set(&automerge::ROOT, "birds", automerge::Value::map()) .unwrap() .unwrap(); let blackbird = doc1 - .set(birds, "blackbird", automerge::Value::map()) + .set(&birds, "blackbird", automerge::Value::map()) .unwrap() .unwrap(); - doc1.set(blackbird, "feathers", "black").unwrap().unwrap(); + doc1.set(&blackbird, "feathers", "black").unwrap(); doc2.merge(&mut doc1); - doc1.del(birds, "blackbird").unwrap(); + doc1.del(&birds, "blackbird").unwrap(); - translate_obj_id(&doc1, &doc2, blackbird); - doc2.set(blackbird, "beak", "orange").unwrap(); + doc2.set(&blackbird, "beak", "orange").unwrap(); doc1.merge(&mut doc2); @@ -733,7 +688,7 @@ fn concurrent_updates_of_concurrently_deleted_objects() { &doc1, map! { "birds" => { - birds => map!{}, + map!{}, } } ); @@ -746,58 +701,55 @@ fn does_not_interleave_sequence_insertions_at_same_position() { let mut doc2 = new_doc_with_actor(actor2); let wisdom = doc1 - .set(automerge::ROOT, "wisdom", automerge::Value::list()) + .set(&automerge::ROOT, "wisdom", automerge::Value::list()) .unwrap() .unwrap(); doc2.merge(&mut doc1); - let doc1elems = doc1 - .splice( - wisdom, - 0, - 0, - vec![ - "to".into(), - "be".into(), - "is".into(), - "to".into(), - "do".into(), - ], - ) - .unwrap(); + doc1.splice( + &wisdom, + 0, + 0, + vec![ + "to".into(), + "be".into(), + "is".into(), + "to".into(), + "do".into(), + ], + ) + .unwrap(); - let wisdom_in_doc2 = translate_obj_id(&doc1, &doc2, wisdom); - let doc2elems = doc2 - .splice( - wisdom_in_doc2, - 0, - 0, - vec![ - "to".into(), - "do".into(), - "is".into(), - "to".into(), - "be".into(), - ], - ) - .unwrap(); + doc2.splice( + &wisdom, + 0, + 0, + vec![ + "to".into(), + "do".into(), + "is".into(), + "to".into(), + "be".into(), + ], + ) + .unwrap(); doc1.merge(&mut doc2); assert_doc!( &doc1, map! { - "wisdom" => {wisdom => list![ - {doc1elems[0].native() => "to"}, - {doc1elems[1].native() => "be"}, - {doc1elems[2].native() => "is"}, - {doc1elems[3].native() => "to"}, - {doc1elems[4].native() => "do"}, - {doc2elems[0].translate(&doc2) => "to"}, - {doc2elems[1].translate(&doc2) => "do"}, - {doc2elems[2].translate(&doc2) => "is"}, - {doc2elems[3].translate(&doc2) => "to"}, - {doc2elems[4].translate(&doc2) => "be"}, + "wisdom" => {list![ + {"to"}, + {"be"}, + {"is"}, + {"to"}, + {"do"}, + {"to"}, + {"do"}, + {"is"}, + {"to"}, + {"be"}, ]} } ); @@ -811,20 +763,19 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( let mut doc2 = new_doc_with_actor(actor2); let list = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let two = doc1.insert(list, 0, "two").unwrap(); + doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1); - let list_in_doc2 = translate_obj_id(&doc1, &doc2, list); - let one = doc2.insert(list_in_doc2, 0, "one").unwrap(); + doc2.insert(&list, 0, "one").unwrap(); assert_doc!( &doc2, map! 
{ - "list" => { list.translate(&doc1) => list![ - { one.native() => "one" }, - { two.translate(&doc1) => "two" }, + "list" => { list![ + { "one" }, + { "two" }, ]} } ); @@ -838,20 +789,19 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() let mut doc2 = new_doc_with_actor(actor2); let list = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let two = doc1.insert(list, 0, "two").unwrap(); + doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1); - let list_in_doc2 = translate_obj_id(&doc1, &doc2, list); - let one = doc2.insert(list_in_doc2, 0, "one").unwrap(); + doc2.insert(&list, 0, "one").unwrap(); assert_doc!( &doc2, map! { - "list" => { list.translate(&doc1) => list![ - { one.native() => "one" }, - { two.translate(&doc1) => "two" }, + "list" => { list![ + { "one" }, + { "two" }, ]} } ); @@ -863,26 +813,25 @@ fn insertion_consistent_with_causality() { let mut doc2 = new_doc(); let list = doc1 - .set(automerge::ROOT, "list", automerge::Value::list()) + .set(&automerge::ROOT, "list", automerge::Value::list()) .unwrap() .unwrap(); - let four = doc1.insert(list, 0, "four").unwrap(); + doc1.insert(&list, 0, "four").unwrap(); doc2.merge(&mut doc1); - let list_in_doc2 = translate_obj_id(&doc1, &doc2, list); - let three = doc2.insert(list_in_doc2, 0, "three").unwrap(); + doc2.insert(&list, 0, "three").unwrap(); doc1.merge(&mut doc2); - let two = doc1.insert(list, 0, "two").unwrap(); + doc1.insert(&list, 0, "two").unwrap(); doc2.merge(&mut doc1); - let one = doc2.insert(list_in_doc2, 0, "one").unwrap(); + doc2.insert(&list, 0, "one").unwrap(); assert_doc!( &doc2, map! { - "list" => {list.translate(&doc1) => list![ - {one.native() => "one"}, - {two.translate(&doc1) => "two"}, - {three.native() => "three" }, - {four.translate(&doc1) => "four"}, + "list" => { list![ + {"one"}, + {"two"}, + {"three" }, + {"four"}, ]} } ); @@ -900,28 +849,22 @@ fn save_and_restore_empty() { fn save_restore_complex() { let mut doc1 = new_doc(); let todos = doc1 - .set(automerge::ROOT, "todos", automerge::Value::list()) + .set(&automerge::ROOT, "todos", automerge::Value::list()) .unwrap() .unwrap(); - let first_todo = doc1.insert(todos, 0, automerge::Value::map()).unwrap(); - doc1.set(first_todo, "title", "water plants") + let first_todo = doc1 + .insert(&todos, 0, automerge::Value::map()) .unwrap() .unwrap(); - let first_done = doc1.set(first_todo, "done", false).unwrap().unwrap(); + doc1.set(&first_todo, "title", "water plants").unwrap(); + doc1.set(&first_todo, "done", false).unwrap(); let mut doc2 = new_doc(); doc2.merge(&mut doc1); - let first_todo_in_doc2 = translate_obj_id(&doc1, &doc2, first_todo); - let weed_title = doc2 - .set(first_todo_in_doc2, "title", "weed plants") - .unwrap() - .unwrap(); + doc2.set(&first_todo, "title", "weed plants").unwrap(); - let kill_title = doc1 - .set(first_todo, "title", "kill plants") - .unwrap() - .unwrap(); + doc1.set(&first_todo, "title", "kill plants").unwrap(); doc1.merge(&mut doc2); let reloaded = Automerge::load(&doc1.save().unwrap()).unwrap(); @@ -929,13 +872,13 @@ fn save_restore_complex() { assert_doc!( &reloaded, map! 
{ - "todos" => {todos.translate(&doc1) => list![ - {first_todo.translate(&doc1) => map!{ + "todos" => {list![ + {map!{ "title" => { - weed_title.translate(&doc2) => "weed plants", - kill_title.translate(&doc1) => "kill plants", + "weed plants", + "kill plants", }, - "done" => {first_done.translate(&doc1) => false}, + "done" => {false}, }} ]} } diff --git a/edit-trace/benches/main.rs b/edit-trace/benches/main.rs index fed72f1e..197614f6 100644 --- a/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -5,9 +5,9 @@ use std::fs; fn replay_trace(commands: Vec<(usize, usize, Vec)>) -> Automerge { let mut doc = Automerge::new(); - let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); + let text = doc.set(&ROOT, "text", Value::text()).unwrap().unwrap(); for (pos, del, vals) in commands { - doc.splice(text, pos, del, vals).unwrap(); + doc.splice(&text, pos, del, vals).unwrap(); } doc.commit(None, None); doc diff --git a/edit-trace/src/main.rs b/edit-trace/src/main.rs index 94fde72c..db39bcdd 100644 --- a/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -19,12 +19,12 @@ fn main() -> Result<(), AutomergeError> { let mut doc = Automerge::new(); let now = Instant::now(); - let text = doc.set(ROOT, "text", Value::text()).unwrap().unwrap(); + let text = doc.set( &ROOT, "text", Value::text()).unwrap().unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} ms", i, now.elapsed().as_millis()); } - doc.splice(text, pos, del, vals)?; + doc.splice(&text, pos, del, vals)?; } let _ = doc.save(); println!("Done in {} ms", now.elapsed().as_millis());